"""
For investigating the accuracy of the field distribution required to provide
realistic BPMG membership probabilities, based on the current best fit to the group
"""
from __future__ import print_function, division
import logging
import numpy as np
from distutils.dir_util import mkpath
import sys
from astropy.io import fits
sys.path.insert(0, '..')
import chronostar.traceorbit as torb
import chronostar.transform as tf
import chronostar.retired2.datatool as dt
def MVGaussian(vec_x, mean, cov, inv_cov = None):
"""
Evaluate the MVGaussian defined by mean and cov at vec_x
Parameters
----------
vec_x : [dim] float array
the point at which to evaluate the function
mean : [dim] float array
the mean of the MVGaussian distribution
cov : [dim, dim] float array
the covaraince matrix of the MVGaussian distribution
Returns
-------
(float)
evaluation of vec_x
"""
if inv_cov is None:
inv_cov = np.linalg.inv(cov)
dim = vec_x.shape[0]
assert (mean.shape == vec_x.shape)
assert (cov.shape == (dim, dim))
coeff = 1./np.sqrt((2*np.pi)**dim * np.linalg.det(cov))
diff = vec_x - mean
expon = -0.5 * np.dot(diff,
np.dot(inv_cov, diff))
return coeff * np.exp(expon)
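# Illustrative usage of MVGaussian (a minimal sketch, not part of the original
# analysis; the values below are made up). Uncomment to run.
# mean = np.zeros(6)
# cov = np.eye(6)
# x = np.array([0.5, 0., 0., 0., 0., 0.])
# dens = MVGaussian(x, mean, cov)
# # passing a precomputed inverse avoids re-inverting cov inside loops
# dens2 = MVGaussian(x, mean, cov, inv_cov=np.linalg.inv(cov))
# assert np.isclose(dens, dens2)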
logging.basicConfig(level=logging.INFO, stream=sys.stdout)
rdir = "../results/em_fit/gaia_dr2_bp/"
# final groups represent the mode of the final sampling stage
final_groups_file = rdir + "final/final_groups.npy"
final_chain0_file = rdir + "final/group0/final_chain.npy"
final_chain1_file = rdir + "final/group1/final_chain.npy"
final_membership_file = rdir + "final/final_membership.npy"
bp_xyzuvw_file = "../data/gaia_dr2_bp_xyzuvw.fits"
gaia_xyzuvw_mean_file = "../data/gaia_dr2_mean_xyzuvw.npy"
gaia_astr_file = "../data/all_rvs_w_ok_plx.fits"
# analysis files
andir = "../results/bp_members/"
mkpath(andir)
gaia_bpmg_evals_file = andir + "gaia_bpmg_evals.npy"
gaia_gaia_evals_file = andir + "gaia_gaia_evals.npy"
bpmg_candidates_mask_file = andir + "bpmg_candidates_mask.npy"
gaia_mean_file = andir + "gaia_mean.npy"
gaia_cov_file = andir + "gaia_cov.npy"
bpmg_memb_probs_file = andir + "bpmg_memb_probs.npy"
gaia_xyzuvw = np.load(gaia_xyzuvw_mean_file)
z_final = np.load(final_membership_file)
bp_hdul = fits.open(bp_xyzuvw_file)
gaia_hdul = fits.open(gaia_astr_file)
bp_xyzuvw = bp_hdul[1].data
bp_xyzuvw_cov = bp_hdul[2].data
bp_core_mask = np.where(z_final[:,0] > 0.75)
bp_core_xyzuvw = bp_xyzuvw[bp_core_mask]
bpmg_then = np.load(final_groups_file)[0]
bpmg_mean_now = torb.trace_cartesian_orbit(bpmg_then.mean, bpmg_then.age,
single_age=True)
bpmg_cov_now = tf.transform_covmatrix(
bpmg_then.generateCovMatrix(), torb.trace_cartesian_orbit,
bpmg_then.mean, dim=6,
args=(bpmg_then.age, True)
)
ngaia_stars = gaia_xyzuvw.shape[0]
try:
gaia_bpmg_evals = np.load(gaia_bpmg_evals_file)
except IOError:
print("Evaluating gaia stars at BPMG current MVGauss distribution")
gaia_bpmg_evals = np.zeros(ngaia_stars)
bpmg_invcov_now = np.linalg.inv(bpmg_cov_now)
for i, gaia_star in enumerate(gaia_xyzuvw):
if (i % 100000 == 0):
print("{:10} of {:10}... {:6.1f}%".format(i, ngaia_stars,
i / ngaia_stars*100))
# UNTESTED!!!
gaia_bpmg_evals[i] = MVGaussian(gaia_star, bpmg_mean_now,
bpmg_cov_now,
inv_cov=bpmg_invcov_now)
np.save(gaia_bpmg_evals_file, gaia_bpmg_evals)
bpmg_candidates_mask = np.where(gaia_bpmg_evals >
np.percentile(gaia_bpmg_evals,99.99))
np.save(bpmg_candidates_mask_file, bpmg_candidates_mask)
try:
gaia_mean = np.load(gaia_mean_file)
gaia_cov = np.load(gaia_cov_file)
import abc
import datetime
import numpy as np # type: ignore
from image import Image
from numba import jit # type: ignore
from scipy import ndimage # type: ignore
from typing import Tuple
class TextureSynthesizer(abc.ABC):
"""
A TextureSynthesizer object synthesizes output images of arbitrary size that
resemble the texture captured in a source image.
"""
def __init__(self,
source_image_path: str,
source_image_size: Tuple[int, int],
output_image_path: str,
output_image_size: Tuple[int, int]) -> None:
"""
Constructs a TextureSynthesizer superclass object with the given source
and output image paths, along with a source and output image size.
Args:
source_image_path: Path to load the source image.
source_image_size: Size of the source image.
output_image_path: Path to save the output image.
output_image_size: Size of the output image.
"""
assert all(n >= 1 for n in source_image_size), "Source image size cannot be zero or negative."
assert all(n >= 1 for n in output_image_size), "Output image size cannot be zero or negative."
self.__source_image = Image(source_image_path)
self.__source_image_size = source_image_size
self.__output_image = Image(output_image_path)
self.__output_image_size = output_image_size
def synthesize(self) -> None:
"""Synthesizes the output image from the source image."""
# Load the source image.
self.__source_image.load()
self.__source_image.resize(*self.__source_image_size)
# Create the output image.
self.__output_image.create(*self.__output_image_size)
beg = datetime.datetime.now()
self.render(self.__source_image, self.__output_image)
end = datetime.datetime.now()
self.__output_image.save()
duration = end - beg
print(f'Finished texture synthesis in {duration}.')
@abc.abstractmethod
def render(self, source_image: Image, output_image: Image) -> None:
"""
Renders an output Image from the given source Image.
Args:
source_image: The source Image.
output_image: The output Image.
"""
raise NotImplementedError("TextureSynthesizer.render() is not implemented.")
@staticmethod
def _apply_distance_filter(image: Image, window: Image, members: np.ndarray, weights: np.ndarray) -> np.ndarray:
"""
Returns a matrix containing the weighted squared difference of the pixel
values between each window in the given Image and the reference window.
Pixels that fall outside the Image are reflected across the boundaries
of the Image.
Args:
image: The Image.
window: The reference window.
members: Elements to compare between the windows.
weights: The weighting of each pixel difference within a window.
Returns:
A matrix containing the desired weighted squared differences.
"""
distances = np.zeros(image.size)
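# A minimal pure-numpy sketch of the weighted squared-difference computation
# described in the docstring above (illustrative only; it operates on plain
# 2-D grayscale ndarrays rather than the project's Image class, and
# _weighted_ssd_sketch is a hypothetical helper name). Uncomment to use.
# def _weighted_ssd_sketch(array, window, members, weights):
#     # array: 2-D image; window/members/weights share the window shape
#     h, w = window.shape
#     padded = np.pad(array, ((h // 2, h - 1 - h // 2),
#                             (w // 2, w - 1 - w // 2)), mode='reflect')
#     out = np.zeros(array.shape)
#     for r in range(array.shape[0]):
#         for c in range(array.shape[1]):
#             patch = padded[r:r + h, c:c + w]
#             out[r, c] = np.sum(weights * members * (patch - window) ** 2)
#     return out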
"""
This module contains a class for discrete
1-dimensional exponential families. The main
uses for this class are exact (post-selection)
hypothesis tests and confidence intervals.
"""
import numpy as np
import warnings
from ..truncated import find_root
def crit_func(test_statistic, left_cut, right_cut):
"""
A generic critical function for an interval,
with weights at the endpoints.
((test_statistic < CL) + (test_statistic > CR) +
gammaL * (test_statistic == CL) +
gammaR * (test_statistic == CR))
where (CL, gammaL) = left_cut, (CR, gammaR) = right_cut.
Parameters
----------
test_statistic : np.float
Observed value of test statistic.
left_cut : (float, float)
(CL, gammaL): left endpoint and value at exactly the left endpoint (should be in [0,1]).
right_cut : (float, float)
(CR, gammaR): right endpoint and value at exactly the right endpoint (should be in [0,1]).
Returns
-------
decision : np.float
"""
CL, gammaL = left_cut
CR, gammaR = right_cut
value = ((test_statistic < CL) + (test_statistic > CR)) * 1.
if gammaL != 0:
value += gammaL * (test_statistic == CL)
if gammaR != 0:
value += gammaR * (test_statistic == CR)
return value
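# Illustrative calls to crit_func (not part of the original module): with cuts
# CL=-2, CR=2 and boundary weights 0.5, a statistic of 3 is fully rejected,
# a statistic exactly at the right cut contributes the boundary weight, and
# a statistic of 0 is accepted.
# crit_func(3.0, (-2.0, 0.5), (2.0, 0.5))   # -> 1.0
# crit_func(2.0, (-2.0, 0.5), (2.0, 0.5))   # -> 0.5
# crit_func(0.0, (-2.0, 0.5), (2.0, 0.5))   # -> 0.0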
class discrete_family(object):
def __init__(self, sufficient_stat, weights):
r"""
A discrete 1-dimensional
exponential family with reference measure $\sum_j w_j \delta_{X_j}$
and sufficient statistic `sufficient_stat`. For any $\theta$, the distribution
is
.. math::
P_{\theta} = \sum_{j} e^{\theta X_j - \Lambda(\theta)} w_j \delta_{X_j}
where
.. math::
\Lambda(\theta) = \log \left(\sum_j w_j e^{\theta X_j} \right).
Parameters
----------
sufficient_stat : `np.float((n))`
weights : `np.float(n)`
Notes
-----
The weights are normalized to sum to 1.
"""
xw = np.array(sorted(zip(sufficient_stat, weights)))
self._x = xw[:,0]
self._w = xw[:,1]
self._lw = np.log(xw[:,1])
import scipy.io.wavfile as sio
import scipy.signal as sis
from scipy import interpolate
import numpy as np
import math
import matplotlib.pyplot as plt
import mylib as myl
import sys
import copy as cp
import re
import scipy.fftpack as sf
# NOTE: int2float might be removed after scipy update/check
# (check defaults in myl.sig_preproc)
# read wav file
# IN:
# fileName
# OUT:
# signal ndarray
# sampleRate
def wavread(f,opt={'do_preproc':True}):
## signal input
fs, s_in = sio.read(f)
# int -> float
s = myl.wav_int2float(s_in)
# preproc
if opt['do_preproc']:
s = myl.sig_preproc(s)
return s, fs
# DCT
# IN:
# y - 1D signal vector
# opt
# ['fs'] - sample rate
# ['wintyp'] - <'kaiser'>, any type supported by
# scipy.signal.get_window()
# ['winparam'] - <1> additionally needed window parameters,
# scalar, string, list ..., depends on 'wintyp'
# ['nsm'] - <3> number of spectral moments
# ['rmo'] - skip first (lowest) cosine (=constant offset)
# in spectral moment calculation <1>|0
# ['lb'] - lower cutoff frequency for coef truncation <0>
# ['ub'] - upper cutoff frequency (if 0, no cutoff) <0>
# Recommended e.g. for f0 DCT, so that only influence
# of events with <= 10Hz on f0 contour is considered)
# ['peak_prct'] - <80> lower percentile threshold to be exceeded for
# amplitude maxima in DCT spectrum
# OUT:
# dct
# ['c_orig'] all coefs
# ['f_orig'] their frequencies
# ['c'] coefs with freq between lb and ub
# ['f'] their freqs
# ['i'] their indices in c_orig
# ['sm'] spectral moments based on c
# ['opt'] input options
# ['m'] y mean
# ['sd'] y standard dev
# ['cbin'] array of sum(abs(coef)) in frequency bins
# ['fbin'] corresponding lower boundary freqs
# ['f_max'] frequency of global amplitude maximum
# ['f_lmax'] frequencies of local maxima (array of minlen 1)
# ['c_cog'] the coef amplitude of the cog freq (sm[0])
# PROBLEMS:
# - if segment is too short (< 5 samples) lowest freqs associated to
# DCT components are too high for ub, that is dct_trunc() returns
# empty array.
# -> np.nan assigned to respective variables
def dct_wrapper(y,opt):
dflt={'wintyp':'kaiser','winparam':1,'nsm':3,'rmo':True,
'lb':0,'ub':0,'peak_prct':80}
opt = myl.opt_default(opt,dflt)
# weight window
w = sig_window(opt['wintyp'],len(y),opt['winparam'])
y = y*w
#print(1,len(y))
# centralize
y = y-np.mean(y)
#print(2,len(y))
# DCT coefs
c = sf.dct(y,norm='ortho')
#print(3,len(c))
# indices (starting with 0)
ly = len(y)
ci = myl.idx_a(ly)
# corresponding cos frequencies
f = ci+1 * (opt['fs']/(ly*2))
# band pass truncation of coefs
# indices of coefs with lb <= freq <= ub
i = dct_trunc(f,ci,opt)
#print('f ci i',f,ci,i)
# analysis segment too short -> DCT freqs above ub
if len(i)==0:
sm = myl.ea()
while len(sm) <= opt['nsm']:
sm = np.append(sm,np.nan)
return {'c_orig':c,'f_orig':f,'c':myl.ea(),'f':myl.ea(),'i':[],'sm':sm,'opt':opt,
'm':np.nan,'sd':np.nan,'cbin':myl.ea(),'fbin':myl.ea(),
'f_max':np.nan, 'f_lmax':myl.ea(), 'c_cog': np.nan}
# mean abs error from band-limited IDCT
#mae = dct_mae(c,i,y)
# remove constant offset with index 0
# already removed by dct_trunc in case lb>0. Thus checked for i[0]==0
# (i[0] indeed represents constant offset; tested by
# cr = np.zeros(ly); cr[0]=c[0]; yr = sf.idct(cr); print(yr)
if opt['rmo']==True and len(i)>1 and i[0]==0:
j = i[1:len(i)]
else:
j = i
if type(j) is not list: j = [j]
# coefs and their frequencies between lb and ub
# (+ constant offset removed)
fi = f[j]
ci = c[j]
# spectral moments
if len(j)>0:
sm = specmom(ci,fi,opt['nsm'])
else:
sm = np.zeros(opt['nsm'])
# frequency bins
fbin, cbin = dct_fbin(fi,ci,opt)
# frequencies of global and local maxima in DCT spectrum
f_max, f_lmax, px = dct_peak(ci,fi,sm[0],opt)
# return
return {'c_orig':c,'f_orig':f,'c':ci,'f':fi,'i':j,'sm':sm,'opt':opt,
'm':np.mean(y),'sd':np.std(y),'cbin':cbin,'fbin':fbin,
'f_max':f_max, 'f_lmax':f_lmax, 'c_cog': px}
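# Example call of dct_wrapper (illustrative only; threshold values are
# arbitrary). For an f0 contour sampled at 100 Hz, keeping only coefficients
# between 0 and 10 Hz as recommended above:
# f0 = np.sin(2*np.pi*2*np.arange(0, 1, 0.01)) + 5
# d = dct_wrapper(f0, {'fs': 100, 'lb': 0, 'ub': 10})
# d['sm']     # spectral moments of the band-limited spectrum
# d['f_max']  # frequency of the global amplitude maximum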
# returns local and max peak frequencies
# IN:
# x: array of abs coef amplitudes
# f: corresponding frequencies
# cog: center of gravity
# OUT:
# f_gm: freq of global maximum
# f_lm: array of freq of local maxima
# px: threshold to be exceeded (derived from prct specs)
def dct_peak(x,f,cog,opt):
x = abs(cp.deepcopy(x))
## global maximum
i = myl.find(x,'is','max')
if len(i)>1:
i=int(np.mean(i))
f_gm = float(f[i])
## local maxima
# threshold to be exceeded
px = dct_px(x,f,cog,opt)
idx = myl.find(x,'>=',px)
# 2d array of neighboring+1 indices
# e.g. [[0,1,2],[5,6],[9,10]]
ii = []
# min freq distance between maxima
fd_min = 1
for i in myl.idx(idx):
if len(ii)==0:
ii.append([idx[i]])
elif idx[i]>ii[-1][-1]+1:
xi = x[ii[-1]]
fi = f[ii[-1]]
j = myl.find(xi,'is','max')
#print('xi',xi,'fi',fi,'f',f[idx[i]])
if len(j)>0 and f[idx[i]]>fi[j[0]]+fd_min:
#print('->1')
ii.append([idx[i]])
else:
#print('->2')
ii[-1].append(idx[i])
#myl.stopgo() #!c
else:
ii[-1].append(idx[i])
# get index of x maximum within each subsegment
# and return corresponding frequencies
f_lm = []
for si in ii:
zi = myl.find(x[si],'is','max')
if len(zi)>1:
zi=int(np.mean(zi))
else:
zi = zi[0]
i = si[zi]
if not np.isnan(i):
f_lm.append(f[i])
#print('px',px)
#print('i',ii)
#print('x',x)
#print('f',f)
#print('m',f_gm,f_lm)
#myl.stopgo()
return f_gm, f_lm, px
# return center-of-gravity related amplitude
# IN:
# x: array of coefs
# f: corresponding freqs
# cog: center of gravity freq
# opt
# OUT:
# coef amplitude related to cog
def dct_px(x,f,cog,opt):
x = abs(cp.deepcopy(x))
# cog outside freq range
if cog <= f[0]:
return x[0]
elif cog >= f[-1]:
return x[-1]
# find f-indices adjacent to cog
for i in range(len(f)-1):
if f[i] == cog:
return x[i]
elif f[i+1] == cog:
return x[i+1]
elif f[i] < cog and f[i+1] > cog:
# interpolate
#xi = np.interp(cog,f[i:i+2],x[i:i+2])
#print('cog:',cog,'xi',f[i:i+2],x[i:i+2],'->',xi)
return np.interp(cog,f[i:i+2],x[i:i+2])
return np.percentile(x,opt['peak_prct'])
# pre-emphasis
# alpha > 1 (interpreted as lower cutoff freq)
# alpha <- exp(-2 pi alpha delta)
# s'[n] = s[n]-alpha*s[n-1]
# IN:
# signal
# alpha - s[n-1] weight <0.95>
# fs - sample rate <-1>
# do_scale - <FALSE> if TRUE then the pre-emphasized signal is scaled to
# same abs_mean value as original signal (in general pre-emphasis
# leads to overall energy loss)
def pre_emphasis(y,a=0.95,fs=-1,do_scale=False):
# determining alpha directly or from cutoff freq
if a>1:
if fs <= 0:
print('pre emphasis: alpha cannot be calculated from cutoff freq without sample rate fs. Set to 0.95')
a = 0.95
else:
a = math.exp(-2*math.pi*a*1/fs)
#print('alpha',a)
# shifted signal
ype = np.append(y[0], y[1:] - a * y[:-1])
# scaling
if do_scale:
sf = np.mean(abs(y))/np.mean(abs(ype))
ype*=sf
## plot
#ys = y[30000:40000]
#ypes = ype[30000:40000]
#t = np.linspace(0,len(ys),len(ys))
#fig, spl = plt.subplots(2,1,squeeze=False)
#cid1 = fig.canvas.mpl_connect('button_press_event', onclick_next)
#cid2 = fig.canvas.mpl_connect('key_press_event', onclick_exit)
#spl[0,0].plot(t,ys)
#spl[1,0].plot(t,ypes)
#plt.show()
##
return ype
# frequency bins: symmetric 2-Hz windows around freq integers
# in bandpass overlapped by 1 Hz
# IN:
# f - ndarray frequencies
# c - ndarray coefs
# opt['lb'] - lower and upper truncation freqs
# ['ub']
# OUT:
# fbin - ndarray, lower bnd of freq bins
# cbin - ndarray, summed abs coef values in these bins
def dct_fbin(f,c,opt):
fb = myl.idx_seg(math.floor(opt['lb']),math.ceil(opt['ub']))
cbin = np.zeros(len(fb)-1);
for j in myl.idx_a(len(fb)-1):
k = myl.intersect(myl.find(f,'>=',fb[j]),
myl.find(f,'<=',fb[j+1]))
cbin[j] = sum(abs(c[k]))
fbin = fb[myl.idx_a(len(fb)-1)]
return fbin, cbin
# spectral moments
# IN:
# c - ndarray, coefficients
# f - ndarray, related frequencies <1:len(c)>
# n - number of spectral moments <3>
# OUT:
# m - ndarray moments (increasing)
def specmom(c,f=[],n=3):
if len(f)==0:
f = myl.idx_a(len(c))+1
c = abs(c)
s = sum(c)
k=0;
m = np.asarray([])
for i in myl.idx_seg(1,n):
m = myl.push(m, sum(c*((f-k)**i))/s)
k = m[-1]
return m
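# Worked example for specmom (illustrative): for coefficient amplitudes
# c=[1,2,1] at frequencies f=[1,2,3] the first moment (spectral center of
# gravity) is (1*1+2*2+1*3)/4 = 2, and the second moment, centered on the
# previous moment as implemented above, is (1*(1-2)**2+2*0+1*(3-2)**2)/4 = 0.5.
# specmom(np.array([1,2,1]), np.array([1,2,3]), 2)  # -> array([2. , 0.5])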
# wrapper around IDCT
# IN:
# c - coef vector derived by dct
# i - indices of coefs to be taken for IDCT; if empty (default),
# all coefs taken)
# OUT:
# y - IDCT result
def idct_bp(c,i=myl.ea()):
if len(i)==0:
return sf.idct(c,norm='ortho')
cr = np.zeros(len(c))
cr[i]=c[i]
return sf.idct(cr)
# mean abs error from IDCT
def dct_mae(c,i,y):
cr = np.zeros(len(c))
cr[i]=c[i]
yr = sf.idct(cr)
return myl.mae(yr,y)
# indices to truncate DCT output to freq band
# IN:
# f - ndarray, all frequencies
# ci - all indices of coef ndarray
# opt['lb'] - lower cutoff freq
# ['ub'] - upper cutoff freq
# OUT:
# i - ndarray, indices in F of elements to be kept
def dct_trunc(f,ci,opt):
if opt['lb']>0:
ihp = myl.find(f,'>=',opt['lb'])
else:
ihp = ci
if opt['ub']>0:
ilp = myl.find(f,'<=',opt['ub'])
else:
ilp = ci
return myl.intersect(ihp,ilp)
# wrapper around wavread and energy calculation
# IN:
# f: wavFileName (any number of channels) or array containing
# the signal (any number of channels=columns)
# opt: energy extraction and postprocessing
# .win, .wintyp, .winparam: window parameters
# .sts: stepsize for energy contour
# .do_preproc: centralizing signal
# .do_out: remove outliers
# .do_interp: linear interpolation over silence
# .do_smooth: smoothing (median or savitzky golay)
# .out dict; see pp_outl()
# .smooth dict; see pp_smooth()
# fs: <-1> needed if f is array
# OUT:
# y: time + energy contour 2-dim np.array
# (1st column: time, other columns: energy)
def wrapper_energy(f,opt = {}, fs = -1):
opt = myl.opt_default(opt,{'wintyp':'hamming',
'winparam':'',
'sts':0.01,
'win':0.05,
'do_preproc': True,
'do_out': False,
'do_interp': False,
'do_smooth': False,
'out': {},
'smooth': {}})
opt['out'] = myl.opt_default(opt['out'], {'f': 3,
'm': 'mean'})
opt['smooth'] = myl.opt_default(opt['smooth'],{"mtd": "sgolay",
"win": 7,
"ord": 3})
if type(f) is str:
s, fs = wavread(f,opt)
else:
if fs < 0:
sys.exit("array input requires sample rate fs. Exit.")
s = f
opt['fs']=fs
# convert to 2-dim array; each column represents a channel
if np.ndim(s)==1:
s = np.expand_dims(s, axis=1)
# output (.T-ed later, reserve first list for time)
y = myl.ea()
# over channels
for i in np.arange(0,s.shape[1]):
e = sig_energy(s[:,i],opt)
# setting outlier to 0
if opt['do_out']:
e = pp_outl(e,opt['out'])
# interpolation over 0
if opt['do_interp']:
e = pp_interp(e)
# smoothing
if opt['do_smooth']:
e = pp_smooth(e,opt['smooth'])
# <0 -> 0
e[myl.find(e,'<',0)]=0
y = myl.push(y,e)
# output
if np.ndim(y)==1:
y = np.expand_dims(y, axis=1)
else:
y = y.T
# concat time as 1st column
sts = opt['sts']
t = np.arange(0,sts*y.shape[0],sts)
if len(t) != y.shape[0]:
while len(t) > y.shape[0]:
t = t[0:len(t)-1]
while len(t) < y.shape[0]:
t = np.append(t,t[-1]+sts)
t = np.expand_dims(t, axis=1)
y = np.concatenate((t,y),axis=1)
return y
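# Example call of wrapper_energy (illustrative; the file name is a placeholder):
# y = wrapper_energy('audio.wav', {'sts': 0.01, 'win': 0.05, 'do_smooth': True})
# y[:, 0]  # time stamps in seconds
# y[:, 1]  # energy contour of the first channel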
### replacing outliers by 0 ###################
def pp_outl(y,opt):
if "m" not in opt:
return y
# ignore zeros
opt['zi'] = True
io = myl.outl_idx(y,opt)
if np.size(io)>0:
y[io] = 0
return y
### interpolation over 0 (+constant extrapolation) #############
def pp_interp(y,opt={}):
xi = myl.find(y,'==',0)
xp = myl.find(y,'>',0)
yp = y[xp]
if "kind" in opt:
f = interpolate.interp1d(xp,yp,kind=opt["kind"],
fill_value=(yp[0],yp[-1]))
yi = f(xi)
else:
yi = np.interp(xi,xp,yp)
y[xi]=yi
return y
#!check
### smoothing ########################################
# remark: savgol_filter() causes warning
# Using a non-tuple sequence for multidimensional indexing is deprecated
# will be out with scipy.signal 1.2.0
# (https://github.com/scipy/scipy/issues/9086)
def pp_smooth(y,opt):
if opt['mtd']=='sgolay':
if len(y) <= opt['win']:
return y
y = sis.savgol_filter(y,opt['win'],opt['ord'])
elif opt['mtd']=='med':
y = sis.medfilt(y,opt['win'])
return y
# calculates energy contour from acoustic signal
# do_preproc per default False. If not yet preprocessed by myl.sig_preproc()
# set to True
# IN:
# x ndarray signal
# opt['fs'] - sample frequency
# ['wintyp'] - <'hamming'>, any type supported by
# scipy.signal.get_window()
# ['winparam'] - <''> additionally needed window parameters,
# scalar, string, list ...
# ['sts'] - stepsize of moving window
# ['win'] - window length
# OUT:
# y ndarray energy contour
def sig_energy(x,opt):
dflt={'wintyp':'hamming','winparam':'','sts':0.01,'win':0.05}
opt = myl.opt_default(opt,dflt)
# stepsize and winlength in samples
sts = round(opt['sts']*opt['fs'])
win = min([math.floor(len(x)/2),round(opt['win']*opt['fs'])])
# weighting window
w = sig_window(opt['wintyp'],win,opt['winparam'])
# energy values
y = np.asarray([])
for j in myl.idx_a(len(x)-win,sts):
s = x[j:j+len(w)]*w
y = myl.push(y,myl.rmsd(s))
return y
# wrapper around windows
# IN:
# typ: any type supported by scipy.signal.get_window()
# lng: <1> length
# par: <''> additional parameters as string, scalar, list etc
# OUT:
# window array
def sig_window(typ,l=1,par=''):
if typ=='none' or typ=='const':
return np.ones(l)
if ((type(par) is str) and (len(par) == 0)):
return sis.get_window(typ,l)
return sis.get_window((typ,par),l)
# pause detection
# IN:
# s - mono signal
# opt['fs'] - sample frequency
# ['ons'] - idx onset <0> (to be added to time output)
# ['flt']['f'] - filter options, boundary frequencies in Hz
# (2 values for btype 'band', else 1): <8000> (evtl. lowered by fu_filt())
# ['btype'] - <'band'>|'high'|<'low'>
# ['ord'] - butterworth order <5>
# ['fs'] - (internally copied)
# ['l'] - analysis window length (in sec)
# ['l_ref'] - reference window length (in sec)
# ['e_rel'] - min energy quotient analysisWindow/referenceWindow
# ['fbnd'] - True|<False> assume pause at beginning and end of file
# ['n'] - <-1> extract exactly n pauses (if > -1)
# ['min_pau_l'] - min pause length <0.5> sec
# ['min_chunk_l'] - min inter-pausal chunk length <0.2> sec
# ['force_chunk'] - <False>, if True, pause-only is replaced by chunk-only
# ['margin'] - <0> time to reduce pause on both sides (sec; if chunks need init and final silence)
# OUT:
# pau['tp'] 2-dim array of pause [on off] (in sec)
# ['tpi'] 2-dim array of pause [on off] (indices in s = sampleIdx-1 !!)
# ['tc'] 2-dim array of speech chunks [on off] (i.e. non-pause, in sec)
# ['tci'] 2-dim array of speech chunks [on off] (indices)
# ['e_ratio'] - energy ratios corresponding to pauses in ['tp'] (analysisWindow/referenceWindow)
def pau_detector(s,opt={}):
if 'fs' not in opt:
sys.exit('pau_detector: opt does not contain key fs.')
dflt = {'e_rel':0.0767,'l':0.1524,'l_ref':5,'n':-1,'fbnd':False,'ons':0,'force_chunk':False,
'min_pau_l':0.4,'min_chunk_l':0.2,'margin':0,
'flt':{'btype':'low','f':np.asarray([8000]),'ord':5}}
opt = myl.opt_default(opt,dflt)
opt['flt']['fs'] = opt['fs']
## removing DC, low-pass filtering
flt = fu_filt(s,opt['flt'])
y = flt['y']
## pause detection for >=n pauses
t, e_ratio = pau_detector_sub(y,opt)
if len(t)>0:
## extending 1st and last pause to file boundaries
if opt['fbnd']==True:
t[0,0]=0
t[-1,-1]=len(y)-1
## merging pauses across too short chunks
## merging chunks across too small pauses
if (opt['min_pau_l']>0 or opt['min_chunk_l']>0):
t, e_ratio = pau_detector_merge(t,e_ratio,opt)
## too many pauses?
# -> subsequently remove the ones with highest e-ratio
if (opt['n']>0 and len(t)>opt['n']):
t, e_ratio = pau_detector_red(t,e_ratio,opt)
## speech chunks
tc = pau2chunk(t,len(y))
## pause-only -> chunk-only
if (opt['force_chunk']==True and len(tc)==0):
tc = cp.deepcopy(t)
t = np.asarray([])
e_ratio = np.asarray([])
## add onset
t = t+opt['ons']
tc = tc+opt['ons']
## return dict
## incl fields with indices to seconds (index+1=sampleIndex)
pau={'tpi':t, 'tci':tc, 'e_ratio': e_ratio}
pau['tp'] = myl.idx2sec(t,opt['fs'])
pau['tc'] = myl.idx2sec(tc,opt['fs'])
#print(pau)
return pau
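# Example call of pau_detector (illustrative; s is a mono signal array and
# fs its sample rate):
# pau = pau_detector(s, {'fs': fs, 'min_pau_l': 0.4, 'fbnd': True})
# pau['tp']  # [on off] pause times in seconds
# pau['tc']  # [on off] speech chunk times in seconds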
# merging pauses across too short chunks
# merging chunks across too small pauses
# IN:
# t [[on off]...] of pauses
# e [e_rat ...]
# OUT:
# t [[on off]...] merged
# e [e_rat ...] merged (simply mean of merged segments taken)
def pau_detector_merge(t,e,opt):
## min pause and chunk length in samples
mpl = myl.sec2smp(opt['min_pau_l'],opt['fs'])
mcl = myl.sec2smp(opt['min_chunk_l'],opt['fs'])
## merging chunks across short pauses
tm = np.asarray([])
em = np.asarray([])
for i in myl.idx_a(len(t)):
if ((t[i,1]-t[i,0] >= mpl) or
(opt['fbnd']==True and (i==0 or i==len(t)-1))):
tm = myl.push(tm,t[i,:])
em = myl.push(em,e[i])
# nothing done in previous step?
if len(tm)==0:
tm = cp.deepcopy(t)
em = cp.deepcopy(e)
if len(tm)==0:
return t, e
## merging pauses across short chunks
tn = np.asarray([tm[0,:]])
en = np.asarray([em[0]])
if (tn[0,0]<mcl): tn[0,0]=0
for i in np.arange(1,len(tm),1):
if (tm[i,0] - tn[-1,1] < mcl):
tn[-1,1] = tm[i,1]
en[-1] = np.mean([en[-1],em[i]])
else:
tn = myl.push(tn,tm[i,:])
en = myl.push(en,em[i])
#print("t:\n", t, "\ntm:\n", tm, "\ntn:\n", tn) #!v
return tn, en
# pause to chunk intervals
# IN:
# t [[on off]] of pause segments (indices in signal)
# l length of signal vector
# OUT:
# tc [[on off]] of speech chunks
def pau2chunk(t,l):
if len(t)==0:
return np.asarray([[0,l-1]])
if t[0,0]>0:
tc = np.asarray([[0,t[0,0]-1]])
else:
tc = np.asarray([])
for i in np.arange(0,len(t)-1,1):
if t[i,1] < t[i+1,0]-1:
tc = myl.push(tc,[t[i,1]+1,t[i+1,0]-1])
if t[-1,1]<l-1:
tc = myl.push(tc,[t[-1,1]+1,l-1])
return tc
# called by pau_detector
# IN:
# as for pau_detector
# OUT:
# t [on off]
# e_ratio
def pau_detector_sub(y,opt):
## settings
# reference window span
rl = math.floor(opt['l_ref']*opt['fs'])
# signal length
ls = len(y)
# min pause length
ml = opt['l']*opt['fs']
# global rmse and pause threshold
e_rel = cp.deepcopy(opt['e_rel'])
# global rmse
# as fallback in case reference window is likely to be pause
# almost-zeros excluded (cf percentile) since otherwise pauses
# show a too high influence, i.e. lower the reference too much
# so that too few pauses detected
#e_glob = myl.rmsd(y)
ya = abs(y)
qq = np.percentile(ya,[50])
e_glob = myl.rmsd(ya[ya>qq[0]])
t_glob = opt['e_rel']*e_glob
# stepsize
sts=max([1,math.floor(0.05*opt['fs'])])
# energy calculation in analysis and reference windows
wopt_en = {'win':ml,'rng':[0,ls]}
wopt_ref = {'win':rl,'rng':[0,ls]}
# loop until opt.n criterion is fulfilled
# increasing energy threshold up to 1
while e_rel < 1:
# pause [on off], pause index
t=np.asarray([])
j=0
# [e_y/e_rw] indices as in t
e_ratio=np.asarray([])
i_steps = np.arange(1,ls,sts)
for i in i_steps:
# window
yi = myl.windowing_idx(i,wopt_en)
e_y = myl.rmsd(y[yi])
# energy in reference window
e_r = myl.rmsd(y[myl.windowing_idx(i,wopt_ref)])
# take overall energy as reference if reference window is pause
if (e_r <= t_glob):
e_r = e_glob
# if rmse in window below threshold
if e_y <= e_r*e_rel:
yis = yi[0]
yie = yi[-1]
if len(t)-1==j:
# values belong to already detected pause
if len(t)>0 and yis<t[j,1]:
t[j,1]=yie
# evtl. needed to throw away superfluous
# pauses with high e_ratio
e_ratio[j]=np.mean([e_ratio[j],e_y/e_r])
else:
t = myl.push(t,[yis, yie])
e_ratio = myl.push(e_ratio,e_y/e_r)
j=j+1
else:
t=myl.push(t,[yis, yie])
e_ratio = myl.push(e_ratio,e_y/e_r)
# (more than) enough pauses detected?
if len(t) >= opt['n']: break
e_rel = e_rel+0.1
if opt['margin']==0 or len(t)==0:
return t, e_ratio
# shorten pauses by margins
mar=int(opt['margin']*opt['fs'])
tm, erm = myl.ea(), myl.ea()
for i in myl.idx_a(len(t)):
# only slim non-init and -fin pauses
if i>0:
ts = t[i,0]+mar
else:
ts = t[i,0]
if i < len(t)-1:
te = t[i,1]-mar
else:
te = t[i,1]
# pause disappeared
if te <= ts:
# ... but needs to be kept
if opt['n']>0:
tm = myl.push(tm,[t[i,0],t[i,1]])
erm = myl.push(erm,e_ratio[i])
continue
# pause still there
tm = myl.push(tm,[ts,te])
erm = myl.push(erm,e_ratio[i])
return tm, erm
def pau_detector_red(t,e_ratio,opt):
# keep boundary pauses
if opt['fbnd']==True:
n=opt['n']-2
#bp = [t[0,],t[-1,]]
bp = np.concatenate((np.array([t[0,]]),np.array([t[-1,]])),axis=0)
ii = np.arange(1,len(t)-1,1)
t = t[ii,]
e_ratio=e_ratio[ii]
else:
n=opt['n']
bp=np.asarray([])
if n==0:
t=[]
# remove pause with highest e_ratio
while len(t)>n:
i = myl.find(e_ratio,'is','max')
j = myl.find(np.arange(1,len(e_ratio),1),'!=',i[0])
t = t[j,]
e_ratio = e_ratio[j]
# re-add boundary pauses if removed
if opt['fbnd']==True:
if len(t)==0:
t=np.concatenate((np.array([bp[0,]]),np.array([bp[1,]])),axis=0)
else:
t=np.concatenate((np.array([bp[0,]]),np.array([t]),np.array([bp[1,]])),axis=0)
return t, e_ratio
# spectral balance calculation according to Fant 2000
# IN:
# sig: signal (vowel segment)
# fs: sample rate
# opt:
# 'win': length of central window in ms <len(sig)>; -1 is same as len(sig)
# 'ub': upper freq boundary in Hz <-1> default: no low-pass filtering
# 'domain': <'freq'>|'time'; pre-emp in frequency (Fant) or time domain
# 'alpha': <0.95> for time domain only y[n] = x[n]-alpha*x[n-1]
# if alpha>0 it is interpreted as lower freq threshold for pre-emp
# OUT:
# sb: spectral tilt
def splh_spl(sig,fs,opt_in={}):
opt = cp.deepcopy(opt_in)
opt = myl.opt_default(opt,{'win':len(sig),'f':-1,'btype':'none',
'domain':'freq','alpha':0.95})
#print(opt)
#myl.stopgo()
## cut out center window ##################################
ls = len(sig)
if opt['win'] <= 0:
opt['win'] = ls
if opt['win'] < ls:
wi = myl.windowing_idx(int(ls/2),
{'rng':[0, ls],
'win':int(opt['win']*fs)})
y = sig[wi]
else:
y = cp.deepcopy(sig)
if len(y)==0:
return np.nan
# reference sound pressure level
p_ref = pRef('spl')
## pre-emp in time domain ####################################
if opt['domain']=='time':
# low pass filtering
if opt['btype'] != 'none':
flt = fu_filt(y,{'fs':fs,'f':opt['f'],'ord':6,
'btype':opt['btype']})
y = flt['y']
yp = pre_emphasis(y,opt['alpha'],fs,False)
y_db = 20*np.log10(myl.rmsd(y)/p_ref)
yp_db = 20*np.log10(myl.rmsd(yp)/p_ref)
#print(yp_db - y_db)
return yp_db - y_db
## pre-emp in frequency domain ##############################
# according to Fant
# actual length of cut signal
n = len(y)
## hamming windowing
y *= np.hamming(n)
## spectrum
Y = np.fft.fft(y,n)
N = int(len(Y)/2)
## frequency components
XN = np.fft.fftfreq(n,d=1/fs)
X = XN[0:N]
# same as X = np.linspace(0, fs/2, N, endpoint=True)
## amplitudes
# sqrt(Y.real**2 + Y.imag**2)
# to be normalized:
# *2 since only half of transform is used
# /N since output needs to be normalized by number of samples
# (tested on sinus, cf
# http://www.cbcity.de/die-fft-mit-python-einfach-erklaert)
a = 2*np.abs(Y[:N])/N
## vowel-relevant upper frequency boundary
if opt['btype'] != 'none':
vi = fu_filt_freq(X,opt)
if len(vi)>0:
X = X[vi]
a = a[vi]
## Fant preemphasis filter (Fant et al 2000, p10f eq 20)
preemp = 10*np.log10((1+X**2/200**2)/(1+X**2/5000**2))
ap = 10*np.log10(a)+preemp
# retransform to absolute scale
ap = 10**(ap/10)
# corresponds to gain values in Fant 2000, p11
#for i in myl.idx(a):
# print(X[i],preemp[i])
#myl.stopgo()
## get sound pressure level of both spectra
# as 20*log10(P_eff/P_ref)
spl = 20*np.log10(myl.rmsd(a)/p_ref)
splh = 20*np.log10(myl.rmsd(ap)/p_ref)
## get energy level of both spectra
#spl = 20*np.log10(myl.mse(a)/p_ref)
#splh = 20*np.log10(myl.mse(ap)/p_ref)
## spectral balance
sb = splh-spl
#print(spl,splh,sb)
#myl.stopgo()
#fig = plt.figure()
#plt.plot(X,20*np.log10(a),'b')
#plt.plot(X,20*np.log10(preemp),'g')
#plt.plot(X,20*np.log10(ap),'r')
#plt.show()
return sb
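# Example call of splh_spl (illustrative; sig is a vowel segment array and
# fs its sample rate). Frequency-domain pre-emphasis as in Fant (2000) is
# the default:
# sb = splh_spl(sig, fs, {'domain': 'freq'})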
# returns indices of freq in X fulfilling conditions in opt
# IN:
# X: freq array
# opt: 'btype' - 'none'|'low'|'high'|'band'|'stop'
# 'f': 1 freq for low|high, 2 freq for band|stop
# OUT:
# i: indices in X fulfilling condition
def fu_filt_freq(X,opt):
typ = opt['btype']
f = opt['f']
# all indices
if typ=='none':
return myl.idx_a(len(X))
# error handling
if re.search('(band|stop)',typ) and (not myl.listType(f)):
print('filter type requires frequency list. Done nothing.')
return myl.idx_a(len(X))
if re.search('(low|high)',typ) and myl.listType(f):
print('filter type requires only 1 frequency value. Done nothing.')
return myl.idx_a(len(X))
if typ=='low':
return np.nonzero(X<=f)
elif typ=='high':
return np.nonzero(X>=f)
elif typ == 'band':
i = set(np.nonzero(X>=f[0])[0])
return np.sort(np.array(list(i.intersection(set(np.nonzero(X<=f[1])[0])))))
elif typ == 'stop':
i = set(np.nonzero(X<=f[0])[0])
return np.sort(np.array(list(i.union(set(np.nonzero(X>=f[1])[0])))))
return myl.idx_a(len(X))
# returns reference level for typ
# IN:
# typ
# 'spl': sound pressure level
# 'i': intensity level
# OUT:
# corresponding reference level
def pRef(typ):
if typ=='spl':
return 2*10**(-5)
return 10**(-12)
# syllable nucleus detection
# IN:
# s - mono signal
# opt['fs'] - sample frequency
# ['ons'] - onset in sec <0> (to be added to time output)
# ['flt']['f'] - filter options, boundary frequencies in Hz
# (2 values for btype 'band', else 1): <np.asarray([200,4000])>
# ['btype'] - <'band'>|'high'|'low'
# ['ord'] - butterworth order <5>
# ['fs'] - (internally copied)
# ['l'] - analysis window length
# ['l_ref'] - reference window length
# ['d_min'] - min distance between subsequent nuclei (in sec)
# ['e_min'] - min energy required for nucleus as a proportion to max energy <0.16>
# ['e_rel'] - min energy quotient analysisWindow/referenceWindow
# ['e_val'] - quotient, how sagged the energy valley between two nucleus
# candidates should be. Measured relative to the lower energy
# candidate. The lower, the deeper the required valley between
# two peaks. Meaningful range ]0, 1]. Recommended range:
# [0.9 1[
# ['center'] - boolean; subtract mean energy
# OUT:
# ncl['t'] - vector of syl ncl time stamps (in sec)
# ['ti'] - corresponding vector idx in s
# ['e_ratio'] - corresponding energy ratios (analysisWindow/referenceWindow)
# bnd['t'] - vector of syl boundary time stamps (in sec)
# ['ti'] - corresponding vector idx in s
# ['e_ratio'] - corresponding energy ratios (analysisWindow/referenceWindow)
def syl_ncl(s,opt={}):
## settings
if 'fs' not in opt:
sys.exit('syl_ncl: opt does not contain key fs.')
dflt = {'flt':{'f':np.asarray([200,4000]),'btype':'band','ord':5},
'e_rel':1.05,'l':0.08,'l_ref':0.15, 'd_min':0.12, 'e_min':0.1,
'ons':0, 'e_val': 1, 'center': False}
opt = myl.opt_default(opt,dflt)
opt['flt']['fs'] = opt['fs']
if syl_ncl_trouble(s,opt):
t = np.asarray([round(len(s)/2+opt['ons'])])
ncl = {'ti':t, 't':myl.idx2sec(t,opt['fs']), 'e_ratio':[0]}
bnd = cp.deepcopy(ncl)
return ncl, bnd
# reference window length
rws = math.floor(opt['l_ref']*opt['fs'])
# energy win length
ml = math.floor(opt['l']*opt['fs'])
# stepsize
sts = max([1,math.floor(0.03*opt['fs'])])
# minimum distance between subsequent nuclei
# (in indices)
#md = math.floor(opt['d_min']*opt['fs']/sts)
md = math.floor(opt['d_min']*opt['fs'])
# bandpass filtering
flt = fu_filt(s,opt['flt'])
y = flt['y']
# signal length
ls = len(y)
# minimum energy as proportion of maximum energy found
e_y = np.asarray([])
i_steps = np.arange(1,ls,sts)
for i in i_steps:
yi = np.arange(i,min([ls,i+ml-1]),1)
e_y = np.append(e_y,myl.rmsd(y[yi]))
if bool(opt['center']):
e_y -= np.mean(e_y)
e_min = opt['e_min']*max(e_y)
# output vector collecting nucleus sample indices
t = np.asarray([])
all_i = np.asarray([])
all_e = np.asarray([])
all_r = np.asarray([])
# energy calculation in analysis and reference windows
wopt_en = {'win':ml,'rng':[0,ls]}
wopt_ref = {'win':rws,'rng':[0,ls]}
for i in i_steps:
yi = myl.windowing_idx(i,wopt_en)
#yi = np.arange(yw[0],yw[1],1)
ys = y[yi]
e_y = myl.rmsd(ys)
#print(ys,'->',e_y)
ri = myl.windowing_idx(i,wopt_ref)
#ri = np.arange(rw[0],rw[1],1)
rs = y[ri]
e_rw = myl.rmsd(rs)
all_i = np.append(all_i,i)
all_e = np.append(all_e,e_y)
all_r = np.append(all_r,e_rw)
# local energy maxima
# (do not use min duration md for order option, since local
# maximum might be obscured already by energy increase
# towards neighboring peak further away than md, and not only by
# closer than md peaks)
idx = sis.argrelmax(all_e,order=1)
#plot_sylncl(all_e,idx) #!v
#print(opt["ons"]/opt["fs"] + np.array(idx)*sts/opt["fs"]) #!v
#myl.stopgo() #!v
### maxima related to syl ncl
## a) energy constraints
# timestamps (idx)
tx = np.asarray([])
# energy ratios
e_ratiox = np.asarray([])
# idx in all_i
tix = np.asarray([]).astype(int)
for i in idx[0]:
# valley between this and previous nucleus deep enough?
if len(tix)>0:
ie = all_e[tix[-1]:i]
if len(ie)<3:
continue
valley = np.min(ie)
nclmin = np.min([ie[0],all_e[i]])
if valley >= opt['e_val'] * nclmin:
# replace previous nucleus by current one
if all_e[i] > ie[0]: #!n
all_e[tix[-1]] = all_e[i] #!n
tx[-1] = all_i[i] #!n
tix[-1] = i #!n
e_ratiox[-1] = all_e[i]/all_r[i] #!n
#print("valley constraint -- tx:", all_i[i]/opt["fs"], "nclmin:", nclmin, "valley:", valley, "ie0:", ie[0], "all_e:", all_e[i], "--> skip!") #!v
continue
if ((all_e[i] >= all_r[i]*opt['e_rel']) and (all_e[i] > e_min)):
tx = np.append(tx,all_i[i])
tix = np.append(tix,i)
e_ratiox = np.append(e_ratiox, all_e[i]/all_r[i])
#else: #!v
# print("min_en constraint -- tx:", all_i[i]/opt["fs"], "all_e:", all_e[i], "all_r:", all_r[i], "e_min:", e_min, "--> skip!") #!v
#print(len(tx)) #!v
if len(tx)==0:
dflt = {'ti':myl.ea(),
't':myl.ea(),
'e_ratio':myl.ea()}
return dflt, dflt
#plot_sylncl(all_e,tix) #!v
## b) min duration constraints
# init by first found ncl
t = np.array([tx[0]])
e_ratio = np.array([e_ratiox[0]])
# idx in all_i
ti = np.array([tix[0]]).astype(int)
for i in range(1,len(tx)):
# ncl too close
if np.abs(tx[i]-t[-1]) < md:
# current ncl with higher energy: replace last stored one
if e_ratiox[i] > e_ratio[-1]:
t[-1] = tx[i]
ti[-1] = tix[i]
e_ratio[-1] = e_ratiox[i]
else:
t = np.append(t,tx[i])
ti = np.append(ti,tix[i])
e_ratio = np.append(e_ratio,e_ratiox[i])
#plot_sylncl(all_e,ti) #!v
### minima related to syl bnd
tb = np.asarray([])
e_ratio_b = np.asarray([])
if len(t)>1:
for i in range(len(ti)-1):
j = myl.idx_seg(ti[i],ti[i+1])
j_min = myl.find(all_e[j],'is','min')
if len(j_min)==0: j_min=[0]
# bnd idx
bj = j[0]+j_min[0]
tb = np.append(tb,all_i[bj])
e_ratio_b = np.append(e_ratio_b, all_e[bj]/all_r[bj])
# add onset
t = t+opt['ons']
tb = tb+opt['ons']
# output dict,
# incl idx to seconds
ncl = {'ti':t, 't':myl.idx2sec(t,opt['fs']), 'e_ratio':e_ratio}
bnd = {'ti':tb, 't':myl.idx2sec(tb,opt['fs']), 'e_ratio':e_ratio_b}
#print(ncl['t'], e_ratio)
return ncl, bnd
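# Example call of syl_ncl (illustrative; s is a mono signal array and fs
# its sample rate):
# ncl, bnd = syl_ncl(s, {'fs': fs, 'ons': 0})
# ncl['t']  # syllable nucleus time stamps in seconds
# bnd['t']  # syllable boundary time stamps in seconds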
def syl_ncl_trouble(s,opt):
if len(s)/opt['fs'] < 0.1:
return True
return False
# wrapper around Butter filter
# IN:
# 1-dim vector
# opt['fs'] - sample rate
# ['f'] - scalar (high/low) or 2-element vector (band) of boundary freqs
# ['ord'] - order
# ['btype'] - band|low|high; all other values: signal returned as is
# OUT:
# flt['y'] - filtered signal
# ['b'] - coefs
# ['a']
def fu_filt(y,opt):
# do nothing
if not re.search('^(high|low|band)$',opt['btype']):
return {'y': y, 'b': myl.ea(), 'a': myl.ea()}
# check f<fs/2
if (opt['btype'] == 'low' and opt['f']>=opt['fs']/2):
opt['f']=opt['fs']/2-100
elif (opt['btype'] == 'band' and opt['f'][1]>=opt['fs']/2):
opt['f'][1]=opt['fs']/2-100
fn = opt['f']/(opt['fs']/2)
b, a = sis.butter(opt['ord'], fn, btype=opt['btype'])
yf = sis.filtfilt(b,a,y)
return {'y':yf,'b':b,'a':a}
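# Example call of fu_filt (illustrative): 5th-order Butterworth band pass
# between 200 and 4000 Hz, applied forward and backward (zero phase):
# flt = fu_filt(s, {'fs': fs, 'f': np.asarray([200, 4000]),
#                   'btype': 'band', 'ord': 5})
# s_filt = flt['y']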
##### discontinuity measurement #######################################
# measures delta and linear fit discontinuities between
# adjacent array elements in terms of:
# - delta
# - reset of regression lines
# - root mean squared deviation between overall regression line and
# -- preceding segment's regression line
# -- following segment's regression line
# -- both, preceding and following, regression lines
# - extrapolation rmsd between following regression line
# and following regression line, extrapolated by regression
# on preceding segment
# IN:
# x: nx2 array [[time val] ...]
# OR
# nx1 array [val ...]
# for the latter indices are taken as time stamps
# ts: nx1 array [time ...] of time stamps (or indices for size(x)=nx1)
# at which to calculate discontinuity; if empty, discontinuity is
# calculated at each point in time. If size(x)=nx1 ts MUST contain
# indices
# nx2 array [[t_off t_on] ...] to additionally account for pauses
# opt: dict
# .win: <'glob'>|'loc' calculate discontinuity over entire sequence
# or within window
# .l: <3> if win==loc, length of window in sec or idx
# (splitpoint - .l : splitpoint + .l)
# .do_plot: <0> plots orig contour and linear stylization
# .plot: <{}> dict with plotting options; cf. discont_seg()
# OUT:
# d dict
# (s1: pre-bnd segment [i-l,i[,
# s2: post-bnd segment [i,i+l]
# sc: joint segment [i-l,i+l])
# dlt: delta
# res: reset
# ry1: s1, rmsd between joint vs pre-bnd fit
# ry2: s2, rmsd between joint vs post-bnd fit
# ryc: sc, rmsd between joint vs pre+post-bnd fit
# ry2e: s2: rmsd between pre-bnd fit extrapolated to s2 and post-bnd fit
# rx1: s1, rmsd between joint fit and pre-boundary x-values
# rx2: s2, rmsd between joint fit and post-boundary x-values
# rxc: sc, rmsd between joint fit and pre+post-boundary x-values
# rr1: s1, ratio rmse(joint_fit)/rmse(pre-bnd_fit)
# rr2: s2, ratio rmse(joint_fit)/rmse(post-bnd_fit)
# rrc: sc, ratio rmse(joint_fit)/rmse(pre+post-bnd_fit)
# ra1: c1-rate s1
# ra2: c1-rate s2
# dlt_ra: ra2-ra1
# s1_c3: cubic fitting coefs of s1
# s1_c2
# s1_c1
# s1_c0
# s2_c3: cubic fitting coefs of s2
# s2_c2
# s2_c1
# s2_c0
# dlt_c3: s2_c3-s1_c3
# dlt_c2: s2_c2-s1_c2
# dlt_c1: s2_c1-s1_c1
# dlt_c0: s2_c0-s1_c0
# eucl_c: euclDist(s1_c*,s2_c*)
# corr_c: corr(s1_c*,s2_c*)
# v1: variance in s1
# v2: variance in s2
# vc: variance in sc
# vr: variance ratio (mean(v1,v2))/vc
# dlt_v: v2-v1
# m1: mean in s1
# m2: mean in s2
# dlt_m: m2-m1
# p: pause length (in sec or idx depending on numcol(x);
# always 0, if t is empty or 1-dim)
# i in each list refers to discontinuity between x[i-1] and x[i]
# dimension of each list: if len(ts)==0: n-1 array (first x-element skipped)
# else: mx6; m is number of ts-elements in range of x[:,0],
# resp. in index range of x[1:-1]
## REMARKS:
# for all variables but corr_c and vr higher values indicate higher discontinuity
## variables:
# x1: original f0 contour for s1
# x2: original f0 contour for s2
# xc: original f0 contour for sc
# y1: line fitted on segment a
# y2: line fitted on segment b
# yc: line fitted on segments a+b
# yc1: yc part for x1
# yc2: yc part for x2
# ye: x1/y1-fitted line for x2
# cu1: cubic fit coefs of time-nrmd s1
# cu2: cubic fit coefs of time-nrmd s2
# yu1: polyval(cu1)
# yu2: polyval(cu2); yu1 and yu2 are cut to same length
def discont(x,ts=[],opt={}):
# time: first column or indices
if np.ndim(x)==1:
t = np.arange(0,len(x))
x = np.asarray(x)
else:
t = x[:,0]
x = x[:,1]
# tsi: index pairs in x for which to derive discont values
# [[infimum supremum]...] s1 right-aligned to infimum, s2 left-aligned to supremum
# for 1-dim ts both values are adjacent [[i-1, i]...]
# zp: zero pause True for 1-dim ts input, False for 2-dim
tsi, zp = discont_tsi(t,ts)
# opt init
opt = myl.opt_default(opt,{'win':'glob','l':3,'do_plot':False,
'plot': {}})
# output
d = discont_init()
# linear fits
# over time stamp pairs
for ii in tsi:
## delta
d['dlt'].append(x[ii[1]]-x[ii[0]])
## segments (x, y values of pre-, post, joint segments)
t1,t2,tc,x1,x2,xc,y1,y2,yc,yc1,yc2,ye,cu1,cu2,yu1,yu2 = discont_seg(t,x,ii,opt)
d = discont_feat(d,t1,t2,tc,x1,x2,xc,y1,y2,yc,yc1,yc2,ye,cu1,cu2,yu1,yu2,zp)
# to np.array
for x in d:
d[x] = np.asarray(d[x])
return d
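# Example call of discont (illustrative; f0 is a 1-dim contour array, so the
# indices serve as time stamps), evaluated locally in a +/-3 sample window:
# d = discont(f0, ts=[], opt={'win': 'loc', 'l': 3})
# d['res']   # regression-line resets at each point
# d['ry2e']  # rmsd between extrapolated pre-boundary fit and post-boundary fit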
# init discont dict
def discont_init():
return {"dlt": [],
"res": [],
"ry1": [],
"ry2": [],
"ryc": [],
"ry2e": [],
"rx1": [],
"rx2": [],
"rxc": [],
"rr1": [],
"rr2": [],
"rrc": [],
"ra1": [],
"ra2": [],
"dlt_ra": [],
"s1_c3": [],
"s1_c2": [],
"s1_c1": [],
"s1_c0": [],
"s2_c3": [],
"s2_c2": [],
"s2_c1": [],
"s2_c0": [],
"dlt_c3": [],
"dlt_c2": [],
"dlt_c1": [],
"dlt_c0": [],
"eucl_c": [],
"corr_c": [],
"eucl_y": [],
"corr_y": [],
"v1": [],
"v2": [],
"vc": [],
"vr": [],
"dlt_v": [],
"m1": [],
"m2": [],
"dlt_m": [],
"p": []}
# pre/post-boundary and joint segments
def discont_seg(t,x,ii,opt):
# preceding, following segment indices
i1, i2 = discont_idx(t,ii,opt)
#print(ii,"\n-> ", i1,"\n-> ", i2) #!v
#myl.stopgo() #!v
t1, t2, x1, x2 = t[i1], t[i2], x[i1], x[i2]
tc = np.concatenate((t1,t2))
xc = np.concatenate((x1,x2))
# normalized time (only needed for reported polycoefs, not
# for output lines
tn1 = myl.nrm_vec(t1,{'mtd': 'minmax',
'rng': [-1, 1]})
tn2 = myl.nrm_vec(t2,{'mtd': 'minmax',
'rng': [-1, 1]})
# linear fit coefs
c1 = myPolyfit(t1,x1,1)
c2 = myPolyfit(t2,x2,1)
cc = myPolyfit(tc,xc,1)
# cubic fit coefs (for later shape comparison)
cu1 = myPolyfit(tn1,x1,3)
cu2 = myPolyfit(tn2,x2,3)
yu1 = np.polyval(cu1,tn1)
yu2 = np.polyval(cu2,tn2)
# cut to same length (from boundary)
ld = len(yu1)-len(yu2)
if ld>0:
yu1=yu1[ld:len(yu1)]
elif ld<0:
yu2=yu2[0:ld]
# robust treatment
while len(yu2)<len(yu1):
yu2 = np.append(yu2,yu2[-1])
#!/usr/bin/env python
import sys
sys.path.append('../neural_networks')
import numpy as np
import numpy.matlib
import pickle
import copy
from mpl_toolkits.mplot3d import Axes3D
from matplotlib import cm
import matplotlib.pyplot as plt
import os
import time
import copy
from gym_collision_avoidance.envs.policies.CADRL.scripts.neural_networks import neural_network_regr_multi as nn
from gym_collision_avoidance.envs.policies.CADRL.scripts.multi import pedData_processing_multi as pedData
from gym_collision_avoidance.envs.policies.CADRL.scripts.neural_networks.nn_training_param import NN_training_param
from gym_collision_avoidance.envs.policies.CADRL.scripts.neural_networks.multiagent_network_param import Multiagent_network_param
from gym_collision_avoidance.envs.policies.CADRL.scripts.multi import global_var as gb
# setting up global variables
COLLISION_COST = gb.COLLISION_COST
DIST_2_GOAL_THRES = gb.DIST_2_GOAL_THRES
GETTING_CLOSE_PENALTY = gb.GETTING_CLOSE_PENALTY
GETTING_CLOSE_RANGE = gb.GETTING_CLOSE_RANGE
EPS = gb.EPS
# terminal states
NON_TERMINAL = gb.NON_TERMINAL
COLLIDED = gb.COLLIDED
REACHED_GOAL = gb.REACHED_GOAL
# plotting colors
plt_colors = gb.plt_colors
GAMMA = gb.RL_gamma
DT_NORMAL = gb.RL_dt_normal
SMOOTH_COST = gb.SMOOTH_COST
# for 'rotate_constr'
TURNING_LIMIT = np.pi/6.0
# neural network
NN_ranges = gb.NN_ranges
# calculate the minimum distance between two line segments
# not counting the starting point
def find_dist_between_segs(x1, x2, y1, y2):
# x1.shape = (2,)
# x2.shape = (num_actions,2)
# y1.shape = (2,)
# y2.shape = (num_actions,2)
if_one_pt = False
if x2.shape == (2,):
x2 = x2.reshape((1,2))
y2 = y2.reshape((1,2))
if_one_pt = True
start_dist = np.linalg.norm(x1 - y1)
end_dist = np.linalg.norm(x2 - y2, axis=1)
critical_dist = end_dist.copy()
# start_dist * np.ones((num_pts,)) # initialize
# critical points (where d/dt = 0)
z_bar = (x2 - x1) - (y2 - y1) # shape = (num_actions, 2)
inds = np.where((np.linalg.norm(z_bar,axis=1)>0))[0]
t_bar = - np.sum((x1-y1) * z_bar[inds,:], axis=1) \
/ np.sum(z_bar[inds,:] * z_bar[inds,:], axis=1)
t_bar_rep = np.matlib.repmat(t_bar, 2, 1).transpose()
dist_bar = np.linalg.norm(x1 + (x2[inds,:]-x1) * t_bar_rep \
- y1 - (y2[inds,:]-y1) * t_bar_rep, axis=1)
inds_2 = np.where((t_bar > 0) & (t_bar < 1.0))
critical_dist[inds[inds_2]] = dist_bar[inds_2]
# end_dist = end_dist.clip(min=0, max=start_dist)
min_dist = np.amin(np.vstack((end_dist, critical_dist)), axis=0)
'''
Code in python3.
Needs to be in the same directory as SONG (/.../song/).
The python part of SONG, called songy, is written in python2. Make the following change:
In /song/python/songy.py line 170:
range(0,N*(N+1)/2) --> list(range(0,int(N*(N+1)/2)))
'''
####################################################################################### import
import numpy as np
import os
import sys
from numba import jit, prange, config
import h5py
import subprocess
#import matplotlib.pyplot as plt
import time
import params_relic as params
import importlib
importlib.reload(params)
sys.path.insert(0, params.song_path+'python') # path to python module of song
import songy as s
from classy import Class
import warnings
warnings.filterwarnings("ignore")
#######################################################################################
####################################################################################### SONG wrapper
def run_song(hkmax,hkmin):
''' Call this function in song repository:
It will create the ini and pre files from the global parameters and run song
'''
ini_file=r"""output = delta_cdm_bk
T_cmb = 2.7255
N_eff = 3.046
reio_parametrization = reio_none
tau_reio = 0.0952
k_pivot = 0.05
A_s = {}
n_s = {}
YHe = 0.2477055
gauge = newtonian
output_single_precision = yes
output_class_perturbations = yes
background_verbose = 1
thermodynamics_verbose = 1
primordial_verbose = 1
spectra_verbose = 1
nonlinear_verbose = 1
lensing_verbose = 1
output_verbose = 1
perturbations_verbose = 1
perturbations2_verbose = 2
transfer_verbose = 1
transfer2_verbose = 1
bessels_verbose = 1
bessels2_verbose = 1
bispectra_verbose = 1
fisher_verbose = 1
format = camb
write parameters = yes
h={}
omega_b={}
omega_cdm={}
Omega_k={}
primordial_local_fnl_phi={}
z_out={}"""
pre_file=r"""sources2_k3_sampling = {}
k3_size = {}
k_min_tau0 = 0.05
k_max_tau0_over_l_max = 2.8
k_step_sub =0.1
k_logstep_super = 1.2
k_step_super = 0.025
k_step_transition = 0.2
quadsources_time_interpolation = cubic
sources_time_interpolation = linear
sources_k3_interpolation = cubic #linear
tau_start_evolution_song = 0
start_small_k_at_tau_c_over_tau_h_song = 0.001
start_large_k_at_tau_h_over_tau_k_song = 0.04
sources2_k_sampling = {}
k_min_custom_song = {}
k_max_custom_song = {}
k_size_custom_song = {}"""
ini="./matter_{}.ini".format(params.key_song)
pre="./matter_{}.pre".format(params.key_song)
file = open(ini, "w")
file.write(ini_file.format(params.A_s,params.n_s,params.h,params.omega_b*params.h**2,
params.omega_cdm*params.h**2,params.omega_k,params.fnl,params.z))
file.close()
file = open(pre, "w")
if params.interp in ['nearest','lin']:
file.write(pre_file.format('lin',int(params.N_song_k3),'lin',hkmin,hkmax,int(params.N_song_k12)))
else :
file.write(pre_file.format('smart',int(params.N_song_k3),'lin',hkmin,hkmax,int(params.N_song_k12)))
file.close()
os.system("./song "+ini+' '+pre)
os.system("mv "+params.song_path+'output/sources_song_z000.dat '+params.song_path+"output/sources_song_z000_{}.dat".format(params.key_song))
def song_output(hkmax,hkmin,force):
''' Once song has run, this function loads the output by using
the songy (see song/python/songy.py) routine FixedTauFile.
It returns the needed output:
-song.get_source(b'delta_cdm') = song: second order kernel multiplied by two transfer functions i.e.
K(k1,k2,k3)*T_delta(k1)*T_delta(k2) in the expression
int_k1_k2 (K(k1,k2,k3) T_delta(k1) T_delta(k2) zeta(k1) zeta(k2))
-song.tau: conformal time corresponding to the redshift. It is needed to get the velocity potential (dK/dtau)
-song.k1, song.k2, song.k3: grid of modes
-song.flatidx: Song output shape is weird ! see song/python/songy.py
-dk12,dk3: step of the grid
'''
filename='sources_song_z000_{}.dat'.format(params.key_song)
if not os.path.isfile(params.song_path+'output/'+filename) :
print(params.song_path+'output/{} not found'.format(filename))
print('===========================================================================================')
run_song(hkmax,hkmin)
elif force:
print('force running SONG')
#os.system("rm "+params.song_path+'output/{} not found'.format(filename))
print('===========================================================================================')
run_song(hkmax,hkmin)
print('===========================================================================================')
print('loading '+params.song_path+'output/{}'.format(filename))
song=s.FixedTauFile(params.song_path+'output/'+filename)
if len(params.source)==0 and (len(song.k1)!=params.N_song_k12 \
or np.min(song.k1)!=hkmin or np.max(song.k1)!=hkmax):
print('The output '+params.song_path+'output/ found does not have the right shape or hkmax/hkmin')
print('SONG N_song_k1={}, you ask {}'.format(len(song.k1),params.N_song_k12))
print('SONG N_song_k3={}, you ask {}'.format(len(song.k3[0]),params.N_song_k3))
print('SONG hkmin={}, you ask {}'.format(np.min(song.k1),hkmin))
print('SONG hkmax={}, you ask {}'.format(np.max(song.k1),hkmax))
print('===========================================================================================')
dk12=song.k1[1]-song.k1[0]
k3=np.concatenate(song.k3)
if params.interp in ['nearest','lin']:
dk3=np.diff(song.k3)[:,0]
else :
dk3=np.array([],dtype=np.float32)
for k1_ind,k1 in enumerate(song.k1):
for k2_ind,k2 in enumerate(song.k2[k1_ind]):
k3_ind=song.flatidx[k1_ind,k2_ind]
dk3=np.append(dk3,song.k3[k3_ind][2]-song.k3[k3_ind][1])
k3sizes_cumsum = np.zeros(len(song.k3sizes_cumsum)+2,dtype=int)
k3sizes_cumsum[1:-1]=song.k3sizes_cumsum
k3sizes_cumsum[-1] =len(k3)
return np.concatenate(song.get_source(b'delta_cdm')),song.tau,song.k1,np.concatenate(song.k2),k3,song.flatidx,dk12,dk3,k3sizes_cumsum
def song_main(hkmax,hkmin,force=False):
'''Main function for SONG '''
source,tau,k1,k2,k3,flatidx,dk12,dk3,k3sizes_cumsum=song_output(hkmax,hkmin,force)
return source,k1/params.h,k2/params.h,k3/params.h,flatidx,dk12/params.h,dk3/params.h,k3sizes_cumsum
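# Example call of song_main (illustrative; requires a configured params_relic
# and a local SONG installation; the bounds below are placeholders passed on
# to SONG, and the returned grids are divided by params.h as above):
# source, k1, k2, k3, flatidx, dk12, dk3, k3sizes_cumsum = song_main(
#     hkmax=1.0, hkmin=1e-4, force=False)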
#######################################################################################
####################################################################################### first order transfer fct
def trans(clss=True):
'''
This function returns the Primordial power spectrum, the transfer functions of delta_cdm and phi, and
derivative of the last transfer function.
-Primordial power spectrum: Primordial = A_s(k/k_0)**(ns-1) / (k**3/(2*np.pi**2)).
-delta_cdm transfer function: tr_delta_cdm(k,z)*zeta(k)=delta_cdm(k,z)
-potential transfer function: tr_phi(z,k)*zeta(k)=phi(z,k)
'''
if not clss:
song=s.FixedTauFile(params.song_path+"output/sources_song_z000_{}.dat".format(params.key_song))
song.first_order_sources['k']/=params.h
tr_delta_cdm=song.first_order_sources[b'delta_cdm']
tr_delta_b =song.first_order_sources[b'delta_b']
song.first_order_sources[b'delta_m']= (params.omega_b/(params.omega_b+params.omega_cdm)*tr_delta_b \
+ params.omega_cdm/(params.omega_b+params.omega_cdm)*tr_delta_cdm)
song.first_order_sources[b'phi']= song.first_order_sources[b'delta_m']*(-3*params.H**2/2)/(song.first_order_sources['k']**2+3*params.H**2)
dk=np.diff(np.append(song.first_order_sources['k'],song.first_order_sources['k'][-1]*2-song.first_order_sources['k'][-2]))
dT=np.diff(np.append(song.first_order_sources[b'phi'],song.first_order_sources[b'phi'][-1]*2-song.first_order_sources[b'phi'][-2]))
song.first_order_sources[b'dTdk'] = dT/dk
return song.first_order_sources
else:
clss = Class()
clss.set({'gauge': 'newtonian', 'h':params.h,'omega_b':params.omega_b*params.h**2, 'omega_cdm': params.omega_cdm*params.h**2,
'output':'dTk,vTk','z_pk':1000})
clss.compute()
#clss=np.loadtxt('class_tk.dat')
tr=clss.get_transfer(z=params.z)
tr['k'] = tr.pop('k (h/Mpc)')
dk=np.diff(np.append(tr['k'],tr['k'][-1]*2-tr['k'][-2]))
dT=np.diff(np.append(tr['phi'],tr['phi'][-1]*2-tr['phi'][-2]))
tr['dTdk'] = dT/dk
tr['d_m'] = (params.omega_cdm*tr['d_cdm'] + params.omega_b*tr['d_b'])/(params.omega_b+params.omega_cdm)
tr['t_m'] = (params.omega_cdm*tr['t_cdm'] + params.omega_b*tr['t_b'])/(params.omega_b+params.omega_cdm)
tr['v_m'] = -tr['t_m']/tr['k']**2/params.h
return tr
#clas = np.loadtxt('gevolution-1.2/class_tk.dat')
#k=clas[:,0]
#dk=np.diff(np.append(k,k[-1]*2-k[-2]))
#dT=np.diff(np.append(clas[:,6],clas[:,6][-1]*2-clas[:,6][-2]))
#xi = (clas[:,3] - 3*clas[:,6])/k**2
#first_order_sources={'k':clas[:,0],b'delta_cdm':clas[:,3],b'phi':clas[:,6],b'dTdk':dT/dk,b'xi':xi,b'v':-2*clas[:,6]/3/params.H}
#return first_order_sources
def primordial(k):
return params.A_s*(k/(params.k_pivot/params.h))**(params.n_s-1)/k**3*2*np.pi**2
def powerspectrum(k,delta_cdm):
prim = primordial(k)
T=np.interp(k,delta_cdm[0],delta_cdm[1])
return prim*T**2
#######################################################################################
####################################################################################### mode grid
def k_distrib(k_min,N,klbd,absolute=True):
''' Inputs:
-k_min: Minimum mode to be considered. Setting k_min automatically sets the step dk=k_min
because in order for k-k1 to always be on the grid k1, we need to include 0 and to have a
constant step dk.
-N: size of the grid. In order to include 0, N must be odd; if it is not, we set N+=1
(the final ifft returns the right even N grid)
-klbd: k_lambda:
if absolute==True:
the function will return the closest grid point to klbd
else:
klbd is considered as being a ratio, return kL=k[N//2:][int(klbd*N//2)]
output:
k,kmax,N,dk,kL
-k: list of k coordinates
-kmax: largest mode to be considered
-N like input
-dk: k_min in float32
-kL: actual k_lambda
'''
if N%2==0:
print('N has to be odd to include 0: N+=1')
N+=1
params.N=N
k=np.linspace(-(N//2)*k_min,N//2*k_min,N,dtype=np.float32)
if absolute:
idxL=np.where(np.abs(klbd-k[N//2:])==np.min(np.abs(klbd-k[N//2:])))[0]
kL=k[N//2:][idxL][0]
else:
kL=k[N//2:][int(klbd*N//2)]
return k,np.float32(N//2*k_min),N,np.float32(k_min),kL
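# Example call of k_distrib (illustrative): a 65-point grid with k_min=0.01
# (so dk=0.01) and k_lambda chosen as the grid point closest to 0.1:
# k, kmax, N, dk, kL = k_distrib(0.01, 65, 0.1, absolute=True)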
def W(grid,field):
if params.coarse_graine:
        l = 2*np.pi/params.kmin/(params.N-1)
        k1,k2,k3=grid[0][params.N//2:]*l/2/np.pi,grid[1]*l/2/np.pi,grid[2]*l/2/np.pi
W=(np.sinc(k1)*np.sinc(k2)*np.sinc(k3))
return W*field
else:
return field
def ifft(field):
    '''This function performs the inverse Fourier transform using numpy's irfftn.
    The input array first has to be re-organized.
    In this code, the array field is organized as field=(z=0:Nyquist, y=-Nyquist:0:Nyquist, x=-Nyquist:0:Nyquist),
    which means shape(field)=(N//2+1,N,N) (reminder: in the code, N is always odd while N_input is even, N=N_input+1).
    The numpy routine takes as input an array organized as follows:
    field=(x=0:Nyquist-1:-1:-Nyquist, y=0:Nyquist-1:-1:-Nyquist, z=0:Nyquist), which means shape(field)=(N//2+1,N-1,N-1).
    Note that -Nyquist=+Nyquist since N_input is even.
'''
field[0,params.N//2,:params.N//2]=np.conjugate(field[0,params.N//2,params.N//2+1:][::-1])
field[0,:params.N//2,:] =np.conjugate(field[0,params.N//2+1:,:][::-1,::-1])
return np.fft.irfftn(np.fft.ifftshift(field.transpose()[:-1,:-1],axes=(0,1)),(params.N-1,params.N-1,params.N-1) )
# Equivalent to :
#new_field=np.zeros((N//2+1,N-1,N-1),dtype=np.complex64)
#new_field[:,N//2+1:,N//2+1:]=field[:,1:N//2,1:N//2]
#new_field[:,:N//2+1,:N//2+1]=field[:,N//2:,N//2:]
#new_field[:,:N//2+1,N//2+1:]=field[:,N//2:,1:N//2]
#new_field[:,N//2+1:,:N//2+1]=field[:,1:N//2,N//2:]
#return np.fft.irfftn(new_field.transpose(),(N-1,N-1,N-1))
def fft(f_field):
    field=np.zeros((params.N//2+1,params.N,params.N),dtype=complex)
field[:,:-1,:-1]=np.fft.fftshift(np.fft.rfftn(f_field),axes=(0,1)).transpose()
field[:,-1],field[:,:,-1]=field[:,0],field[:,:,0]
return field
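# Round-trip sketch (assuming params.N has been set, e.g. by k_distrib):
#   f_real = ifft(zeta)       # real-space field of shape (N-1, N-1, N-1)
#   zeta_back = fft(f_real)   # should recover zeta up to floating-point round-off
# fft() pads the last row/column so that the (N//2+1, N, N) half-grid layout used
# throughout this module is rebuilt from numpy's rfftn output.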
def read_h5(filename,dtype=np.float32):
if len(filename)==0:
f1 = h5py.File(params.output_path+params.key+'_{}{}_{}.h5'.format(field,order,real), 'r')
dat1=np.array(f1['data'],dtype=dtype)
return dat1
else:
f1 = h5py.File(filename, 'r')
dat1=np.array(f1['data'],dtype=dtype)
return dat1
def save_h5(filename,f):
hf = h5py.File(filename, 'w') # Save in h5 format
hf.create_dataset('data', data=f) #
hf.close()
#######################################################################################
####################################################################################### First order stochastic potential
def zeta_realisation(k_grid):
'''
Generate the linear curvature perturbation field (N//2+1,N,N) at redshift z in half of Fourier space.
The reality condition ensures the other half.
The computation is in 3 steps:
-compute the modulus of k in the grid (k)
-interpolate transfer function and primordial power spectrum tr=T(k) and P=P(k)
-randomly draw the real/imaginary part of the primordial curvature zeta following a Gaussian PDF with std=sqrt(P(k)/2)
'''
def random (k):
with np.errstate(divide='ignore'):
P=primordial(k)
zeta_ini_Re=np.random.normal(0,(params.N-1)**3*np.sqrt(P/2*params.kmin**3/(2*np.pi)**3),k.shape) #https://nms.kcl.ac.uk/eugene.lim/AdvCos/lecture2.pdf
zeta_ini_Im=np.random.normal(0,(params.N-1)**3*np.sqrt(P/2*params.kmin**3/(2*np.pi)**3),k.shape)
# equivalent :
#rho = np.random.normal(0,(N-1)**3*np.sqrt(P*params.kmin**3/(2*np.pi)**3),k.shape)
#phase = np.random.uniform(0,2*np.pi,k.shape)
#zeta_ini_Re=rho*np.cos(phase)
#zeta_ini_Im=rho*np.sin(phase)
return np.complex64(zeta_ini_Re+zeta_ini_Im*1j)
k=np.sqrt(k_grid[0][params.N//2:]**2+k_grid[1]**2+k_grid[2]**2)
zeta=random(k)
zeta[np.isnan(zeta)]=0
# Even N in real space give a N+1 FFT grid with symmetries !
zeta[1:-1,-1,1:-1]=zeta[1:-1,0,1:-1] #z&x Plan
zeta[1:-1,1:-1,-1]=zeta[1:-1,1:-1,0] #z&y Plan
# Zmax plan Surfaces
zeta[-1,1:params.N//2,1:params.N//2] =np.conjugate(zeta[-1,params.N//2+1:-1,params.N//2+1:-1][::-1,::-1])
zeta[-1,params.N//2+1:-1,1:params.N//2]=np.conjugate(zeta[-1,1:params.N//2,params.N//2+1:-1][::-1,::-1])
# Zmax plan lines X constant and Y constant
zeta[-1,params.N//2,1:params.N//2]=np.conjugate(zeta[-1,params.N//2,params.N//2+1:-1][::-1])
zeta[-1,1:params.N//2,params.N//2]=np.conjugate(zeta[-1,params.N//2+1:-1,params.N//2][::-1])
r=zeta[:-1,-1,0] # All edges (x=0,y=0),(x=0,y=-1),(x=-1,y=0) and (x=-1,y=-1) are equal
zeta[:-1,-1,-1],zeta[:-1,0,0],zeta[:-1,0,-1]=r,r,r
r=zeta[-1,0,1:params.N//2] # Zmax edges sym with Y constant
zeta[-1,-1,1:params.N//2],zeta[-1,0,params.N//2+1:-1],zeta[-1,-1,params.N//2+1:-1]=r,np.conjugate(r[::-1]),np.conjugate(r[::-1])
r=zeta[-1,1:params.N//2,0]# Zmax edges sym with X constant
zeta[-1,1:params.N//2,-1],zeta[-1,params.N//2+1:-1,0],zeta[-1,params.N//2+1:-1,-1]=r,np.conjugate(r[::-1]),np.conjugate(r[::-1])
r=zeta[-1,0,0].real # Zmax plan corners all equal and real
zeta[-1,0,0],zeta[-1,-1,0],zeta[-1,-1,-1],zeta[-1,0,-1]=r,r,r,r
r=zeta[-1,params.N//2,0].real # Zmax plan: middle point of edges
zeta[-1,params.N//2,0],zeta[-1,params.N//2,-1]=r,r
r=zeta[-1,0,params.N//2].real
zeta[-1,0,params.N//2],zeta[-1,-1,params.N//2]=r,r
# Zmax middle point real
zeta[-1,params.N//2,params.N//2]=zeta[-1,params.N//2,params.N//2].real
# z=0 Plan
zeta[0,params.N//2,-1]=zeta[0,params.N//2,-1].real
zeta[0,-1,params.N//2]=zeta[0,-1,params.N//2].real
zeta[0,params.N//2+1:-1,-1]=zeta[0,params.N//2+1:-1,0]
zeta[0,-1,params.N//2+1:-1]=np.conjugate(zeta[0,-1,1:params.N//2][::-1])
r=zeta[0,-1,0].real
zeta[0,-1,0],zeta[0,-1,-1]=r,r
zeta[0,:params.N//2] =np.conjugate(zeta[0,params.N//2+1:][::-1,::-1])
zeta [0,params.N//2,:params.N//2]=np.conjugate(zeta [0,params.N//2,params.N//2+1:][::-1])
return zeta
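# Example (a sketch): one curvature realisation brought to real space. The k_grid
# construction below is an assumption; the calling code defines the exact axis
# ordering of the three component grids.
#   k_list, kmax, N, dk, kL = k_distrib(k_min=1e-3, N=65, klbd=5e-3)
#   k_grid = np.meshgrid(k_list, k_list, k_list, indexing='ij')
#   zeta = zeta_realisation(k_grid)
#   zeta_x = ifft(zeta)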
#######################################################################################
####################################################################################### From initial potential to displacement field
def order1(k,transfer):
'''Compute the first order quantities X1/delta1 at a given k.
X being: potential phi1 (==psi1), displacement field xi1, velocity v1
See equation (36) of the note.
'''
d1=np.interp(k,transfer['k'],transfer['d_m'])
phi1= np.interp(k,transfer['k'],transfer['phi'])
psi1= np.interp(k,transfer['k'],transfer['psi'])
xi1 = (d1-3*phi1)/k**2
v1 = np.interp(k,transfer['k'],transfer['v_m'])
return phi1,psi1,xi1,v1,d1
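# Example (a sketch): first-order quantities at a single mode, using the transfer
# dictionary `tr` returned by the CLASS branch above:
#   phi1, psi1, xi1, v1, d1 = order1(0.01, tr)
# The values are transfer functions per unit curvature zeta(k); in particular
# xi1 = (d1 - 3*phi1)/k**2 as in the function body.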
def song2xi(song,k1,k2,k3,flatidx,k3sizes_cumsum,transfer):
d2 =np.zeros_like(song)
xi2 =np.zeros_like(song)
phi2 =np.zeros_like(song)
phi2p=np.zeros_like(song)
chi2 =np.zeros_like(song)
v2 =np.zeros_like(song)
q2 =np.zeros_like(song)
phiLxi_term=np.zeros_like(song)
v_term =np.zeros_like(song)
phi_term =np.zeros_like(song)
xi_term =np.zeros_like(song)
xi2_term =np.zeros_like(song)
phi2_term =np.zeros_like(song)
for ind1 in prange(len(k1)):
if ind1%10==0: print(ind1)
kk1=k1[ind1]
for ind2 in prange(len(k1[:ind1+1])):
kk2=k1[ind2]
iii=k3sizes_cumsum[flatidx[ind1,ind2]]
jjj=k3sizes_cumsum[flatidx[ind1,ind2]+1]
kk3=k3[iii: jjj]
phi1_k1,psi1_k1,xi1_k1,v1_k1,d1_k1= order1(kk1,transfer)
phi1_k2,psi1_k2,xi1_k2,v1_k2,d1_k2= order1(kk2,transfer)
chi1_k1,chi1_k2=phi1_k1-psi1_k1,phi1_k2-psi1_k2
k1dk2=(kk3**2-kk1**2-kk2**2)/2
d2[iii: jjj] =- k1dk2*v1_k1*v1_k2 +song[iii:jjj]
chi2[iii: jjj]=(3*(kk1**2+k1dk2)*(kk2**2+k1dk2)-kk3**2*k1dk2)\
*(3*params.H**2*params.Om*v1_k1*v1_k2/2+psi1_k1*psi1_k2-chi1_k1*chi1_k2/2)/kk3**4
phi2p[iii: jjj]=(-3*params.H**2*params.Om*k1dk2*v1_k1*v1_k2-2*kk3**2*chi2[iii: jjj]\
+k1dk2*phi1_k1*phi1_k2)/21/params.H
phi2[iii: jjj] = ((3*params.H**2/2)*(-params.Om*d2[iii: jjj]+2*chi2[iii: jjj] \
+ 2*psi1_k1*psi1_k2 -2*phi2p[iii:jjj]/params.H ) \
+(k1dk2/2-(kk1**2+kk2**2))*phi1_k1*phi1_k2) /(3*params.H**2+kk3**2)
xi2[iii:jjj] = 1/kk3**2*(d2[iii:jjj]-3*phi2[iii:jjj]-3./2*\
(kk2**2*phi1_k1*xi1_k2+kk1**2*phi1_k2*xi1_k1)\
+1./2*k1dk2*v1_k1*v1_k2-9./2*phi1_k1*phi1_k2-1./2*(kk1**2*kk2**2-k1dk2**2)*xi1_k1*xi1_k2)
v2[iii:jjj]= (params.H*psi1_k2*psi1_k1/2-params.H*(phi2-chi2)[iii:jjj]-phi2p[iii:jjj] \
-3/2/kk3**2*params.H**2*params.Om*((k1dk2+kk2**2)* (d1_k1-2*phi1_k1-psi1_k1)*v1_k2 \
+ (k1dk2+kk1**2)* (d1_k2-2*phi1_k2-psi1_k2)*v1_k1)/2) /(3/2*params.H**2*params.Om)
q2[iii:jjj]=v2[iii:jjj]-((k1dk2+kk1**2)*(2*phi1_k1+psi1_k1)*v1_k2+(k1dk2+kk2**2)*(2*phi1_k2+psi1_k2)*v1_k1)/2/kk3**2
phiLxi_term[iii:jjj]=3./2*(kk2**2*phi1_k1*xi1_k2+kk1**2*phi1_k2*xi1_k1)
v_term [iii:jjj]=-k1dk2*v1_k1*v1_k2
phi_term [iii:jjj]=9./2*phi1_k1*phi1_k2
xi_term [iii:jjj]=1./2*(kk1**2*kk2**2-k1dk2**2)*xi1_k1*xi1_k2
xi2_term [iii:jjj]=kk3**2*xi2[iii:jjj]
phi2_term [iii:jjj]=3*phi2[iii:jjj]
delta_37=phi2_term+xi2_term +phiLxi_term+v_term +phi_term+xi_term
return {'delta_song':song,'delta': d2,'xi':xi2,'phi':phi2,'phip':phi2p,'chi':chi2, 'v':v2, 'q':q2, 'phiLxi_term':phiLxi_term,\
'v_term':v_term,'phi_term':phi_term,'xi_term':xi_term,'delta_37':delta_37,'xi2_term':xi2_term,'phi2_term':phi2_term}
def eq37():
def plot(f,t,c,linewidth=2):
integral=np.loadtxt(params.output_path+'ps_d3+_N65_kmin1.0e-03_kmax3.2e-02_kl5.0e-03_z100_{}2_0.h5.dat'.format(t))
Pk =PKL.Pk(np.float32(f), BoxSize, axis, MAS, threads, verbose)
plt.loglog(integral[:,0],integral[:,1],c,linewidth=linewidth,label='d3p {}'.format(t))
plt.loglog(Pk.k3D, Pk.Pk[:,0],c,linewidth=linewidth,linestyle='--',label='real {}'.format(t))
def grad(tableau):
step=2*np.pi/params.kmin/(params.N-1)
new=np.zeros((params.N+1,params.N+1,params.N+1))
new[1:-1,1:-1,1:-1]=tableau
new[0,1:-1,1:-1]=tableau[-1]
new[1:-1,0,1:-1]=tableau[:,-1]
new[1:-1,1:-1,0]=tableau[:,:,-1]
new[-1,1:-1,1:-1]=tableau[0]
new[1:-1,-1,1:-1]=tableau[:,0]
new[1:-1,1:-1,-1]=tableau[:,:,0]
interm=np.gradient(new,step)
interm=np.array([interm[0][1:-1,1:-1,1:-1],interm[1][1:-1,1:-1,1:-1],interm[2][1:-1,1:-1,1:-1]])
return interm
import Pk_library as PKL
MAS = 'None'
axis = 0
BoxSize = 2*np.pi/params.kmin
step = BoxSize/params.N
threads = 4
verbose = False
x={}
field = ['v','phi','xi']
key=params.output_path+'d3+_N65_kmin1.0e-03_kmax3.2e-02_kl5.0e-03_z100'
for f in field:
x[f]=read_h5(key+'_{}1_{}.h5'.format(f,0))
for f in ['phi','xi','delta']:
x[f+'2']=read_h5(key+'_{}2_{}.h5'.format(f,0))
x['gev_e1']=read_h5('gevolution-1.2/output/lcdm_snape1000_T00.h5')*1e6
x['gev_1'] =read_h5('gevolution-1.2/output/lcdm_snap1000_T00.h5')
x['gev_2'] =read_h5('gevolution-1.2/output/lcdm_snap2000_T00.h5')
term = ['phi_term','phiLxi_term','v_term','xi_term','xi2_term','phi2_term','delta_37','gev','delta']
color= ['red','cyan','orange','violet','green','yellow','blue']
for t in term:
if t=='phi_term':
phi= 9/2*x['phi']**2
plot(phi,t,color[0])
elif t=='phiLxi_term':
Gxi=grad(x['xi'])
Lxi=grad(Gxi[0])[0] + grad(Gxi[1])[1] + grad(Gxi[2])[2]
phiLxi=-3*x['phi']*Lxi
plot(phiLxi,t,color[1])
elif t=='v_term':
Gv=grad(x['v'])
v=1/2*(Gv[0]**2+Gv[1]**2+Gv[2]**2)
plot(v,t,color[2])
elif t=='xi_term':
Gxi=grad(x['xi'])
Lxi=grad(Gxi[0])[0] + grad(Gxi[1])[1] + grad(Gxi[2])[2]
XGxi,YGxi,ZGxi=grad(Gxi[0]),\
grad(Gxi[1]),\
grad(Gxi[2])
XGxi=XGxi[0]**2+XGxi[1]**2+XGxi[2]**2
YGxi=YGxi[0]**2+YGxi[1]**2+YGxi[2]**2
ZGxi=ZGxi[0]**2+ZGxi[1]**2+ZGxi[2]**2
xi=-1/2*(Lxi**2-XGxi-YGxi-ZGxi)
plot(xi,t,color[3])
elif t=='xi2_term':
Gxi=grad(x['xi2'])
Lxi=grad(Gxi[0])[0] + grad(Gxi[1])[1] + grad(Gxi[2])[2]
xi2=-Lxi
plot(xi2,t,color[4])
elif t=='phi2_term':
phi2=3*x['phi2']
plot(phi2,t,color[5])
elif t=='delta_37':
delta2=phi2+xi2+xi+v+phiLxi+phi
plot(delta2,t,color[6],5)
elif t=='delta':
delta2=x['delta2']
integral=np.loadtxt(params.output_path+'ps_d3+_N65_kmin1.0e-03_kmax3.2e-02_kl5.0e-03_z100_delta2_0.h5.dat')
plt.loglog(integral[:,0],integral[:,1],'black',linewidth=5,linestyle='--',label='delta2 d3p')
elif t=='gev':
rho=0.31204608
g_delta1 =x['gev_e1']/rho -1
g_delta1Q=x['gev_1']/rho -1
g_delta2 =x['gev_2']/rho -1
g_delta2=g_delta2+g_delta1Q-g_delta1
Pk =PKL.Pk(g_delta2, BoxSize, axis, MAS, threads, verbose)
plt.loglog(Pk.k3D, Pk.Pk[:,0],'black',linewidth=5,label='Gevolution')
else:
break
plt.legend()
plt.xlabel('k')
plt.ylabel('P(k)')
plt.show()
def zeta2fields(field,zeta,k_grid,tr=0):
    '''Compute the whole first order stochastic field from the curvature realisation
    computed in zeta_realisation().
    field can be 'delta','delta_song','phi','psi','xi','v','chi'.
'''
k=np.sqrt(k_grid[0][params.N//2:]**2+k_grid[1]**2+k_grid[2]**2)
if field in ['delta','delta_song']:
tr_d=np.interp(k,tr['k'],tr['d_m'])
return zeta*tr_d
elif field=='xi':
tr_d=np.interp(k,tr['k'],tr['d_m'])
        tr_p=np.interp(k,tr['k'],tr['phi'])
import numpy as np
def merge_data(data, outcomes, repetitions, metric):
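    # `metric` selects how each replication's time series is collapsed to a scalar
    # (a reading of the branches below): 1 = mean, 2 = minimum, 3 = maximum,
    # 4 = last value of the series.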
for key in outcomes.keys():
scenario_avgs = []
for scenario in range(len(outcomes[key])):
results = []
for replication in range(repetitions):
if metric == 1:
results.append(np.mean(outcomes[key][scenario][replication]))
if metric == 2:
results.append(min(outcomes[key][scenario][replication]))
if metric == 3:
results.append(max(outcomes[key][scenario][replication]))
if metric == 4:
results.append(outcomes[key][scenario][replication][-1])
            scenario_avgs.append(np.mean(results))
import io
import os
import pickle
import numpy as np
import torch
from PIL import Image
from learn2learn.vision.datasets import TieredImagenet
class TieredImageNet(TieredImagenet):
def __init__(self, root, partition="train", mode='coarse', transform=None, target_transform=None, download=False):
self.root = root
self.transform = transform
self.target_transform = target_transform
self.mode = mode
        tiered_imagenet_path = os.path.join(self.root, 'tiered-imagenet')
        short_partition = 'val' if partition == 'validation' else partition
        labels_path = os.path.join(tiered_imagenet_path, short_partition + '_labels.pkl')
        images_path = os.path.join(tiered_imagenet_path, short_partition + '_images_png.pkl')
with open(images_path, 'rb') as images_file:
self.images = pickle.load(images_file)
with open(labels_path, 'rb') as labels_file:
self.labels = pickle.load(labels_file)
self.coarse2fine = {}
for c, f in zip(self.labels['label_general'], self.labels['label_specific']):
if c in self.coarse2fine:
if f not in self.coarse2fine[c]:
self.coarse2fine[c].append(f)
else:
self.coarse2fine[c] = [f]
if self.mode == 'coarse':
self.labels = self.labels['label_general']
elif self.mode == 'fine':
self.labels = self.labels['label_specific']
else:
raise NotImplementedError
@property
def num_classes(self):
return len(np.unique(self.labels))
class MetaTieredImageNet(TieredImageNet):
def __init__(self, args, partition='train', train_transform=None, test_transform=None, fix_seed=True):
super(MetaTieredImageNet, self).__init__(
root=args.data_root,
partition=partition,
mode=args.mode)
self.fix_seed = fix_seed
self.n_ways = args.n_ways
self.n_shots = args.n_shots
self.n_queries = args.n_queries
self.n_test_runs = args.n_test_runs
self.n_aug_support_samples = args.n_aug_support_samples
self.train_transform = train_transform
self.test_transform = test_transform
self.data = {}
for idx in range(len(self.images)):
if self.labels[idx] not in self.data:
self.data[self.labels[idx]] = []
self.data[self.labels[idx]].append(self.images[idx])
self.classes = list(self.data.keys())
def __getitem__(self, item):
if self.fix_seed:
np.random.seed(item)
if len(self.classes) > self.n_ways:
cls_sampled = np.random.choice(self.classes, self.n_ways, False)
else:
            cls_sampled = np.array(self.classes) if not isinstance(self.classes, np.ndarray) else self.classes
# cls_sampled = np.random.choice(self.classes, self.n_ways, False)
support_xs = []
support_ys = []
query_xs = []
query_ys = []
for idx, cls in enumerate(cls_sampled):
imgs = np.asarray(self.data[cls])
support_xs_ids_sampled = np.random.choice(range(imgs.shape[0]), self.n_shots, False)
support_xs.append(imgs[support_xs_ids_sampled])
support_ys.append([idx] * self.n_shots)
query_xs_ids = np.setxor1d(np.arange(imgs.shape[0]), support_xs_ids_sampled)
query_xs_ids = np.random.choice(query_xs_ids, self.n_queries, False)
query_xs.append(imgs[query_xs_ids])
query_ys.append([idx] * query_xs_ids.shape[0])
support_xs, support_ys, query_xs, query_ys = np.array(support_xs), np.array(support_ys), np.array(
query_xs), np.array(query_ys)
num_ways, n_queries_per_way = query_xs.shape
query_xs = query_xs.reshape((num_ways * n_queries_per_way))
query_ys = query_ys.reshape((num_ways * n_queries_per_way))
support_xs = support_xs.reshape(-1)
if self.n_aug_support_samples > 1:
support_xs = np.tile(support_xs, (self.n_aug_support_samples))
support_ys = np.tile(support_ys.reshape(-1), self.n_aug_support_samples)
support_xs = np.split(support_xs, support_xs.shape[0], axis=0)
query_xs = query_xs.reshape((-1))
query_xs = np.split(query_xs, query_xs.shape[0], axis=0)
support_xs = torch.stack(list(map(lambda x: self.train_transform(self._load_png_byte(x[0])), support_xs)))
query_xs = torch.stack(list(map(lambda x: self.test_transform(self._load_png_byte(x[0])), query_xs)))
return support_xs, support_ys, query_xs, query_ys
def _load_png_byte(self, bytes):
return Image.open(io.BytesIO(bytes))
def __len__(self):
return self.n_test_runs
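# Example usage (a sketch; the paths and values are placeholders, and `args` only
# needs the fields read in the constructors above):
#   from argparse import Namespace
#   from torchvision import transforms
#   args = Namespace(data_root="/path/to/data", mode="coarse", n_ways=5, n_shots=1,
#                    n_queries=15, n_test_runs=600, n_aug_support_samples=1)
#   tf = transforms.ToTensor()
#   meta_val = MetaTieredImageNet(args, partition="val",
#                                 train_transform=tf, test_transform=tf)
#   support_xs, support_ys, query_xs, query_ys = meta_val[0]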
class MetaFGTieredImageNet(MetaTieredImageNet):
def __getitem__(self, item):
if self.fix_seed:
np.random.seed(item)
coarse_sampled = np.random.choice(list(self.coarse2fine.keys()), 1, False)[0]
cls_sampled = np.random.choice(self.coarse2fine[coarse_sampled], self.n_ways, False)
support_xs = []
support_ys = []
query_xs = []
query_ys = []
for idx, cls in enumerate(cls_sampled):
imgs = np.asarray(self.data[cls])
support_xs_ids_sampled = np.random.choice(range(imgs.shape[0]), self.n_shots, False)
support_xs.append(imgs[support_xs_ids_sampled])
support_ys.append([idx] * self.n_shots)
query_xs_ids = np.setxor1d(np.arange(imgs.shape[0]), support_xs_ids_sampled)
            query_xs_ids = np.random.choice(query_xs_ids, self.n_queries, False)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 22 09:58:49 2021
@author: cghiaus
Import functions for EPW data files.
Adapted from
https://github.com/pvlib/pvlib-python/blob/master/pvlib/iotools/epw.py
"""
import numpy as np
import pandas as pd
import sys
def tc2ss(A, G, b, C, f, y):
"""
Parameters
----------
A : TYPE np.array
        adjacency (TC connection) matrix:
#rows = #heat flow rates; #cols = #temperature nodes
G : TYPE np.array
square diagonal matrix of conductances
#rows = #heat flow rates (or resistances)
b : TYPE np.array
vector indicating the presence of temperature sources on branches:
1 for branches with temperature sources, otherwise 0
C : TYPE np.array
square diagonal matrix of capacities
f : TYPE np.array
vector indicating the presence of flow sources in nodes:
1 for nodes with heat sources, otherwise 0
y : TYPE np.array
vector indicating the temperatures in the outputs:
1 for output nodes, otherwise 0
Returns
-------
As state matrix in state equation
Bs input matrix in state equation
Cs output matrix in observation equation
    Ds feedthrough (direct transmission) matrix in observation equation
Idx{1} nodes with capacities
{2} branches with temp. sources
{3} nodes with flow sources
{4} nodes output temperatures
"""
rC = np.nonzero(np.diag(C))[0] # rows of non-zero elements in C
r0 = np.nonzero(np.diag(C) == 0)[0] # rows of zero elements in C
# idx_nonzero = {'C': rC,
# 'b': np.nonzero(b)[0],
# 'f': np.nonzero(f)[0],
# 'y': np.nonzero(y)[0]}
if rC.size == 0:
sys.exit('Error in dm4bem.tc2ss: capacity C matrix is zero')
CC = np.diag(C[np.nonzero(C)])
K = -A.T @ G @ A
K11 = K[r0, :][:, r0]
K12 = K[r0, :][:, rC]
K21 = K[rC, :][:, r0]
K22 = K[rC, :][:, rC]
Kb = A.T @ G
Kb1 = Kb[r0, :]
Kb2 = Kb[rC, :]
# State equation
As = np.linalg.inv(CC) @ (
-K21 @ np.linalg.inv(K11) @ K12 + K22)
Bs = np.linalg.inv(CC) @ np.hstack([
-K21 @ np.linalg.inv(K11) @ Kb1 + Kb2,
-K21 @ np.linalg.inv(K11),
np.eye(CC.shape[0])])
    # re-arrange Bs in the order of the f-sources
# index B for sources [b f0 fC]
idx_new = np.hstack([np.arange(b.size), b.size + r0, b.size + rC])
    Bs[:, idx_new] = np.array(Bs)
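    # Example (a sketch for a minimal two-node circuit; the remainder of tc2ss,
    # which assembles the returned matrices, is not shown here and the sign
    # convention of A below is an assumption):
    #   A = np.array([[1, 0], [-1, 1]])   # branch 0 feeds node 0; branch 1: node 0 -> node 1
    #   G = np.diag([10.0, 5.0])          # branch conductances
    #   b = np.array([1, 0])              # temperature source on branch 0
    #   C = np.diag([0.0, 1e5])           # capacity on node 1 only
    #   f = np.array([0, 1])              # flow source on node 1
    #   y = np.array([0, 1])              # observe the temperature of node 1
    #   out = tc2ss(A, G, b, C, f, y)     # As, Bs, Cs, Ds (and index info) per the docstring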
import numpy as np
import matplotlib.pyplot as plt
import ipywidgets as widgets
import html
import matplotlib.patches as patches
from matplotlib.colors import SymLogNorm
import astropy.units as u
from .crisp import CRISP, CRISPSequence, CRISPWidebandSequence, CRISPNonU, CRISPNonUSequence
from .inversions import Inversion
from .utils import CRISP_sequence_constructor
from matplotlib import ticker
import matplotlib.patheffects as PathEffects
from matplotlib.lines import Line2D
from astropy.wcs.wcsapi import SlicedLowLevelWCS
from .utils import pt_bright_cycler
from IPython.core.display import display
from matplotlib.dates import date2num, DateFormatter
class SpectralViewer:
"""
Imaging spectroscopic viewer. SpectralViewer should be used when one wants to click on points of an image and have the spectrum displayed for that point. This works **exclusively** in Jupyter notebook but can be a nice data exploration tool. This viewer utilises the data structures defined in `crispy.crisp` and has many variable options.
:param data: The data to explore, this can be either one or two spectral lines (support for more than two can be added if required). This is the only required argument to view the data.
:type data: str or list or CRISP or CRISPSequence or CRISPNonU or CRISPNonUSequence
:param wcs: A prescribed world coordinate system. If None, the world coordinate system is derived from the data. Default is None.
:type wcs: astropy.wcs.WCS or None, optional
:param uncertainty: The uncertainty in the intensity values of the data. Default is None.
:type uncertainty: numpy.ndarray or None, optional
:param mask: A mask to be used on the data. Default is None.
:type mask: numpy.ndarray or None, optional
:param nonu: Whether or not the spectral axis is non-uniform. Default is False.
:type nonu: bool, optional
:cvar coords: The coordinates selected to produce spectra.
:type coords: list[tuple]
:cvar px_coords: The coordinates selected to produce spectra in pixel space. This is important for indexing the data later to get the correct spectra.
:type px_coords: list[tuple]
:cvar shape_type: The spectra can be selected for a single point or for a box with specified dimensions with top-left corner where the user clicks. This attribute tells the user which point is described by which shape.
:type shape_type: list[str]
"""
def __init__(self, data, wcs=None, uncertainty=None, mask=None, nonu=False):
plt.style.use("bmh")
self.aa = html.unescape("Å")
self.l = html.unescape("λ")
self.a = html.unescape("α")
self.D = html.unescape("Δ")
shape = widgets.Dropdown(options=["point", "box"], value="point", description="Shape: ")
if not nonu:
if type(data) == str:
self.cube = CRISP(filename=data, wcs=wcs, uncertainty=uncertainty, mask=mask)
if self.cube.file.data.ndim == 3:
self.wvls = self.cube.wave(np.arange(self.cube.shape[0])) << u.Angstrom
elif self.cube.file.data.ndim == 4:
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
elif type(data) == list:
data = CRISP_sequence_constructor(data, wcs=wcs, uncertainty=uncertainty, mask=mask, nonu=nonu)
self.cube = CRISPSequence(data)
if self.cube.list[0].file.data.ndim == 3:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[0])) << u.Angstrom
elif self.cube.list[0].file.data.ndim == 4:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[1])) << u.Angstrom
if self.cube.list[1].file.data.ndim == 3:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[0])) << u.Angstrom
elif self.cube.list[1].file.data.ndim == 4:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[1])) << u.Angstrom
elif type(data) == CRISP:
self.cube = data
if self.cube.file.data.ndim == 3:
self.wvls = self.cube.wave(np.arange(self.cube.shape[0])) << u.Angstrom
elif self.cube.file.data.ndim == 4:
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
elif type(data) == CRISPSequence:
self.cube = data
if self.cube.list[0].file.data.ndim == 3:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[0])) << u.Angstrom
elif self.cube.list[0].file.data.ndim == 4:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[1])) << u.Angstrom
if self.cube.list[1].file.data.ndim == 3:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[0])) << u.Angstrom
elif self.cube.list[1].file.data.ndim == 4:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[1])) << u.Angstrom
else:
if type(data) == str:
self.cube = CRISPNonU(filename=data, wcs=wcs, uncertainty=uncertainty, mask=mask)
if self.cube.file.data.ndim == 3:
self.wvls = self.cube.wave(np.arange(self.cube.shape[0])) << u.Angstrom
elif self.cube.file.data.ndim == 4:
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
elif type(data) == list:
data = CRISP_sequence_constructor(data, wcs=wcs, uncertainty=uncertainty, mask=mask, nonu=nonu)
self.cube = CRISPNonUSequence(data)
if self.cube.list[0].file.data.ndim == 3:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[0])) << u.Angstrom
elif self.cube.list[0].file.data.ndim == 4:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[1])) << u.Angstrom
if self.cube.list[1].file.data.ndim == 3:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[0])) << u.Angstrom
elif self.cube.list[1].file.data.ndim == 4:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[1])) << u.Angstrom
elif type(data) == CRISPNonU:
self.cube = data
if self.cube.file.data.ndim == 3:
self.wvls = self.cube.wave(np.arange(self.cube.shape[0])) << u.Angstrom
elif self.cube.file.data.ndim == 4:
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
elif type(data) == CRISPNonUSequence:
self.cube = data
if self.cube.list[0].file.data.ndim == 3:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[0])) << u.Angstrom
elif self.cube.list[0].file.data.ndim == 4:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[1])) << u.Angstrom
if self.cube.list[1].file.data.ndim == 3:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[0])) << u.Angstrom
elif self.cube.list[1].file.data.ndim == 4:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[1])) << u.Angstrom
if type(self.cube) == CRISP or type(self.cube) == CRISPNonU:
self.fig = plt.figure(figsize=(8,10))
try:
self.ax1 = self.fig.add_subplot(1, 2, 1, projection=self.cube.wcs.dropaxis(-1))
except:
self.ax1 = self.fig.add_subplot(1, 2, 1, projection=SlicedLowLevelWCS(self.cube[0].wcs.low_level_wcs, 0))
self.ax1.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax1.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax2 = self.fig.add_subplot(1, 2, 2)
self.ax2.yaxis.set_label_position("right")
self.ax2.yaxis.tick_right()
self.ax2.set_ylabel("I [DNs]")
self.ax2.set_xlabel(f"{self.l} [{self.aa}]")
self.ax2.tick_params(direction="in")
ll = widgets.SelectionSlider(options=[np.round(l - np.median(self.wvls), decimals=2).value for l in self.wvls], description = f"{self.D} {self.l} [{self.aa}]")
out1 = widgets.interactive_output(self._img_plot1, {"ll" : ll})
out2 = widgets.interactive_output(self._shape, {"opts" : shape})
display(widgets.HBox([ll, shape]))
elif type(self.cube) == CRISPSequence or type(self.cube) == CRISPNonUSequence:
self.fig = plt.figure(figsize=(8,10))
try:
self.ax1 = self.fig.add_subplot(2, 2, 1, projection=self.cube.list[0].wcs.dropaxis(-1))
except:
self.ax1 = self.fig.add_subplot(2, 2, 1, projection=SlicedLowLevelWCS(self.cube.list[0][0].wcs.low_level_wcs, 0))
self.ax1.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax1.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax1.xaxis.set_label_position("top")
self.ax1.xaxis.tick_top()
try:
self.ax2 = self.fig.add_subplot(2, 2, 3, projection=self.cube.list[0].wcs.dropaxis(-1))
except:
self.ax2 = self.fig.add_subplot(2, 2, 3, projection=SlicedLowLevelWCS(self.cube.list[0][0].wcs.low_level_wcs, 0))
self.ax2.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax2.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax3 = self.fig.add_subplot(2, 2, 2)
self.ax3.yaxis.set_label_position("right")
self.ax3.yaxis.tick_right()
self.ax3.set_ylabel("Intensity [DNs]")
self.ax3.set_xlabel(f"{self.l} [{self.aa}]")
self.ax3.xaxis.set_label_position("top")
self.ax3.xaxis.tick_top()
self.ax3.tick_params(direction="in")
self.ax4 = self.fig.add_subplot(2, 2, 4)
self.ax4.yaxis.set_label_position("right")
self.ax4.yaxis.tick_right()
self.ax4.set_ylabel("Intensity [DNs]")
self.ax4.set_xlabel(f"{self.l} [{self.aa}]")
self.ax4.tick_params(direction="in")
ll1 = widgets.SelectionSlider(
options=[np.round(l - np.median(self.wvls1), decimals=2).value for l in self.wvls1],
description=fr"{self.D} {self.l}$_{1}$ [{self.aa}]",
style={"description_width" : "initial"}
)
ll2 = widgets.SelectionSlider(
options=[np.round(l - np.median(self.wvls2), decimals=2).value for l in self.wvls2],
description=fr"{self.D} {self.l}$_{2}$ [{self.aa}]",
style={"description_width" : "initial"}
)
out1 = widgets.interactive_output(self._img_plot2, {"ll1" : ll1, "ll2" : ll2})
out2 = widgets.interactive_output(self._shape, {"opts" : shape})
display(widgets.HBox([widgets.VBox([ll1, ll2]), shape]))
self.coords = []
self.px_coords = []
self.shape_type = []
self.box_coords = []
self.colour_idx = 0
self.n = 0
self.receiver = self.fig.canvas.mpl_connect("button_press_event", self._on_click)
try:
x = widgets.IntText(value=1, min=1, max=self.cube.shape[-1], description="x [pix]")
y = widgets.IntText(value=1, min=1, max=self.cube.shape[-2], description="y [pix]")
except:
x = widgets.IntText(value=1, min=1, max=self.cube.list[0].shape[-1], description="x [pix]")
y = widgets.IntText(value=1, min=1, max=self.cube.list[0].shape[-2], description="y [pix]")
outx = widgets.interactive_output(self._boxx, {"x" : x})
outy = widgets.interactive_output(self._boxy, {"y" : y})
display(widgets.HBox([x, y]))
done_button = widgets.Button(description="Done")
done_button.on_click(self._disconnect_matplotlib)
clear_button = widgets.Button(description="Clear")
clear_button.on_click(self._clear)
save_button = widgets.Button(description="Save")
save_button.on_click(self._save)
display(widgets.HBox([done_button, clear_button, save_button]))
widgets.interact(self._file_name, fn= widgets.Text(description="Filename to save as: ", style={"description_width" : "initial"}, layout=widgets.Layout(width="50%")))
def _on_click(self, event):
if self.fig.canvas.manager.toolbar.mode != "":
return
if type(self.cube) == CRISP or type(self.cube) == CRISPNonU:
if self.shape == "point":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
centre_coord = int(event.ydata), int(event.xdata)
self.px_coords.append(centre_coord)
self.shape_type.append("point")
circ = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
self.ax1.add_patch(circ)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt = self.ax1.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
px = self.cube.to_lonlat(*centre_coord) << u.arcsec
if self.cube.file.data.ndim == 3:
self.ax2.plot(self.wvls, self.cube.file.data[:, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube.file.data.ndim == 4:
self.ax2.plot(self.wvls, self.cube.file.data[0, :, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax2.legend()
self.coords.append(px)
self.colour_idx += 1
self.fig.canvas.draw()
elif self.shape == "box":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
box_anchor = int(event.ydata), int(event.xdata)
self.px_coords.append(box_anchor)
self.shape_type.append("box")
                # obtain the coordinates of the box on a grid with pixels the size of the box to make sure there are no copies of the same box
box_coord = box_anchor[0] // self.boxy, box_anchor[1] // self.boxx
if box_coord in self.box_coords:
coords = [p.get_xy() for p in self.ax1.patches]
for p in self.ax1.patches:
if p.get_xy() == box_anchor:
p.remove()
idx = self.box_coords.index(box_coord)
del self.box_coords[idx]
del self.px_coords[idx]
del self.shape_type[idx]
del self.coords[idx]
return
self.coords.append(self.cube.to_lonlat(*box_anchor) << u.arcsec)
rect = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
rect.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
self.ax1.add_patch(rect)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt = self.ax1.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
if self.cube.file.data.ndim == 3:
self.ax2.plot(self.wvls, np.mean(self.cube.file.data[:,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx],axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube.file.data.ndim == 4:
self.ax2.plot(self.wvls, np.mean(self.cube.file.data[0, :,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx],axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax2.legend()
self.colour_idx += 1
self.fig.canvas.draw()
elif type(self.cube) == CRISPSequence or type(self.cube) == CRISPNonUSequence:
if self.shape == "point":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
centre_coord = int(event.ydata), int(event.xdata) #with WCS, the event data is returned in pixels so we don't need to do the conversion from real world but rather to real world later on
self.px_coords.append(centre_coord)
circ1 = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
circ2 = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
self.ax1.add_patch(circ1)
self.ax2.add_patch(circ2)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt_1 = self.ax1.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt_2 = self.ax2.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt_1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
txt_2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
px = self.cube.list[0].to_lonlat(*centre_coord) << u.arcsec
if self.cube.list[0].file.data.ndim == 3:
self.ax3.plot(self.wvls1, self.cube.list[0].file.data[:, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube.list[0].file.data.ndim == 4:
self.ax3.plot(self.wvls1, self.cube.list[0].file.data[0, :, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
if self.cube.list[1].file.data.ndim == 3:
self.ax4.plot(self.wvls2, self.cube.list[1].file.data[:, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube.list[1].file.data.ndim == 4:
self.ax4.plot(self.wvls2, self.cube.list[1].file.data[0, :, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax3.legend()
self.ax4.legend()
self.coords.append(px)
self.colour_idx += 1
self.fig.canvas.draw()
elif self.shape == "box":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
box_anchor = int(event.ydata), int(event.xdata)
self.px_coords.append(box_anchor)
self.shape_type.append("box")
                # obtain the coordinates of the box on a grid with pixels the size of the box to make sure there are no copies of the same box
box_coord = box_anchor[0] // self.boxy, box_anchor[1] // self.boxx
if box_coord in self.box_coords:
                    coords = [p.get_xy() for p in self.ax1.patches]
                    for p in self.ax1.patches:
if p.get_xy() == box_anchor:
p.remove()
idx = self.box_coords.index(box_coord)
del self.box_coords[idx]
del self.px_coords[idx]
del self.shape_type[idx]
del self.coords[idx]
return
self.coords.append(self.cube.to_lonlat(*box_anchor) << u.arcsec)
rect1 = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
rect1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
rect2 = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
rect2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
self.ax1.add_patch(rect1)
self.ax2.add_patch(rect2)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt1 = self.ax1.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
                txt2 = self.ax2.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
                txt2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
if self.cube.list[0].file.data.ndim == 3:
self.ax3.plot(self.wvls1, np.mean(self.cube.list[0].file.data[:,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx],axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube.list[0].file.data.ndim == 4:
self.ax3.plot(self.wvls1, np.mean(self.cube.list[0].file.data[0, :,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx],axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
if self.cube.list[1].file.data.ndim == 3:
self.ax4.plot(self.wvls2, np.mean(self.cube.list[1].file.data[:,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx],axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube.list[1].file.data.ndim == 4:
self.ax4.plot(self.wvls2, np.mean(self.cube.list[1].file.data[0, :,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx],axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax3.legend()
self.ax4.legend()
self.colour_idx += 1
self.fig.canvas.draw()
def _shape(self, opts):
self.shape = opts
def _boxx(self, x):
self.boxx = x
def _boxy(self, y):
self.boxy = y
def _disconnect_matplotlib(self, _):
self.fig.canvas.mpl_disconnect(self.receiver)
def _clear(self, _):
self.coords = []
self.px_coords = []
self.shape_type = []
self.box_coords = []
self.colour_idx = 0
self.n = 0
if type(self.cube) == CRISP:
while len(self.ax1.patches) > 0:
for p in self.ax1.patches:
p.remove()
while len(self.ax1.texts) > 0:
for t in self.ax1.texts:
t.remove()
self.ax2.clear()
self.ax2.set_ylabel("Intensity [DNs]")
self.ax2.set_xlabel(f"{self.l} [{self.aa}]")
self.fig.canvas.draw()
self.fig.canvas.flush_events()
else:
while len(self.ax1.patches) > 0:
for p in self.ax1.patches:
p.remove()
while len(self.ax2.patches) > 0:
for p in self.ax2.patches:
p.remove()
while len(self.ax1.texts) > 0:
for t in self.ax1.texts:
t.remove()
while len(self.ax2.texts) > 0:
for t in self.ax2.texts:
t.remove()
self.ax3.clear()
self.ax3.set_ylabel("Intensity [DNs]")
self.ax3.set_xlabel(f"{self.l} [{self.aa}]")
self.ax4.clear()
self.ax4.set_ylabel("Intensity [DNs]")
self.ax4.set_xlabel(f"{self.l} [{self.aa}]")
self.fig.canvas.draw()
self.fig.canvas.flush_events()
def _save(self, _):
self.fig.savefig(self.filename, dpi=300)
def _file_name(self, fn):
self.filename = fn
def _img_plot1(self, ll):
if self.ax1.images == []:
pass
elif self.ax1.images[-1].colorbar != None:
self.ax1.images[-1].colorbar.remove()
ll_idx = int(np.where(np.round(self.wvls, decimals=2).value == np.round(np.median(self.wvls).value + ll, decimals=2))[0])
try:
data = self.cube.file.data[ll_idx].astype(np.float)
data[data < 0] = np.nan
im1 = self.ax1.imshow(data, cmap="Greys_r")
except:
data = self.cube.file.data[0, ll_idx].astype(np.float)
data[data < 0] = np.nan
im1 = self.ax1.imshow(data, cmap="Greys_r")
try:
el = self.cube.file.header["WDESC1"]
except KeyError:
el = self.cube.file.header["element"]
self.ax1.set_title(fr"{el} {self.aa} {self.D} {self.l}$_{1}$ = {ll} {self.aa}")
self.fig.colorbar(im1, ax=self.ax1, orientation="horizontal", label="Intensity [DNs]")
def _img_plot2(self, ll1, ll2):
if self.ax1.images == []:
pass
elif self.ax1.images[-1].colorbar != None:
self.ax1.images[-1].colorbar.remove()
if self.ax2.images == []:
pass
elif self.ax2.images[-1].colorbar != None:
self.ax2.images[-1].colorbar.remove()
ll1_idx = int(np.where(np.round(self.wvls1, decimals=2).value == np.round(np.median(self.wvls1).value + ll1, decimals=2))[0])
ll2_idx = int(np.where(np.round(self.wvls2, decimals=2).value == np.round(np.median(self.wvls2).value + ll2, decimals=2))[0])
try:
data = self.cube.list[0].file.data[ll1_idx].astype(np.float)
data[data < 0] = np.nan
im1 = self.ax1.imshow(data, cmap="Greys_r")
except:
data = self.cube.list[0].file.data[0, ll1_idx].astype(np.float)
data[data < 0] = np.nan
im1 = self.ax1.imshow(data, cmap="Greys_r")
try:
data = self.cube.list[1].file.data[ll2_idx].astype(np.float)
data[data < 0] = np.nan
im2 = self.ax2.imshow(data, cmap="Greys_r")
except:
data = self.cube.list[1].file.data[0, ll2_idx].astype(np.float)
data[data < 0] = np.nan
im2 = self.ax2.imshow(data, cmap="Greys_r")
try:
el1 = self.cube.list[0].file.header["WDESC1"]
el2 = self.cube.list[1].file.header["WDESC1"]
except KeyError:
el1 = self.cube.list[0].file.header["element"]
el2 = self.cube.list[1].file.header["element"]
self.ax1.set_title(fr"{el1} {self.aa} {self.D} {self.l}$_{1}$ = {ll1} {self.aa}")
self.ax2.set_title(fr"{el2} {self.aa} {self.D} {self.l}$_{2}$ = {ll2} {self.aa}")
self.fig.colorbar(im1, ax=self.ax1, orientation="horizontal", label="Intensity [DNs]")
self.fig.colorbar(im2, ax=self.ax2, orientation="horizontal", label="Intensity [DNs]")
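# Example (a sketch; the file names are placeholders): inside a Jupyter notebook
# with an interactive matplotlib backend (e.g. %matplotlib widget):
#   sv = SpectralViewer("crisp_ca8542_scan.fits")
# or, for two spectral lines side by side:
#   sv = SpectralViewer(["crisp_ca8542_scan.fits", "crisp_halpha_scan.fits"])
# Clicked points and boxes accumulate in sv.coords, sv.px_coords and sv.shape_type.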
class WidebandViewer:
"""
Wideband image viewer. This visualisation tool is useful for exploring the time series evolution of the wideband images.
:param files: The files to explore the time series for.
:type files: CRISPWidebandSequence or list
:cvar coords: The coordinates selected to produce spectra.
:type coords: list[tuple]
:cvar px_coords: The coordinates selected to produce spectra in pixel space. This is important for indexing the data later to get the correct spectra.
:type px_coords: list[tuple]
:cvar shape_type: The spectra can be selected for a single point or for a box with specified dimensions with top-left corner where the user clicks. This attribute tells the user which point is described by which shape.
:type shape_type: list[str]
"""
def __init__(self, files):
plt.style.use("bmh")
shape = widgets.Dropdown(options=["point", "box"], value="point", description="Shape: ")
if type(files) == CRISPWidebandSequence:
self.cube = files
elif type(files) == list and type(files[0]) == dict:
self.cube = CRISPWidebandSequence(files)
elif type(files) == list and type(files[0]) == str:
files = [{"filename" : f} for f in files]
self.cube = CRISPWidebandSequence(files)
elif type(files) == list and type(files[0]) == CRISPWidebandSequence:
self.cube = files
if type(self.cube) is not list:
try:
self.time = [date2num(f.file.header["DATE-AVG"]) for f in self.cube.list]
except KeyError:
self.time = [date2num(f.file.header["date_obs"]+" "+f.file.header["time_obs"]) for f in self.cube.list]
self.fig = plt.figure(figsize=(8,10))
self.ax1 = self.fig.add_subplot(1, 2, 1, projection=self.cube.list[0].wcs)
self.ax1.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax1.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax2 = self.fig.add_subplot(1, 2, 2)
self.ax2.yaxis.set_label_position("right")
self.ax2.yaxis.tick_right()
self.ax2.set_ylabel("I [DNs]")
self.ax2.set_xlabel("Time [UTC]")
self.ax2.xaxis.set_major_locator(plt.MaxNLocator(4))
self.ax2.tick_params(direction="in")
t = widgets.IntSlider(value=0, min=0, max=len(self.cube.list)-1, step=1, description="Time index: ", style={"description_width" : "initial"})
widgets.interact(self._img_plot1, t = t)
else:
try:
self.time1 = [date2num(f.file.header["DATE-AVG"]) for f in self.cube[0].list]
self.time2 = [date2num(f.file.header["DATE-AVG"]) for f in self.cube[1].list]
except KeyError:
self.time1 = [date2num(f.file.header["date_obs"]+" "+f.file.header["time_obs"]) for f in self.cube[0].list]
self.time2 = [date2num(f.file.header["date_obs"]+" "+f.file.header["time_obs"]) for f in self.cube[1].list]
self.fig = plt.figure(figsize=(8,10))
self.ax1 = self.fig.add_subplot(2, 2, 1, projection=self.cube[0].list[0].wcs)
self.ax1.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax1.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax1.xaxis.set_label_position("top")
self.ax1.xaxis.tick_top()
self.ax2 = self.fig.add_subplot(2, 2, 3, projection=self.cube[1].list[0].wcs)
            self.ax2.set_ylabel("Helioprojective Latitude [arcsec]")
            self.ax2.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax3 = self.fig.add_subplot(2, 2, 2)
self.ax3.yaxis.set_label_position("right")
self.ax3.yaxis.tick_right()
self.ax3.set_ylabel("I [DNs]")
self.ax3.set_xlabel("Time [UTC]")
self.ax3.xaxis.set_label_position("top")
self.ax3.xaxis.tick_top()
self.ax3.xaxis.set_major_locator(plt.MaxNLocator(4))
self.ax3.tick_params(direction="in")
self.ax4 = self.fig.add_subplot(2, 2, 4)
self.ax4.yaxis.set_label_position("right")
self.ax4.yaxis.tick_right()
self.ax4.set_ylabel("I [DNs]")
self.ax4.set_xlabel("Time [UTC]")
self.ax4.xaxis.set_major_locator(plt.MaxNLocator(4))
self.ax4.tick_params(direction="in")
t1 = widgets.IntSlider(value=0, min=0, max=len(self.cube[0].list)-1, step=1, description="Time index: ", style={"description_width" : "initial"})
t2 = widgets.IntSlider(value=0, min=0, max=len(self.cube[1].list)-1, step=1, description="Time index: ", style={"description_width" : "initial"})
widgets.interact(self._img_plot2, t1=t1, t2=t2)
self.coords = []
self.px_coords = []
self.shape_type = []
self.box_coords = []
self.colour_idx = 0
self.n = 0
        self.receiver = self.fig.canvas.mpl_connect("button_press_event", self._on_click)
widgets.interact(self._shape, opts=shape)
x = widgets.IntText(value=1, min=1, max=self.cube.list[0].shape[-1], description="x [pix]")
y = widgets.IntText(value=1, min=1, max=self.cube.list[0].shape[-2], description="y [pix]")
outx = widgets.interactive_output(self._boxx, {"x" : x})
outy = widgets.interactive_output(self._boxy, {"y" : y})
display(widgets.HBox([x, y]))
done_button = widgets.Button(description="Done")
done_button.on_click(self._disconnect_matplotlib)
clear_button = widgets.Button(description="Clear")
clear_button.on_click(self._clear)
save_button = widgets.Button(description="Save")
save_button.on_click(self._save)
display(widgets.HBox([done_button, clear_button, save_button]))
widgets.interact(self._file_name, fn= widgets.Text(description="Filename to save as: ", style={"description_width" : "initial"}, layout=widgets.Layout(width="50%")))
def _on_click(self, event):
if self.fig.canvas.manager.toolbar.mode != "":
return
if type(self.cube) == CRISPWidebandSequence:
if self.shape == "point":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
centre_coord = int(event.ydata), int(event.xdata)
self.px_coords.append(centre_coord)
self.shape_type.append("point")
circ = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
self.ax1.add_patch(circ)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt = self.ax1.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
px = self.cube.list[0].wcs.array_index_to_world(*centre_coord) << u.arcsec
prof = [f.file.data[centre_coord[0], centre_coord[1]] for f in self.cube.list]
self.ax2.plot(self.time, prof, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax2.xaxis.set_major_formatter(DateFormatter("%H:%M:%S"))
for label in self.ax2.get_xticklabels():
label.set_rotation(40)
label.set_horizontalalignment('right')
self.ax2.legend()
self.coords.append(px)
self.colour_idx += 1
self.fig.canvas.draw()
elif self.shape == "box":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
box_anchor = int(event.ydata), int(event.xdata)
self.px_coords.append(box_anchor)
self.shape_type.append("box")
                # obtain the coordinates of the box on a grid with pixels the size of the box to make sure there are no copies of the same box
box_coord = box_anchor[0] // self.boxy, box_anchor[1] // self.boxx
if box_coord in self.box_coords:
                    coords = [p.get_xy() for p in self.ax1.patches]
                    for p in self.ax1.patches:
if p.get_xy() == box_anchor:
p.remove()
idx = self.box_coords.index(box_coord)
del self.box_coords[idx]
del self.px_coords[idx]
del self.shape_type[idx]
del self.coords[idx]
return
self.coords.append(self.cube.to_lonlat(*box_anchor) << u.arcsec)
rect = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
rect.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
self.ax1.add_patch(rect)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt = self.ax1.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
prof = [np.mean(f.file.data[box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx]) for f in self.cube.list]
self.ax2.plot(self.time, prof, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax2.xaxis.set_major_formatter(DateFormatter("%H:%M:%S"))
for label in self.ax2.get_xticklabels():
label.set_rotation(40)
label.set_horizontalalignment('right')
self.ax2.legend()
self.colour_idx += 1
self.fig.canvas.draw()
elif type(self.cube) == list:
if self.shape == "point":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
centre_coord = int(event.ydata), int(event.xdata)
self.px_coords.append(centre_coord)
circ1 = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
circ2 = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
self.ax1.add_patch(circ1)
self.ax2.add_patch(circ2)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt_1 = self.ax1.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt_2 = self.ax2.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt_1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
txt_2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
px = self.cube[0].list[0].wcs.array_index_to_world(*centre_coord) << u.arcsec
prof_1 = [f.file.data[centre_coord[0], centre_coord[1]] for f in self.cube[0].list]
prof_2 = [f.file.data[centre_coord[0], centre_coord[1]] for f in self.cube[1].list]
self.ax3.plot(self.time1, prof_1, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax4.plot(self.time2, prof_2, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax3.legend()
self.ax3.xaxis.set_major_formatter(DateFormatter("%H:%M:%S"))
for label in self.ax3.get_xticklabels():
label.set_rotation(40)
label.set_horizontalalignment('right')
self.ax4.legend()
self.ax4.xaxis.set_major_formatter(DateFormatter("%H:%M:%S"))
for label in self.ax4.get_xticklabels():
label.set_rotation(40)
label.set_horizontalalignment('right')
self.coords.append(px)
self.colour_idx += 1
self.fig.canvas.draw()
elif self.shape == "box":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
box_anchor = int(event.ydata), int(event.xdata)
self.px_coords.append(box_anchor)
self.shape_type.append("box")
                # obtain the coordinates of the box on a grid with pixels the size of the box to make sure there are no copies of the same box
box_coord = box_anchor[0] // self.boxy, box_anchor[1] // self.boxx
if box_coord in self.box_coords:
                    coords = [p.get_xy() for p in self.ax1.patches]
                    for p in self.ax1.patches:
if p.get_xy() == box_anchor:
p.remove()
idx = self.box_coords.index(box_coord)
del self.box_coords[idx]
del self.px_coords[idx]
del self.shape_type[idx]
del self.coords[idx]
return
self.coords.append(self.cube.to_lonlat(*box_anchor) << u.arcsec)
rect1 = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
                rect1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
rect2 = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
rect2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
self.ax1.add_patch(rect1)
self.ax2.add_patch(rect2)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt1 = self.ax1.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
                txt1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
                txt2 = self.ax2.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
                txt2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
prof_1 = [np.mean(f.file.data[box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx]) for f in self.cube[0].list]
prof_2 = [np.mean(f.file.data[box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx]) for f in self.cube[1].list]
self.ax3.plot(self.time1, prof_1, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax4.plot(self.time2, prof_2, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax3.xaxis.set_major_formatter(DateFormatter("%H:%M:%S"))
for label in self.ax3.get_xticklabels():
label.set_rotation(40)
label.set_horizontalalignment('right')
self.ax4.xaxis.set_major_formatter(DateFormatter("%H:%M:%S"))
for label in self.ax4.get_xticklabels():
label.set_rotation(40)
label.set_horizontalalignment('right')
self.ax3.legend()
self.ax4.legend()
self.colour_idx += 1
            self.fig.canvas.draw()
def _shape(self, opts):
self.shape = opts
def _boxx(self, x):
self.boxx = x
def _boxy(self, y):
self.boxy = y
def _disconnect_matplotlib(self, _):
self.fig.canvas.mpl_disconnect(self.receiver)
def _clear(self, _):
self.coords = []
self.px_coords = []
self.shape_type = []
self.box_coords = []
self.colour_idx = 0
self.n = 0
if type(self.cube) == CRISPWidebandSequence:
while len(self.ax1.patches) > 0:
for p in self.ax1.patches:
p.remove()
while len(self.ax1.texts) > 0:
for t in self.ax1.texts:
t.remove()
self.ax2.clear()
self.ax2.set_ylabel("I [DNs]")
self.ax2.set_xlabel("Time [UTC]")
self.ax2.xaxis.set_major_locator(plt.MaxNLocator(4))
self.fig.canvas.draw()
self.fig.canvas.flush_events()
else:
while len(self.ax1.patches) > 0:
for p in self.ax1.patches:
p.remove()
while len(self.ax2.patches) > 0:
for p in self.ax2.patches:
p.remove()
while len(self.ax1.texts) > 0:
                for t in self.ax1.texts:
                    t.remove()
            while len(self.ax2.texts) > 0:
                for t in self.ax2.texts:
t.remove()
self.ax3.clear()
self.ax3.set_ylabel("I [DNs]")
self.ax3.set_xlabel("Time [UTC]")
self.ax3.xaxis.set_major_locator(plt.MaxNLocator(4))
self.ax4.clear()
self.ax4.set_ylabel("I [DNs]")
self.ax4.set_xlabel("Time [UTC]")
self.ax4.xaxis.set_major_locator(plt.MaxNLocator(4))
self.fig.canvas.draw()
self.fig.canvas.flush_events()
def _save(self, _):
self.fig.savefig(self.filename, dpi=300)
def _file_name(self, fn):
self.filename = fn
def _img_plot1(self, t):
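        # remove any colorbar left over from a previous slider update so colorbars do not accumulate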
if self.ax1.images == []:
pass
elif self.ax1.images[-1].colorbar is not None:
self.ax1.images[-1].colorbar.remove()
im1 = self.ax1.imshow(self.cube.list[t].file.data, cmap="Greys_r")
self.fig.colorbar(im1, ax=self.ax1, orientation="horizontal", label="I [DNs]")
def _img_plot2(self, t1, t2):
if self.ax1.images == []:
pass
elif self.ax1.images[-1].colorbar is not None:
self.ax1.images[-1].colorbar.remove()
if self.ax2.images == []:
pass
elif self.ax2.images[-1].colorbar is not None:
self.ax2.images[-1].colorbar.remove()
        im1 = self.ax1.imshow(self.cube[0].list[t1].file.data, cmap="Greys_r")
        im2 = self.ax2.imshow(self.cube[1].list[t2].file.data, cmap="Greys_r")
self.fig.colorbar(im1, ax=self.ax1, orientation="horizontal", label="I [DNs]")
self.fig.colorbar(im2, ax=self.ax2, orientation="horizontal", label="I [DNs]")
class AtmosViewer:
"""
    This visualisation tool is for investigating atmospheric parameters found via inversion techniques. It makes use of the ``Inversion`` class and assumes that there are three atmospheric parameters in the inversion: electron number density, electron temperature and bulk line-of-sight velocity, i.e. the quantities estimated by RADYNVERSION.
:param filename: The inversion file to be used.
:type filename: str or Inversion
:param z: The physical height grid of the estimated atmospheric parameters in megametres. Can only be None if filename is already an ``Inversion`` instance. Default is None. (N.B. the RADYNVERSION height grid is available from ``crispy.radynversion.utils``).
:type z: numpy.ndarray or None, optional
:param wcs: The world coordinate system that the inversion parameters are defined by. Can be None only if filename is already an ``Inversion`` instance. Default is None.
:type wcs: astropy.wcs.WCS or None, optional
    :param header: The additional header information from the observations. Can only be None if filename is already an ``Inversion`` instance. Default is None.
:type header: dict or None, optional
:param eb: Whether or not to plot the errorbars on the parameter profiles. Default is False.
:type eb: bool, optional
:cvar coords: The coordinates selected to produce spectra.
:type coords: list[tuple]
:cvar px_coords: The coordinates selected to produce spectra in pixel space. This is important for indexing the data later to get the correct spectra.
:type px_coords: list[tuple]
    :cvar shape_type: Spectra can be selected for a single point or for a box of specified dimensions whose top-left corner is where the user clicks. This attribute records which selection was made with which shape.
:type shape_type: list[str]
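
    A minimal usage sketch, run from a Jupyter notebook (``inv`` is assumed to be an existing ``Inversion`` instance, and the module path ``crispy.visualisation`` is an assumption, not taken from this file)::

        from crispy.visualisation import AtmosViewer

        av = AtmosViewer(inv, eb=True)  # click on the parameter maps to overplot profiles
        av.coords                       # world coordinates of the selections made so far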
"""
def __init__(self, filename, z=None, wcs=None, header=None, eb=False):
plt.style.use("bmh")
shape = widgets.Dropdown(options=["point", "box"], value="point", description="Shape: ")
if type(filename) == str:
assert z is not None
assert header is not None
self.inv = Inversion(filename=filename, wcs=wcs, z=z, header=header)
elif type(filename) == Inversion:
self.inv = filename
self.coords = []
self.px_coords = []
self.shape_type = []
self.box_coords = []
self.colour_idx = 0
self.n = 0
self.eb = eb
self.fig = plt.figure(figsize=(8,10))
self.gs = self.fig.add_gridspec(nrows=5, ncols=3)
self.ax1 = self.fig.add_subplot(self.gs[:2, 0], projection=self.inv.wcs.dropaxis(-1))
self.ax2 = self.fig.add_subplot(self.gs[:2, 1], projection=self.inv.wcs.dropaxis(-1))
self.ax3 = self.fig.add_subplot(self.gs[:2, 2], projection=self.inv.wcs.dropaxis(-1))
self.ax1.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax1.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax2.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax3.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax2.tick_params(axis="y", labelleft=False)
self.ax3.tick_params(axis="y", labelleft=False)
self.ax4 = self.fig.add_subplot(self.gs[2, :])
self.ax4.set_ylabel(r"log $n_{e}$ [cm$^{-3}$]")
self.ax4.yaxis.set_label_position("right")
self.ax4.yaxis.tick_right()
self.ax5 = self.fig.add_subplot(self.gs[3, :])
self.ax5.set_ylabel(r"log T [K]")
self.ax5.yaxis.set_label_position("right")
self.ax5.yaxis.tick_right()
self.ax6 = self.fig.add_subplot(self.gs[4, :])
self.ax6.set_ylabel(r"v [km s$^{-1}$]")
self.ax6.set_xlabel(r"z [Mm]")
self.ax6.yaxis.set_label_position("right")
self.ax6.yaxis.tick_right()
self.ax4.tick_params(axis="x", labelbottom=False, direction="in")
self.ax5.tick_params(axis="x", labelbottom=False, direction="in")
self.ax6.tick_params(axis="both", direction="in")
widgets.interact(self._img_plot,
z = widgets.SelectionSlider(options=np.round(self.inv.z, decimals=3), description="Image height [Mm]: ", style={"description_width" : "initial"}, layout=widgets.Layout(width="50%")))
widgets.interact(self._shape, opts=shape)
self.receiver = self.fig.canvas.mpl_connect("button_press_event", self._on_click)
x = widgets.IntText(value=1, min=1, max=self.inv.ne.shape[-1], description="x [pix]")
y = widgets.IntText(value=1, min=1, max=self.inv.ne.shape[-2], description="y [pix]")
outx = widgets.interactive_output(self._boxx, {"x" : x})
outy = widgets.interactive_output(self._boxy, {"y" : y})
display(widgets.HBox([x, y]))
done_button = widgets.Button(description="Done")
done_button.on_click(self._disconnect_matplotlib)
clear_button = widgets.Button(description='Clear')
clear_button.on_click(self._clear)
save_button = widgets.Button(description="Save")
save_button.on_click(self._save)
display(widgets.HBox([done_button, clear_button, save_button]))
        widgets.interact(self._file_name, fn=widgets.Text(description="Filename to save as: ", style={"description_width" : "initial"}, layout=widgets.Layout(width="50%")))
def _on_click(self, event):
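        # ignore clicks made while a toolbar tool (e.g. zoom or pan) is active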
if self.fig.canvas.manager.toolbar.mode != "":
return
if self.shape == "point":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
centre_coord = int(event.ydata), int(event.xdata)
self.px_coords.append(centre_coord)
circ1 = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
circ2 = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
circ3 = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
self.ax1.add_patch(circ1)
self.ax2.add_patch(circ2)
self.ax3.add_patch(circ3)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt_1 = self.ax1.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt_2 = self.ax2.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt_3 = self.ax3.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt_1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
txt_2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
txt_3.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
if self.eb:
self.ax4.errorbar(self.inv.z, self.inv.ne[:,centre_coord[0], centre_coord[1]], yerr=self.inv.err[:,centre_coord[0],centre_coord[1],0], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax5.errorbar(self.inv.z, self.inv.temp[:,centre_coord[0], centre_coord[1]], yerr=self.inv.err[:,centre_coord[0],centre_coord[1],1], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax6.errorbar(self.inv.z, self.inv.vel[:,centre_coord[0],centre_coord[1]], yerr=self.inv.err[:,centre_coord[0],centre_coord[1],2], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
else:
self.ax4.plot(self.inv.z, self.inv.ne[:,centre_coord[0],centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax5.plot(self.inv.z, self.inv.temp[:,centre_coord[0],centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax6.plot(self.inv.z, self.inv.vel[:,centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax4.legend()
self.ax5.legend()
self.ax6.legend()
px = self.inv.to_lonlat(*centre_coord) << u.arcsec
self.colour_idx += 1
self.coords.append(px)
self.fig.canvas.draw()
elif self.shape == "box":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
box_anchor = int(event.ydata), int(event.xdata)
self.px_coords.append(box_anchor)
self.shape_type.append("box")
            # obtain the coordinates of the box on a grid with pixels the size of the box to make sure there are no duplicate boxes
box_coord = box_anchor[0] // self.boxy, box_anchor[1] // self.boxx
if box_coord in self.box_coords:
coords = [p.get_xy() for p in self.ax1.patches]
for p in self.ax1.patches:
                    if p.get_xy() == box_anchor[::-1]:
p.remove()
idx = self.box_coords.index(box_coord)
del self.box_coords[idx]
del self.px_coords[idx]
del self.shape_type[idx]
del self.coords[idx]
return
self.coords.append(self.inv.to_lonlat(*box_anchor) << u.arcsec)
rect1 = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
rect1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
rect2 = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
rect2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
rect3 = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
rect3.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
self.ax1.add_patch(rect1)
self.ax2.add_patch(rect2)
self.ax3.add_patch(rect3)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt1 = self.ax1.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
txt2 = self.ax2.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
txt3 = self.ax3.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt3.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
if self.eb:
                self.ax4.errorbar(self.inv.z, np.mean(self.inv.ne[:,box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx], axis=(1,2)), yerr=np.mean(self.inv.err[:,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx,0], axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
                self.ax5.errorbar(self.inv.z, np.mean(self.inv.temp[:,box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx], axis=(1,2)), yerr=np.mean(self.inv.err[:,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx,1], axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
                self.ax6.errorbar(self.inv.z, np.mean(self.inv.vel[:,box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx], axis=(1,2)), yerr=np.mean(self.inv.err[:,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx,2], axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
else:
self.ax4.plot(self.inv.z, np.mean(self.inv.ne[:,box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx], axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax5.plot(self.inv.z, np.mean(self.inv.temp[:,box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx], axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax6.plot(self.inv.z, np.mean(self.inv.vel[:,box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx], axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax4.legend()
self.ax5.legend()
self.ax6.legend()
self.colour_idx += 1
self.fig.canvas.draw()
def _shape(self, opts):
self.shape = opts
def _boxx(self, x):
self.boxx = x
def _boxy(self, y):
self.boxy = y
def _disconnect_matplotlib(self, _):
self.fig.canvas.mpl_disconnect(self.receiver)
def _clear(self, _):
self.coords = []
self.px_coords = []
self.shape_type = []
self.box_coords = []
self.colour_idx = 0
self.n = 0
while len(self.ax1.patches) > 0:
for p in self.ax1.patches:
p.remove()
while len(self.ax2.patches) > 0:
for p in self.ax2.patches:
p.remove()
while len(self.ax3.patches) > 0:
for p in self.ax3.patches:
p.remove()
while len(self.ax1.texts) > 0:
for t in self.ax1.texts:
t.remove()
while len(self.ax2.texts) > 0:
for t in self.ax2.texts:
t.remove()
while len(self.ax3.texts) > 0:
for t in self.ax3.texts:
t.remove()
self.ax4.clear()
self.ax4.set_ylabel(r"log n$_{e}$ [cm$^{-3}$]")
self.ax5.clear()
self.ax5.set_ylabel(r"log T [K]")
self.ax6.clear()
self.ax6.set_ylabel(r"v [km s$^{-1}$]")
self.ax6.set_xlabel(r"z [Mm]")
self.fig.canvas.draw()
self.fig.canvas.flush_events()
def _save(self, _):
self.fig.savefig(self.filename, dpi=300)
def _file_name(self, fn):
self.filename = fn
def _img_plot(self, z):
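        # drop the colorbars from the previous draw so they do not stack up as the height slider moves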
if self.ax1.images == []:
pass
elif self.ax1.images[-1].colorbar != None:
self.ax1.images[-1].colorbar.remove()
if self.ax2.images == []:
pass
elif self.ax2.images[-1].colorbar != None:
self.ax2.images[-1].colorbar.remove()
if self.ax3.images == []:
pass
elif self.ax3.images[-1].colorbar != None:
self.ax3.images[-1].colorbar.remove()
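        # find the index of the chosen height in the (rounded) z grid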
z_idx = int(np.where(np.round(self.inv.z, decimals=3) == np.round(z, decimals=3))[0])
im1 = self.ax1.imshow(self.inv.ne[z_idx], cmap="cividis")
self.fig.colorbar(im1, ax=self.ax1, orientation="horizontal", label=r"log $n_{e}$ [cm$^{-3}$]")
im2 = self.ax2.imshow(self.inv.temp[z_idx], cmap="hot")
self.fig.colorbar(im2, ax=self.ax2, orientation="horizontal", label=r"log T [K]")
im3 = self.ax3.imshow(self.inv.vel[z_idx], cmap="RdBu", clim=(-np.max(self.inv.vel[z_idx]), np.max(self.inv.vel[z_idx])))
self.fig.colorbar(im3, ax=self.ax3, orientation="horizontal", label=r"v [km s$^{-1}$]")
class ImageViewer:
"""
    This visualiser displays only the images of the data, not the spectra. It is intended for when one is interested solely in the imaging data, and includes sliders to change the wavelength of the observation.
    :param data: The data to explore; this can be either one or two spectral lines (support for more than two can be added if required). This is the only required argument to view the data.
:type data: str or list or CRISP or CRISPSequence or CRISPNonU or CRISPNonUSequence
:param wcs: A prescribed world coordinate system. If None, the world coordinate system is derived from the data. Default is None.
:type wcs: astropy.wcs.WCS or None, optional
:param uncertainty: The uncertainty in the intensity values of the data. Default is None.
:type uncertainty: numpy.ndarray or None, optional
:param mask: A mask to be used on the data. Default is None.
:type mask: numpy.ndarray or None, optional
:param nonu: Whether or not the spectral axis is non-uniform. Default is False.
:type nonu: bool, optional
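
    A minimal usage sketch, run from a Jupyter notebook (the file names are placeholders and the module path ``crispy.visualisation`` is an assumption, not taken from this file)::

        from crispy.visualisation import ImageViewer

        iv = ImageViewer("ca8542_scan.fits")                         # one spectral line
        iv2 = ImageViewer(["ca8542_scan.fits", "halpha_scan.fits"])  # two lines side by side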
"""
def __init__(self, data, wcs=None, uncertainty=None, mask=None, nonu=False):
plt.style.use("bmh")
self.aa = html.unescape("Å")
self.l = html.unescape("λ")
self.a = html.unescape("α")
self.D = html.unescape("Δ")
if not nonu:
if type(data) == str:
self.cube = CRISP(filename=data, wcs=wcs, uncertainty=uncertainty, mask=mask)
if self.cube.file.data.ndim == 3:
self.wvls = self.cube.wave(np.arange(self.cube.shape[0])) << u.Angstrom
elif self.cube.file.data.ndim == 4:
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
elif type(data) == list:
data = CRISP_sequence_constructor(data, wcs=wcs, uncertainty=uncertainty, mask=mask, nonu=nonu)
self.cube = CRISPSequence(files=data)
if self.cube.list[0].file.data.ndim == 3:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[0])) << u.Angstrom
elif self.cube.list[0].file.data.ndim == 4:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[1])) << u.Angstrom
if self.cube.list[1].file.data.ndim == 3:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[0])) << u.Angstrom
elif self.cube.list[1].file.data.ndim == 4:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[1])) << u.Angstrom
elif type(data) == CRISP:
self.cube = data
if self.cube.file.data.ndim == 3:
self.wvls = self.cube.wave(np.arange(self.cube.shape[0])) << u.Angstrom
elif self.cube.file.data.ndim == 4:
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
elif type(data) == CRISPSequence:
self.cube = data
if self.cube.list[0].file.data.ndim == 3:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[0])) << u.Angstrom
elif self.cube.list[0].file.data.ndim == 4:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[1])) << u.Angstrom
if self.cube.list[1].file.data.ndim == 3:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[0])) << u.Angstrom
elif self.cube.list[1].file.data.ndim == 4:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[1])) << u.Angstrom
else:
if type(data) == str:
self.cube = CRISPNonU(filename=data, wcs=wcs, uncertainty=uncertainty, mask=mask)
if self.cube.file.data.ndim == 3:
self.wvls = self.cube.wave(np.arange(self.cube.shape[0])) << u.Angstrom
elif self.cube.file.data.ndim == 4:
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
elif type(data) == list:
data = CRISP_sequence_constructor(data, wcs=wcs, uncertainty=uncertainty, mask=mask, nonu=nonu)
self.cube = CRISPNonUSequence(files=data)
if self.cube.list[0].file.data.ndim == 3:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[0])) << u.Angstrom
elif self.cube.list[0].file.data.ndim == 4:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[1])) << u.Angstrom
if self.cube.list[1].file.data.ndim == 3:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[0])) << u.Angstrom
elif self.cube.list[1].file.data.ndim == 4:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[1])) << u.Angstrom
elif type(data) == CRISPNonU:
self.cube = data
if self.cube.file.data.ndim == 3:
self.wvls = self.cube.wave(np.arange(self.cube.shape[0])) << u.Angstrom
elif self.cube.file.data.ndim == 4:
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
elif type(data) == CRISPNonUSequence:
self.cube = data
if self.cube.list[0].file.data.ndim == 3:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[0])) << u.Angstrom
elif self.cube.list[0].file.data.ndim == 4:
self.wvls1 = self.cube.list[0].wave(np.arange(self.cube.list[0].shape[1])) << u.Angstrom
if self.cube.list[1].file.data.ndim == 3:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[0])) << u.Angstrom
elif self.cube.list[1].file.data.ndim == 4:
self.wvls2 = self.cube.list[1].wave(np.arange(self.cube.list[1].shape[1])) << u.Angstrom
if type(self.cube) == CRISP or type(self.cube) == CRISPNonU:
self.fig = plt.figure(figsize=(8,10))
try:
self.ax1 = self.fig.add_subplot(1, 1, 1, projection=self.cube.wcs.dropaxis(-1))
except:
self.ax1 = self.fig.add_subplot(1, 1, 1, projection=SlicedLowLevelWCS(self.cube[0].wcs.low_level_wcs, 0))
self.ax1.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax1.set_xlabel("Helioprojective Longitude [arcsec]")
ll = widgets.SelectionSlider(options=[np.round(l - np.median(self.wvls), decimals=2).value for l in self.wvls], description = f"{self.D} {self.l} [{self.aa}]")
out1 = widgets.interactive_output(self._img_plot1, {"ll" : ll})
display(widgets.HBox([ll]))
elif type(self.cube) == CRISPSequence or type(self.cube) == CRISPNonUSequence:
self.fig = plt.figure(figsize=(8,10))
try:
self.ax1 = self.fig.add_subplot(1, 2, 1, projection=self.cube.list[0].wcs.dropaxis(-1))
except:
self.ax1 = self.fig.add_subplot(1, 2, 1, projection=SlicedLowLevelWCS(self.cube.list[0][0].wcs.low_level_wcs, 0))
self.ax1.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax1.set_xlabel("Helioprojective Longitude [arcsec]")
try:
self.ax2 = self.fig.add_subplot(1, 2, 2, projection=self.cube.list[1].wcs.dropaxis(-1))
except:
self.ax2 = self.fig.add_subplot(1, 2, 2, projection=SlicedLowLevelWCS(self.cube.list[1][0].wcs.low_level_wcs, 0))
self.ax2.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax2.set_xlabel("Helioprojective Longitude [arcsec]")
ll1 = widgets.SelectionSlider(
options=[np.round(l - np.median(self.wvls1), decimals=2).value for l in self.wvls1],
description=fr"{self.D} {self.l}$_{1}$ [{self.aa}]",
style={"description_width" : "initial"}
)
ll2 = widgets.SelectionSlider(
options=[np.round(l - np.median(self.wvls2), decimals=2).value for l in self.wvls2],
description=fr"{self.D} {self.l}$_{2}$ [{self.aa}]",
style={"description_width" : "initial"}
)
out1 = widgets.interactive_output(self._img_plot2, {"ll1" : ll1, "ll2" : ll2})
display(widgets.HBox([widgets.VBox([ll1, ll2])]))
done_button = widgets.Button(description="Done")
done_button.on_click(self._disconnect_matplotlib)
save_button = widgets.Button(description="Save")
save_button.on_click(self._save)
display(widgets.HBox([done_button, save_button]))
widgets.interact(self._file_name, fn= widgets.Text(description="Filename to save as: ", style={"description_width" : "initial"}, layout=widgets.Layout(width="50%")))
def _disconnect_matplotlib(self, _):
self.fig.canvas.mpl_disconnect(self.receiver)
def _save(self, _):
self.fig.savefig(self.filename, dpi=300)
def _file_name(self, fn):
self.filename = fn
def _img_plot1(self, ll):
if self.ax1.images == []:
pass
elif self.ax1.images[-1].colorbar is not None:
self.ax1.images[-1].colorbar.remove()
ll_idx = int(np.where(np.round(self.wvls, decimals=2).value == np.round(np.median(self.wvls).value + ll, decimals=2))[0])
try:
            data = self.cube.file.data[ll_idx].astype(float)
data[data < 0] = np.nan
im1 = self.ax1.imshow(data, cmap="Greys_r")
except:
            data = self.cube.file.data[0, ll_idx].astype(float)
data[data < 0] = np.nan
im1 = self.ax1.imshow(data, cmap="Greys_r")
try:
el = self.cube.file.header["WDESC1"]
except KeyError:
el = self.cube.file.header["element"]
self.ax1.set_title(fr"{el} {self.aa} {self.D} {self.l}$_{1}$ = {ll} {self.aa}")
self.fig.colorbar(im1, ax=self.ax1, orientation="horizontal", label="Intensity [DNs]")
def _img_plot2(self, ll1, ll2):
if self.ax1.images == []:
pass
elif self.ax1.images[-1].colorbar is not None:
self.ax1.images[-1].colorbar.remove()
if self.ax2.images == []:
pass
elif self.ax2.images[-1].colorbar is not None:
self.ax2.images[-1].colorbar.remove()
ll1_idx = int(np.where(np.round(self.wvls1, decimals=2).value == np.round(np.median(self.wvls1).value + ll1, decimals=2))[0])
ll2_idx = int(np.where(np.round(self.wvls2, decimals=2).value == np.round(np.median(self.wvls2).value + ll2, decimals=2))[0])
try:
            data = self.cube.list[0].file.data[ll1_idx].astype(float)
data[data < 0] = np.nan
im1 = self.ax1.imshow(data, cmap="Greys_r")
except:
            data = self.cube.list[0].file.data[0, ll1_idx].astype(float)
data[data < 0] = np.nan
im1 = self.ax1.imshow(data, cmap="Greys_r")
try:
            data = self.cube.list[1].file.data[ll2_idx].astype(float)
data[data < 0] = np.nan
im2 = self.ax2.imshow(data, cmap="Greys_r")
except:
            data = self.cube.list[1].file.data[0, ll2_idx].astype(float)
data[data < 0] = np.nan
im2 = self.ax2.imshow(data, cmap="Greys_r")
try:
el1 = self.cube.list[0].file.header["WDESC1"]
el2 = self.cube.list[1].file.header["WDESC1"]
except KeyError:
el1 = self.cube.list[0].file.header["element"]
el2 = self.cube.list[1].file.header["element"]
self.ax1.set_title(fr"{el1} {self.aa} {self.D} {self.l}$_{1}$ = {ll1} {self.aa}")
self.ax2.set_title(fr"{el2} {self.aa} {self.D} {self.l}$_{2}$ = {ll2} {self.aa}")
self.fig.colorbar(im1, ax=self.ax1, orientation="horizontal", label="Intensity [DNs]")
self.fig.colorbar(im2, ax=self.ax2, orientation="horizontal", label="Intensity [DNs]")
class SpectralTimeViewer:
"""
    Imaging spectroscopic viewer. SpectralTimeViewer should be used when one wants to click on points of an image and have the spectrum displayed for that point, along with the time series at a chosen wavelength over the range of observations. This works **exclusively** in Jupyter notebooks but can be a nice data exploration tool. This viewer utilises the data structures defined in `crispy.crisp` and has many variable options.
    :param data1: The data to explore; this is one spectral line. This is the only required argument to view the data.
:type data1: list or CRISPSequence or CRISPNonUSequence
    :param data2: A second set of data to explore, if desired. Default is None.
:type data2: list or CRISPSequence or CRISPNonUSequence
:param wcs: A prescribed world coordinate system. If None, the world coordinate system is derived from the data. Default is None.
:type wcs: astropy.wcs.WCS or None, optional
:param uncertainty: The uncertainty in the intensity values of the data. Default is None.
:type uncertainty: numpy.ndarray or None, optional
:param mask: A mask to be used on the data. Default is None.
:type mask: numpy.ndarray or None, optional
:param nonu: Whether or not the spectral axis is non-uniform. Default is False.
:type nonu: bool, optional
:cvar coords: The coordinates selected to produce spectra.
:type coords: list[tuple]
:cvar px_coords: The coordinates selected to produce spectra in pixel space. This is important for indexing the data later to get the correct spectra.
:type px_coords: list[tuple]
    :cvar shape_type: Spectra can be selected for a single point or for a box of specified dimensions whose top-left corner is where the user clicks. This attribute records which selection was made with which shape.
:type shape_type: list[str]
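
    A minimal usage sketch, run from a Jupyter notebook (the file names are placeholders and the module path ``crispy.visualisation`` is an assumption, not taken from this file)::

        from crispy.visualisation import SpectralTimeViewer

        scans = ["ca8542_t000.fits", "ca8542_t001.fits", "ca8542_t002.fits"]
        stv = SpectralTimeViewer(scans)  # click on the image to extract spectra and light curves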
"""
def __init__(self, data1, data2=None, wcs=None, uncertainty=None, mask=None, nonu=False):
plt.style.use("bmh")
self.aa = html.unescape("Å")
self.l = html.unescape("λ")
self.a = html.unescape("α")
self.D = html.unescape("Δ")
shape = widgets.Dropdown(options=["point", "box"], value="point", description="Shape: ")
if not nonu:
if type(data1) == list:
data1 = CRISP_sequence_constructor(data1, wcs=wcs, uncertainty=uncertainty, mask=mask, nonu=nonu)
self.cube1 = CRISPSequence(files=data1)
if self.cube1.list[0].file.data.ndim == 3:
self.wvls1 = self.cube1.list[0].wave(np.arange(self.cube1.list[0].shape[0])) << u.Angstrom
elif self.cube1.list[0].file.data.ndim == 4:
self.wvls1 = self.cube1.list[0].wave(np.arange(self.cube1.list[0].shape[1])) << u.Angstrom
elif type(data1) == CRISPSequence:
self.cube1 = data1
if self.cube1.list[0].file.data.ndim == 3:
                    self.wvls1 = self.cube1.list[0].wave(np.arange(self.cube1.list[0].shape[0])) << u.Angstrom
                elif self.cube1.list[0].file.data.ndim == 4:
                    self.wvls1 = self.cube1.list[0].wave(np.arange(self.cube1.list[0].shape[1])) << u.Angstrom
            if data2 is None:
pass
elif type(data2) == list:
data2 = CRISP_sequence_constructor(data2, wcs=wcs, uncertainty=uncertainty, mask=mask, nonu=nonu)
self.cube2 = CRISPSequence(files=data2)
if self.cube2.list[0].file.data.ndim == 3:
                    self.wvls2 = self.cube2.list[0].wave(np.arange(self.cube2.list[0].shape[0])) << u.Angstrom
                elif self.cube2.list[0].file.data.ndim == 4:
                    self.wvls2 = self.cube2.list[0].wave(np.arange(self.cube2.list[0].shape[1])) << u.Angstrom
elif type(data2) == CRISPSequence:
self.cube2 = data2
if self.cube2.list[0].file.data.ndim == 3:
                    self.wvls2 = self.cube2.list[0].wave(np.arange(self.cube2.list[0].shape[0])) << u.Angstrom
                elif self.cube2.list[0].file.data.ndim == 4:
                    self.wvls2 = self.cube2.list[0].wave(np.arange(self.cube2.list[0].shape[1])) << u.Angstrom
else:
if type(data1) == list:
data1 = CRISP_sequence_constructor(data1, wcs=wcs, uncertainty=uncertainty, mask=mask, nonu=nonu)
self.cube1 = CRISPNonUSequence(files=data1)
if self.cube1.list[0].file.data.ndim == 3:
self.wvls1 = self.cube1.list[0].wave(np.arange(self.cube1.list[0].shape[0])) << u.Angstrom
elif self.cube1.list[0].file.data.ndim == 4:
self.wvls1 = self.cube1.list[0].wave(np.arange(self.cube1.list[0].shape[1])) << u.Angstrom
elif type(data1) == CRISPNonUSequence:
                self.cube1 = data1
if self.cube1.list[0].file.data.ndim == 3:
self.wvls1 = self.cube1.list[0].wave(np.arange(self.cube1.list[0].shape[0])) << u.Angstrom
elif self.cube1.list[0].file.data.ndim == 4:
self.wvls1 = self.cube1.list[0].wave(np.arange(self.cube1.list[0].shape[1])) << u.Angstrom
            if data2 is None:
pass
elif type(data2) == list:
data2 = CRISP_sequence_constructor(data2, wcs=wcs, uncertainty=uncertainty, mask=mask, nonu=nonu)
self.cube2 = CRISPNonUSequence(files=data2)
if self.cube2.list[0].file.data.ndim == 3:
                    self.wvls2 = self.cube2.list[0].wave(np.arange(self.cube2.list[0].shape[0])) << u.Angstrom
                elif self.cube2.list[0].file.data.ndim == 4:
                    self.wvls2 = self.cube2.list[0].wave(np.arange(self.cube2.list[0].shape[1])) << u.Angstrom
elif type(data2) == CRISPNonUSequence:
self.cube2 = data2
if self.cube2.list[0].file.data.ndim == 3:
                    self.wvls2 = self.cube2.list[0].wave(np.arange(self.cube2.list[0].shape[0])) << u.Angstrom
                elif self.cube2.list[0].file.data.ndim == 4:
                    self.wvls2 = self.cube2.list[0].wave(np.arange(self.cube2.list[0].shape[1])) << u.Angstrom
        if data2 is None:
self.fig = plt.figure(figsize=(8,10))
self.gs = self.fig.add_gridspec(nrows=2, ncols=2)
if self.cube1.list[0].file.data.ndim == 3:
self.ax1 = self.fig.add_subplot(self.gs[0,0], projection=self.cube1.list[0].wcs.dropaxis(-1))
elif self.cube1.list[0].file.data.ndim == 4:
self.ax1 = self.fig.add_subplot(self.gs[0,0], projection=SlicedLowLevelWCS(self.cube1.list[0][0].wcs.low_level_wcs, 0))
self.ax1.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax1.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax2 = self.fig.add_subplot(self.gs[0,1])
self.ax2.yaxis.set_label_position("right")
self.ax2.yaxis.tick_right()
self.ax2.set_ylabel("I [DNs]")
self.ax2.set_xlabel(f"{self.l} [{self.aa}]")
self.ax2.tick_params(direction="in")
self.ax3 = self.fig.add_subplot(self.gs[1,:])
self.ax3.set_ylabel("I [DNs]")
self.ax3.set_xlabel("Time [UTC]")
self.ll = widgets.SelectionSlider(options=[np.round(l - np.median(self.wvls1), decimals=2).value for l in self.wvls1], description = f"{self.D} {self.l} [{self.aa}]")
self.t = widgets.IntSlider(value=0, min=0, max=len(self.cube1.list)-1, step=1, description="Time index: ", disabled=False)
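            # build the time axis from the observation time stored in each frame's header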
try:
self.times1 = [date2num(f.file.header["DATE-AVG"]) for f in self.cube1.list]
except KeyError:
self.times1 = [date2num(f.file.header["date_obs"]+" "+f.file.header["time_obs"]) for f in self.cube1.list]
out1 = widgets.interactive_output(self._img_plot1, {"ll" : self.ll, "t" : self.t})
out2 = widgets.interactive_output(self._shape, {"opts" : shape})
display(widgets.HBox([widgets.VBox([self.ll,self.t]), shape]))
else:
self.fig = plt.figure(figsize=(8,10))
self.gs = self.fig.add_gridspec(nrows=3, ncols=2)
try:
self.ax1 = self.fig.add_subplot(self.gs[0,0], projection=self.cube1.list[0].wcs.dropaxis(-1))
except:
self.ax1 = self.fig.add_subplot(self.gs[0,0], projection=SlicedLowLevelWCS(self.cube1.list[0][0].wcs.low_level_wcs, 0))
self.ax1.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax1.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax1.xaxis.set_label_position("top")
self.ax1.xaxis.tick_top()
try:
self.ax2 = self.fig.add_subplot(self.gs[1,0], projection=self.cube2.list[0].wcs.dropaxis(-1))
except:
self.ax2 = self.fig.add_subplot(self.gs[1,0], projection=SlicedLowLevelWCS(self.cube2.list[0][0].wcs.low_level_wcs, 0))
self.ax2.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax2.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax3 = self.fig.add_subplot(self.gs[0,1])
self.ax3.yaxis.set_label_position("right")
self.ax3.yaxis.tick_right()
self.ax3.set_ylabel("Intensity [DNs]")
self.ax3.set_xlabel(f"{self.l} [{self.aa}]")
self.ax3.xaxis.set_label_position("top")
self.ax3.xaxis.tick_top()
self.ax3.tick_params(direction="in")
self.ax4 = self.fig.add_subplot(self.gs[1,1])
self.ax4.yaxis.set_label_position("right")
self.ax4.yaxis.tick_right()
self.ax4.set_ylabel("Intensity [DNs]")
self.ax4.set_xlabel(f"{self.l} [{self.aa}]")
self.ax4.tick_params(direction="in")
self.ax5 = self.fig.add_subplot(self.gs[2,:])
self.ax5.set_ylabel("Intensity [DNs]")
self.ax5.set_xlabel("Time [UTC]")
self.ax5b = self.ax5.twinx()
self.ax5b.set_ylabel("Intensity [DNs]")
self.ll1 = widgets.SelectionSlider(
options=[np.round(l - np.median(self.wvls1), decimals=2).value for l in self.wvls1],
                description=fr"{self.D} {self.l}$_{1}$ [{self.aa}]",
style={"description_width" : "initial"}
)
self.ll2 = widgets.SelectionSlider(
options=[np.round(l - np.median(self.wvls2), decimals=2).value for l in self.wvls2],
                description=fr"{self.D} {self.l}$_{2}$ [{self.aa}]",
style={"description_width" : "initial"}
)
self.t1 = widgets.IntSlider(value=0, min=0, max=len(self.cube1.list)-1, step=1, disabled=False, description=r"t$_{1}$ index: ")
self.t2 = widgets.IntSlider(value=0, min=0, max=len(self.cube2.list)-1, step=1, disabled=False, description=r"t$_{2}$ index: ")
try:
self.times1 = [date2num(f.file.header["DATE-AVG"]) for f in self.cube1.list]
self.times2 = [date2num(f.file.header["DATE-AVG"]) for f in self.cube2.list]
except KeyError:
self.times1 = [date2num(f.file.header["date_obs"]+" "+f.file.header["time_obs"]) for f in self.cube1.list]
self.times2 = [date2num(f.file.header["date_obs"]+" "+f.file.header["time_obs"]) for f in self.cube2.list]
out1 = widgets.interactive_output(self._img_plot2, {"ll1" : self.ll1, "ll2" : self.ll2, "t1" : self.t1, "t2" : self.t2})
out2 = widgets.interactive_output(self._shape, {"opts" : shape})
display(widgets.HBox([widgets.VBox([widgets.HBox([self.ll1, self.ll2]),widgets.HBox([self.t1, self.t2])]), shape]))
self.coords = []
self.px_coords = []
self.shape_type = []
self.box_coords = []
self.colour_idx = 0
self.n = 0
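        # connect mouse clicks on the images to the point/box selection handler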
self.receiver = self.fig.canvas.mpl_connect("button_press_event", self._on_click)
x = widgets.IntText(value=1, min=1, max=self.cube1.list[0].shape[-1], description="x [pix]")
y = widgets.IntText(value=1, min=1, max=self.cube1.list[0].shape[-2], description="y [pix]")
outx = widgets.interactive_output(self._boxx, {"x" : x})
outy = widgets.interactive_output(self._boxy, {"y" : y})
display(widgets.HBox([x, y]))
done_button = widgets.Button(description="Done")
done_button.on_click(self._disconnect_matplotlib)
clear_button = widgets.Button(description="Clear")
clear_button.on_click(self._clear)
save_button = widgets.Button(description="Save")
save_button.on_click(self._save)
display(widgets.HBox([done_button, clear_button, save_button]))
widgets.interact(self._file_name, fn= widgets.Text(description="Filename to save as: ", style={"description_width" : "initial"}, layout=widgets.Layout(width="50%")))
def _on_click(self, event):
if self.fig.canvas.manager.toolbar.mode != "":
return
if not hasattr(self, "cube2"):
if self.shape == "point":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
centre_coord = int(event.ydata), int(event.xdata)
self.px_coords.append(centre_coord)
self.shape_type.append("point")
circ = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
self.ax1.add_patch(circ)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt = self.ax1.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
px = self.cube1.list[self.t.value].to_lonlat(*centre_coord) << u.arcsec
if self.cube1.list[0].file.data.ndim == 3:
self.ax2.plot(self.wvls1, self.cube1.list[self.t.value].file.data[:, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube1.list[0].file.data.ndim == 4:
self.ax2.plot(self.wvls1, self.cube1.list[self.t.value].file.data[0, :, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax2.legend()
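                # convert the wavelength-offset slider value back to the index of the matching wavelength point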
ll_idx = int(np.where(np.round(self.wvls1, decimals=2).value == np.round(np.median(self.wvls1).value + self.ll.value, decimals=2))[0])
if self.cube1.list[0].file.data.ndim == 3:
i_time1 = [f.file.data[ll_idx, centre_coord[0], centre_coord[1]] for f in self.cube1.list]
self.ax3.plot(self.times1, i_time1, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube1.list[0].file.data.ndim == 4:
i_time1 = [f.file.data[0, ll_idx, centre_coord[0], centre_coord[1]] for f in self.cube1.list]
self.ax3.plot(self.times1, i_time1, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax3.xaxis.set_major_formatter(DateFormatter("%H:%M:%S"))
for label in self.ax3.get_xticklabels():
label.set_rotation(40)
label.set_horizontalalignment('right')
self.coords.append(px)
self.colour_idx += 1
self.fig.canvas.draw()
elif self.shape == "box":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
box_anchor = int(event.ydata), int(event.xdata)
self.px_coords.append(box_anchor)
self.shape_type.append("box")
                # obtain the coordinates of the box on a grid with pixels the size of the box to make sure there are no duplicate boxes
box_coord = box_anchor[0] // self.boxy, box_anchor[1] // self.boxx
if box_coord in self.box_coords:
coords = [p.get_xy() for p in self.ax1.patches]
                    for p in self.ax1.patches:
                        if p.get_xy() == box_anchor[::-1]:
p.remove()
idx = self.box_coords.index(box_coord)
del self.box_coords[idx]
del self.px_coords[idx]
del self.shape_type[idx]
del self.coords[idx]
return
self.coords.append(self.cube1.list[0].to_lonlat(*box_anchor) << u.arcsec)
rect = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
rect.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
self.ax1.add_patch(rect)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt = self.ax1.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
if self.cube1.list[0].file.data.ndim == 3:
self.ax2.plot(self.wvls1, np.mean(self.cube1.list[self.t.value].file.data[:,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx],axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube1.list[0].file.data.ndim == 4:
self.ax2.plot(self.wvls1, np.mean(self.cube1.list[self.t.value].file.data[0, :,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx],axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax2.legend()
ll_idx = int(np.where(np.round(self.wvls1, decimals=2).value == np.round(np.median(self.wvls1).value + self.ll.value, decimals=2))[0])
if self.cube1.list[0].file.data.ndim == 3:
i_time1 = [np.mean(f.file.data[ll_idx,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx]) for f in self.cube1.list]
self.ax3.plot(self.times1, i_time1, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube1.list[0].file.data.ndim == 4:
i_time1 = [np.mean(f.file.data[0, ll_idx,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx]) for f in self.cube1.list]
self.ax3.plot(self.times1, i_time1, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax3.xaxis.set_major_formatter(DateFormatter("%H:%M:%S"))
for label in self.ax3.get_xticklabels():
label.set_rotation(40)
label.set_horizontalalignment('right')
self.colour_idx += 1
self.fig.canvas.draw()
else:
if self.shape == "point":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
                centre_coord = int(event.ydata), int(event.xdata) # with a WCS projection the event data is already in pixel coordinates, so the conversion to world coordinates is done later
self.px_coords.append(centre_coord)
circ1 = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
circ2 = patches.Circle(centre_coord[::-1], radius=10, facecolor=list(pt_bright_cycler)[self.colour_idx]["color"], edgecolor="k", linewidth=1)
self.ax1.add_patch(circ1)
self.ax2.add_patch(circ2)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt_1 = self.ax1.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt_2 = self.ax2.text(centre_coord[1]+20, centre_coord[0]+10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt_1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
txt_2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
px = self.cube1.list[0].to_lonlat(*centre_coord) << u.arcsec
if self.cube1.list[0].file.data.ndim == 3:
self.ax3.plot(self.wvls1, self.cube1.list[self.t1.value].file.data[:, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube1.list[0].file.data.ndim == 4:
self.ax3.plot(self.wvls1, self.cube1.list[self.t1.value].file.data[0, :, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
if self.cube2.list[0].file.data.ndim == 3:
self.ax4.plot(self.wvls2, self.cube2.list[self.t2.value].file.data[:, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube2.list[0].file.data.ndim == 4:
self.ax4.plot(self.wvls2, self.cube2.list[self.t2.value].file.data[0, :, centre_coord[0], centre_coord[1]], marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax3.legend()
self.ax4.legend()
ll_idx1 = int(np.where(np.round(self.wvls1, decimals=2).value == np.round(np.median(self.wvls1).value + self.ll1.value, decimals=2))[0])
ll_idx2 = int(np.where(np.round(self.wvls2, decimals=2).value == np.round(np.median(self.wvls2).value + self.ll2.value, decimals=2))[0])
if self.cube1.list[0].file.data.ndim == 3:
i_time1 = [f.file.data[ll_idx1, centre_coord[0], centre_coord[1]] for f in self.cube1.list]
elif self.cube1.list[0].file.data.ndim == 4:
i_time1 = [f.file.data[0, ll_idx1, centre_coord[0], centre_coord[1]] for f in self.cube1.list]
if self.cube2.list[0].file.data.ndim == 3:
i_time2 = [f.file.data[ll_idx2, centre_coord[0], centre_coord[1]] for f in self.cube2.list]
elif self.cube2.list[0].file.data.ndim == 4:
i_time2 = [f.file.data[0, ll_idx2, centre_coord[0], centre_coord[1]] for f in self.cube2.list]
self.ax5.plot(self.times1, i_time1, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax5b.plot(self.times2, i_time2, linestyle="--", marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax5.xaxis.set_major_formatter(DateFormatter("%H:%M:%S"))
for label in self.ax5.get_xticklabels():
label.set_rotation(40)
label.set_horizontalalignment('right')
self.coords.append(px)
self.colour_idx += 1
self.fig.canvas.draw()
elif self.shape == "box":
if self.colour_idx > len(pt_bright_cycler)-1:
self.colour_idx = 0
self.n += 1
box_anchor = int(event.ydata), int(event.xdata)
self.px_coords.append(box_anchor)
self.shape_type.append("box")
                # obtain the coordinates of the box on a grid with pixels the size of the box to make sure there are no duplicate boxes
box_coord = box_anchor[0] // self.boxy, box_anchor[1] // self.boxx
if box_coord in self.box_coords:
coords = [p.get_xy() for p in self.ax1.patches]
                    for p in self.ax1.patches:
                        if p.get_xy() == box_anchor[::-1]:
p.remove()
idx = self.box_coords.index(box_coord)
del self.box_coords[idx]
del self.px_coords[idx]
del self.shape_type[idx]
del self.coords[idx]
return
self.coords.append(self.cube1.list[0].to_lonlat(*box_anchor) << u.arcsec)
rect1 = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
rect1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
rect2 = patches.Rectangle(box_anchor[::-1], self.boxx, self.boxy, linewidth=2, edgecolor=list(pt_bright_cycler)[self.colour_idx]["color"], facecolor="none")
rect2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
self.ax1.add_patch(rect1)
self.ax2.add_patch(rect2)
font = {
"size" : 12,
"color" : list(pt_bright_cycler)[self.colour_idx]["color"]
}
txt1 = self.ax1.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt1.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
                txt2 = self.ax2.text(box_anchor[1]-50, box_anchor[0]-10, s=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", fontdict=font)
txt2.set_path_effects([PathEffects.withStroke(linewidth=3, foreground="k")])
if self.cube1.list[0].file.data.ndim == 3:
self.ax3.plot(self.wvls1, np.mean(self.cube1.list[self.t1.value].file.data[:, box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx], axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube1.list[0].file.data.ndim == 4:
self.ax3.plot(self.wvls1, np.mean(self.cube1.list[self.t1.value].file.data[0, :, box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx], axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
if self.cube2.list[0].file.data.ndim == 3:
self.ax4.plot(self.wvls2, np.mean(self.cube2.list[self.t2.value].file.data[:, box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx], axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
elif self.cube2.list[0].file.data.ndim == 4:
self.ax4.plot(self.wvls2, np.mean(self.cube2.list[self.t2.value].file.data[0, :, box_anchor[0]:box_anchor[0]+self.boxy, box_anchor[1]:box_anchor[1]+self.boxx], axis=(1,2)), marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax3.legend()
self.ax4.legend()
ll_idx1 = int(np.where(np.round(self.wvls1, decimals=2).value == np.round(np.median(self.wvls1).value + self.ll1.value, decimals=2))[0])
ll_idx2 = int(np.where(np.round(self.wvls2, decimals=2).value == np.round(np.median(self.wvls2).value + self.ll2.value, decimals=2))[0])
if self.cube1.list[0].file.data.ndim == 3:
i_time1 = [np.mean(f.file.data[ll_idx1,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx]) for f in self.cube1.list]
elif self.cube1.list[0].file.data.ndim == 4:
i_time1 = [np.mean(f.file.data[0, ll_idx1,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx]) for f in self.cube1.list]
if self.cube2.list[0].file.data.ndim == 3:
i_time2 = [np.mean(f.file.data[ll_idx2,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx]) for f in self.cube2.list]
elif self.cube2.list[0].file.data.ndim == 4:
i_time2 = [np.mean(f.file.data[0, ll_idx2,box_anchor[0]:box_anchor[0]+self.boxy,box_anchor[1]:box_anchor[1]+self.boxx]) for f in self.cube2.list]
self.ax5.plot(self.times1, i_time1, marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax5b.plot(self.times2, i_time2, linestyle="--", marker=Line2D.filled_markers[self.colour_idx+self.n*len(pt_bright_cycler)], label=f"{self.colour_idx+1+(self.n*len(pt_bright_cycler))}", c=list(pt_bright_cycler)[self.colour_idx]["color"])
self.ax5.xaxis.set_major_formatter(DateFormatter("%H:%M:%S"))
for label in self.ax5.get_xticklabels():
label.set_rotation(40)
label.set_horizontalalignment('right')
self.colour_idx += 1
                self.fig.canvas.draw()
def _shape(self, opts):
self.shape = opts
def _boxx(self, x):
self.boxx = x
def _boxy(self, y):
self.boxy = y
def _disconnect_matplotlib(self, _):
self.fig.canvas.mpl_disconnect(self.receiver)
def _clear(self, _):
self.coords = []
self.px_coords = []
self.shape_type = []
self.box_coords = []
self.colour_idx = 0
self.n = 0
if not hasattr(self, "cube2"):
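            # remove selection patches and text labels in repeated passes until the artist lists are empty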
while len(self.ax1.patches) > 0:
for p in self.ax1.patches:
p.remove()
while len(self.ax1.texts) > 0:
for t in self.ax1.texts:
t.remove()
self.ax2.clear()
self.ax2.set_ylabel("Intensity [DNs]")
self.ax2.set_xlabel(f"{self.l} [{self.aa}]")
self.ax3.clear()
self.ax3.set_ylabel("I [DNs]")
self.ax3.set_xlabel("Time [UTC]")
self.fig.canvas.draw()
self.fig.canvas.flush_events()
else:
while len(self.ax1.patches) > 0:
for p in self.ax1.patches:
p.remove()
while len(self.ax2.patches) > 0:
for p in self.ax2.patches:
p.remove()
while len(self.ax1.texts) > 0:
for t in self.ax1.texts:
t.remove()
while len(self.ax2.texts) > 0:
for t in self.ax2.texts:
t.remove()
self.ax3.clear()
self.ax3.set_ylabel("Intensity [DNs]")
self.ax3.set_xlabel(f"{self.l} [{self.aa}]")
self.ax4.clear()
self.ax4.set_ylabel("Intensity [DNs]")
self.ax4.set_xlabel(f"{self.l} [{self.aa}]")
self.ax5.clear()
self.ax5.set_ylabel("I [DNs]")
self.ax5.set_xlabel("Time [UTC]")
self.fig.canvas.draw()
self.fig.canvas.flush_events()
def _save(self, _):
self.fig.savefig(self.filename, dpi=300)
def _file_name(self, fn):
self.filename = fn
def _img_plot1(self, ll, t):
if self.ax1.images == []:
pass
elif self.ax1.images[-1].colorbar != None:
self.ax1.images[-1].colorbar.remove()
ll_idx = int(np.where(np.round(self.wvls1, decimals=2).value == np.round(np.median(self.wvls1).value + ll, decimals=2))[0])
try:
            data = self.cube1.list[t].file.data[ll_idx].astype(float)
data[data < 0] = np.nan
im1 = self.ax1.imshow(data, cmap="Greys_r")
except:
            data = self.cube1.list[t].file.data[0, ll_idx].astype(float)
data[data < 0] = np.nan
im1 = self.ax1.imshow(data, cmap="Greys_r")
try:
el = self.cube1.list[0].file.header["WDESC1"]
except KeyError:
el = self.cube1.list[0].file.header["element"]
self.ax1.set_title(fr"{el} {self.aa} {self.D} {self.l}$_{1}$ = {ll} {self.aa}")
self.fig.colorbar(im1, ax=self.ax1, orientation="horizontal", label="Intensity [DNs]")
def _img_plot2(self, ll1, ll2, t1, t2):
if self.ax1.images == []:
pass
elif self.ax1.images[-1].colorbar != None:
self.ax1.images[-1].colorbar.remove()
if self.ax2.images == []:
pass
elif self.ax2.images[-1].colorbar != None:
self.ax2.images[-1].colorbar.remove()
ll1_idx = int(np.where(np.round(self.wvls1, decimals=2).value == np.round(np.median(self.wvls1).value + ll1, decimals=2))[0])
ll2_idx = int(np.where(np.round(self.wvls2, decimals=2).value == np.round(np.median(self.wvls2).value + ll2, decimals=2))[0])
try:
            data1 = self.cube1.list[t1].file.data[ll1_idx].astype(float)
data1[data1 < 0] = np.nan
im1 = self.ax1.imshow(data1, cmap="Greys_r")
except:
            data1 = self.cube1.list[t1].file.data[0, ll1_idx].astype(float)
data1[data1 < 0] = np.nan
im1 = self.ax1.imshow(data1, cmap="Greys_r")
try:
            data2 = self.cube2.list[t2].file.data[ll2_idx].astype(float)
data2[data2 < 0] = np.nan
im2 = self.ax2.imshow(data2, cmap="Greys_r")
except:
            data2 = self.cube2.list[t2].file.data[0, ll2_idx].astype(float)
data2[data2 < 0] = np.nan
im2 = self.ax2.imshow(data2, cmap="Greys_r")
try:
el1 = self.cube1.list[0].file.header["WDESC1"]
el2 = self.cube2.list[0].file.header["WDESC1"]
except KeyError:
el1 = self.cube1.list[0].file.header["element"]
el2 = self.cube2.list[0].file.header["element"]
self.ax1.set_title(fr"{el1} {self.aa} {self.D} {self.l}$_{1}$ = {ll1} {self.aa}")
self.ax2.set_title(fr"{el2} {self.aa} {self.D} {self.l}$_{2}$ = {ll2} {self.aa}")
self.fig.colorbar(im1, ax=self.ax1, orientation="vertical", label="Intensity [DNs]")
self.fig.colorbar(im2, ax=self.ax2, orientation="vertical", label="Intensity [DNs]")
class PolarimetricViewer:
"""
This class defines the visualisation tool for exploring narrowband imaging spectropolarimetric data. It is currently developed to look at only one spectral line at a time. The functionality is similar to the ``SpectralViewer`` defined above, but with an added Stokes parameter that can be changed.
:param data: The data to explore, this is one spectral line. This is the only required argument to view the data.
:type data: str or CRISP or CRISPNonU
:param wcs: A prescribed world coordinate system. If None, the world coordinate system is derived from the data. Default is None.
:type wcs: astropy.wcs.WCS or None, optional
:param uncertainty: The uncertainty in the intensity values of the data. Default is None.
:type uncertainty: numpy.ndarray or None, optional
:param mask: A mask to be used on the data. Default is None.
:type mask: numpy.ndarray or None, optional
:param nonu: Whether or not the spectral axis is non-uniform. Default is False.
:type nonu: bool, optional
:cvar coords: The coordinates selected to produce spectra.
:type coords: list[tuple]
:cvar px_coords: The coordinates selected to produce spectra in pixel space. This is important for indexing the data later to get the correct spectra.
:type px_coords: list[tuple]
:cvar shape_type: The spectra can be selected for a single point or for a box with specified dimensions with top-left corner where the user clicks. This attribute tells the user which point is described by which shape.
:type shape_type: list[str]
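:Example: A minimal usage sketch (the file name below is a placeholder):
    ``pv = PolarimetricViewer("stokes_cube.fits")`` - clicking on the image
    panel then populates the Stokes I, Q, U and V spectra axes.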
"""
def __init__(self, data, wcs=None, uncertainty=None, mask=None, nonu=False):
plt.style.use("bmh")
self.aa = html.unescape("Å")
self.l = html.unescape("λ")
self.a = html.unescape("α")
self.D = html.unescape("Δ")
shape = widgets.Dropdown(options=["point", "box"], value="point", description="Shape: ")
if not nonu:
if type(data) == str:
self.cube = CRISP(filename=data, wcs=wcs, uncertainty=uncertainty, mask=mask)
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
elif type(data) == CRISP:
self.cube = data
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
else:
if type(data) == str:
self.cube = CRISPNonU(filename=data, wcs=wcs, uncertainty=uncertainty, mask=mask)
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
elif type(data) == CRISPNonU:
self.cube = data
self.wvls = self.cube.wave(np.arange(self.cube.shape[1])) << u.Angstrom
self.fig = plt.figure(figsize=(12,10))
self.gs = self.fig.add_gridspec(nrows=2, ncols=6)
self.ax1 = self.fig.add_subplot(self.gs[:,:2], projection=SlicedLowLevelWCS(self.cube[0].wcs.low_level_wcs,0))
self.ax1.set_ylabel("Helioprojective Latitude [arcsec]")
self.ax1.set_xlabel("Helioprojective Longitude [arcsec]")
self.ax2 = self.fig.add_subplot(self.gs[0,2:4])
self.ax2.set_ylabel("I [DNs]")
self.ax2.set_xlabel(f"{self.l} [{self.aa}]")
self.ax2.tick_params(direction="in")
self.ax3 = self.fig.add_subplot(self.gs[0,4:])
self.ax3.yaxis.set_label_position("right")
self.ax3.yaxis.tick_right()
self.ax3.set_ylabel("Q [DNs]")
self.ax3.set_xlabel(f"{self.l} [{self.aa}]")
self.ax4 = self.fig.add_subplot(self.gs[1,2:4])
self.ax4.set_ylabel("U [DNs]")
self.ax4.set_xlabel(f"{self.l} [{self.aa}]")
self.ax5 = self.fig.add_subplot(self.gs[1,4:])
self.ax5.yaxis.set_label_position("right")
self.ax5.yaxis.tick_right()
self.ax5.set_ylabel("V [DNs]")
self.ax5.set_xlabel(f"{self.l} [{self.aa}]")
ll = widgets.SelectionSlider(options=[np.round(l - np.median(self.wvls)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Nov 20 09:59:09 2017
@author: nsde
"""
#%%
import numpy as np
import matplotlib.pyplot as plt
from scipy.linalg import expm as scipy_expm
from ddtn.helper.utility import get_dir, load_obj, save_obj, make_hashable
from ddtn.helper.math import null, create_grid
#%%
class setup_CPAB_transformer:
def __init__(self, ncx = 2,
ncy = 2,
valid_outside = True,
zero_trace = False,
zero_boundary = False,
name = 'cpab_basis',
override = False):
"""
Main class for setting up cpab_transformer object. The main purpose of
calling this class is to produce a file "cbap_basis.pkl" that contains
all information needed for the transformation.
Arguments:
ncx: number of rectangular cells in x direction
ncy: number of rectangular cells in y direction
valid_outside: boolean, determines if transformation is valid
outside the image region
zero_trace: boolean, if true the transformation is area
preserving <--> each affine transformation have
zero trace
zero_boundary: boolean, if true the velocity at the image boundary
is constrained to be zero. NOTE: zero_boundary and
valid_outside cannot both be True or False at the
same time
name: str, name for the created bases file. Default is
'cpab_basis', but can be used to create multiple
basis files for easy switch between them
override: if True, then a new basis will be saved to
'cpab_basis.pkl' even if it already exists
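Example (a sketch; the settings shown are illustrative):
    s = setup_CPAB_transformer(ncx=4, ncy=4, valid_outside=True)
    # s.B spans the null space of the constraint matrix L; a parameter
    # vector theta of length s.d selects one admissible velocity field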
"""
# We cannot have zero boundary and valid_outside at the same time
assert valid_outside != zero_boundary, '''valid_outside and zero_boundary
    cannot both be active or inactive at the same time; enable exactly one'''
# Domain information
self.valid_outside = valid_outside
self.zero_trace = zero_trace
self.zero_boundary = zero_boundary
self.minbound = [-1, -1]
self.maxbound = [1, 1]
self.ncx = ncx
self.ncy = ncy
self.nC = 4*ncx*ncy
self.inc_x = (self.maxbound[0] - self.minbound[0]) / self.ncx
self.inc_y = (self.maxbound[1] - self.minbound[1]) / self.ncy
self.Ashape = [2,3]
self.Asize = np.prod(self.Ashape)
dir_loc = get_dir(__file__)
self.filename = dir_loc + '/../' + name
# Try to load file with basis and vertices
try:
file = load_obj(self.filename)
if override:
print('File ' + name + '.pkl already exists, '
      'but override == True, '
      'so updating basis with new settings')
raise FileExistsError  # jump to the except-branch below and rebuild the basis
# File found -> load information
self.valid_outside = file['valid_outside']
self.zero_trace = file['zero_trace']
self.zero_boundary = file['zero_boundary']
self.B = file['B']
self.nConstrains = file['nConstrains']
self.cells_multiidx = file['cells_multiidx']
self.cells_verts = file['cells_verts']
self.ncx = file['ncx']
self.ncy = file['ncy']
self.nC = 4*self.ncx*self.ncy
self.inc_x = (self.maxbound[0] - self.minbound[0]) / self.ncx
self.inc_y = (self.maxbound[1] - self.minbound[1]) / self.ncy
loaded = True
except: # Else create it
# Call tessalation and get vertices of cells
self.cells_multiidx, self.cells_verts = self.tessalation()
# Find shared vertices (edges) where a continuity constraint needs to hold
self.shared_v, self.shared_v_idx = self.find_shared_verts()
# If the transformation should be valid outside of the image domain,
# calculate the auxiliary points and add them to the edges where a
# continuity constraint should hold
if self.valid_outside:
shared_v_outside, shared_v_idx_outside = self.find_shared_verts_outside()
if shared_v_outside.size != 0:
self.shared_v = np.concatenate((self.shared_v, shared_v_outside))
self.shared_v_idx = np.concatenate((self.shared_v_idx, shared_v_idx_outside))
# Create L
L = self.create_continuity_constrains()
# Update L with extra constraints if needed
if self.zero_trace:
Ltemp = self.create_zero_trace_constrains()
L = np.vstack((L, Ltemp))
if self.zero_boundary:
Ltemp = self.create_zero_boundary_constrains()
L = np.vstack((L, Ltemp))
# Number of constraints
self.nConstrains = L.shape[0]
# Find the null space of L, which is the basis B
self.B = null(L)
# Save all information
save_obj({
'B': self.B,
'D': self.B.shape[0],
'd': self.B.shape[1],
'nConstrains': self.nConstrains,
'cells_multiidx': self.cells_multiidx,
'cells_verts': self.cells_verts,
'nC': self.nC,
'ncx': self.ncx,
'ncy': self.ncy,
'inc_x': self.inc_x,
'inc_y': self.inc_y,
'minbound': self.minbound,
'maxbound': self.maxbound,
'valid_outside': self.valid_outside,
'zero_trace': self.zero_trace,
'zero_boundary': self.zero_boundary
}, self.filename)
loaded = False
# Get shapes of PA space and CPA space
self.D, self.d = self.B.shape
# Print information about basis
print(70*'-')
if loaded:
print('Loaded file ' + name + '.pkl, ' \
    'containing tessellation with settings:')
else:
    print('Creating file ' + name +'.pkl, ' \
        'containing tessellation with settings:')
print(' nx = {0}, ny = {1}'.format(self.ncx, self.ncy))
print(' valid outside = {0}'.format(self.valid_outside))
print(' zero boundary = {0}'.format(self.zero_boundary))
print(' volume preserving = {0}'.format(self.zero_trace))
print('With these settings, theta.shape = {0}x1'.format(self.B.shape[1]))
print(70*'-')
def tessalation(self):
""" Finds the coordinates of all cell vertices """
xmin, ymin = self.minbound
xmax, ymax = self.maxbound
Vx = np.linspace(xmin, xmax, self.ncx+1)
Vy = np.linspace(ymin, ymax, self.ncy+1)
cells_x = [ ]
cells_x_verts = [ ]
for i in range(self.ncy):
for j in range(self.ncx):
ul = tuple([Vx[j],Vy[i],1])
ur = tuple([Vx[j+1],Vy[i],1])
ll = tuple([Vx[j],Vy[i+1],1])
lr = tuple([Vx[j+1],Vy[i+1],1])
center = [(Vx[j]+Vx[j+1])/2,(Vy[i]+Vy[i+1])/2,1]
center = tuple(center)
cells_x_verts.append((center,ul,ur)) # order matters!
cells_x_verts.append((center,ur,lr)) # order matters!
cells_x_verts.append((center,lr,ll)) # order matters!
cells_x_verts.append((center,ll,ul)) # order matters!
cells_x.append((j,i,0))
cells_x.append((j,i,1))
cells_x.append((j,i,2))
cells_x.append((j,i,3))
return cells_x, np.asarray(cells_x_verts)
def find_shared_verts(self):
""" Find all pair of cells that share a vertices that encode continuity
constrains inside the domain
"""
nC = self.nC
shared_v = [ ]
shared_v_idx = [ ]
for i in range(nC):
for j in range(nC):
vi = make_hashable(self.cells_verts[i])
vj = make_hashable(self.cells_verts[j])
shared_verts = set(vi).intersection(vj)
if len(shared_verts) == 2 and (j,i) not in shared_v_idx:
shared_v.append(list(shared_verts))
shared_v_idx.append((i,j))
return np.array(shared_v), shared_v_idx
def find_shared_verts_outside(self):
""" Find all pair of cells that share a vertices that encode continuity
constrains outside the domain
"""
shared_v = [ ]
shared_v_idx = [ ]
left = np.zeros((self.nC, self.nC), bool)
right = np.zeros((self.nC, self.nC), bool)
top = np.zeros((self.nC, self.nC), bool)
bottom = np.zeros((self.nC, self.nC), bool)
for i in range(self.nC):
for j in range(self.nC):
vi = make_hashable(self.cells_verts[i])
vj = make_hashable(self.cells_verts[j])
shared_verts = set(vi).intersection(vj)
mi = self.cells_multiidx[i]
mj = self.cells_multiidx[j]
# leftmost col, left triangle, adjacent rows
if mi[0]==mj[0]==0 and \
mi[2]==mj[2]==3 and \
np.abs(mi[1]-mj[1])==1:
left[i,j]=True
# rightmost col, right triangle, adjacent rows
if mi[0]==mj[0]==self.ncx-1 and \
mi[2]==mj[2]==1 and \
np.abs(mi[1]-mj[1])==1:
right[i,j]=True
# uppermost row, upper triangle , adjacent cols
if mi[1]==mj[1]==0 and \
mi[2]==mj[2]==0 and \
np.abs(mi[0]-mj[0])==1:
top[i,j]=True
# lowermost row, # lower triangle, # adjacent cols
if mi[1]==mj[1]==self.ncy-1 and \
mi[2]==mj[2]==2 and \
np.abs(mi[0]-mj[0])==1:
bottom[i,j]=True
if len(shared_verts) == 1 and \
any([left[i,j],right[i,j],top[i,j],bottom[i,j]]) and \
(j,i) not in shared_v_idx:
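# The two cells meet in a single corner on the domain boundary; add an
# auxiliary point shifted outside the domain so their affine maps are also
# forced to agree beyond the image region, which keeps the velocity field
# continuous outside the domain.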
v_aux = list(shared_verts)[0] # v_aux is a tuple
v_aux = list(v_aux) # Now v_aux is a list (i.e. mutable)
if left[i,j] or right[i,j]:
v_aux[0]-=10 # Create a new vertex with the same y
elif top[i,j] or bottom[i,j]:
v_aux[1]-=10 # Create a new vertex with the same x
else:
raise ValueError("WTF?")
shared_verts = [tuple(shared_verts)[0], tuple(v_aux)]
shared_v.append(shared_verts)
shared_v_idx.append((i,j))
return np.array(shared_v), shared_v_idx
def create_continuity_constrains(self):
""" Based on the vertices found that are shared by cells, construct
continuity constraints
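For a pair of cells (i, j) sharing two homogeneous vertices x_a and x_b,
continuity requires A_i x = A_j x for x in {x_a, x_b}; splitting each
equality into its two velocity components gives the four rows per pair
assembled below.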
"""
Ltemp = np.zeros(shape=(0,6*self.nC))
count = 0
for i,j in self.shared_v_idx:
# Row 1 [x_a^T 0_{1x3} -x_a^T 0_{1x3}]
row1 = np.zeros(shape=(6*self.nC))
row1[(6*i):(6*(i+1))] = np.append(np.array(self.shared_v[count][0]),
np.zeros((1,3)))
row1[(6*j):(6*(j+1))] = np.append(-np.array(self.shared_v[count][0]),
np.zeros((1,3)))
# Row 2 [0_{1x3} x_a^T 0_{1x3} -x_a^T]
row2 = np.zeros(shape=(6*self.nC))
row2[(6*i):(6*(i+1))] = np.append(np.zeros((1,3)),
np.array(self.shared_v[count][0]))
row2[(6*j):(6*(j+1))] = np.append(np.zeros((1,3)),
-np.array(self.shared_v[count][0]))
# Row 3 [x_b^T 0_{1x3} -x_b^T 0_{1x3}]
row3 = np.zeros(shape=(6*self.nC))
row3[(6*i):(6*(i+1))] = np.append(np.array(self.shared_v[count][1]),
np.zeros((1,3)))
row3[(6*j):(6*(j+1))] = np.append(-np.array(self.shared_v[count][1]),
np.zeros((1,3)))
# Row 4 [0_{1x3} x_b^T 0_{1x3} -x_b^T]
row4 = np.zeros(shape=(6*self.nC))
row4[(6*i):(6*(i+1))] = np.append(np.zeros((1,3)),
np.array(self.shared_v[count][1]))
row4[(6*j):(6*(j+1))] = np.append(np.zeros((1,3)),
-np.array(self.shared_v[count][1]))
Ltemp = np.vstack((Ltemp, row1, row2, row3, row4))
count += 1
return Ltemp
def create_zero_trace_constrains(self):
""" Construct zero trace (volume perservation) constrains """
Ltemp = np.zeros(shape=(self.nC, 6*self.nC))
for c in range(self.nC):
Ltemp[c,(6*c):(6*(c+1))] = np.array([1,0,0,0,1,0])
return Ltemp
def create_zero_boundary_constrains(self):
""" Construct zero boundary i.e. fixed boundary constrains. Note that
points on the upper and lower bound can still move to the left and
right and points on the left and right bound can still move up
and down. Thus, they are only partial zero.
"""
xmin, ymin = self.minbound
xmax, ymax = self.maxbound
Ltemp = np.zeros(shape=(0,6*self.nC))
for c in range(self.nC):
for v in self.cells_verts[c]:
if(v[0] == xmin or v[0] == xmax):
row = np.zeros(shape=(6*self.nC))
row[(6*c):(6*(c+1))] = np.append(np.zeros((1,3)),v)
Ltemp = np.vstack((Ltemp, row))
if(v[1] == ymin or v[1] == ymax):
row = np.zeros(shape=(6*self.nC))
# -*- coding: utf-8 -*-
#
# Copyright (c) 2018 Leland Stanford Junior University
# Copyright (c) 2018 The Regents of the University of California
#
# This file is part of pelicun.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software without
# specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# You should have received a copy of the BSD 3-Clause License along with
# pelicun. If not, see <http://www.opensource.org/licenses/>.
#
# Contributors:
# <NAME>
"""
This subpackage performs system tests on the control module of pelicun.
"""
import pytest
import numpy as np
from numpy.testing import assert_allclose
from scipy.stats import truncnorm as tnorm
from copy import deepcopy
import os, sys, inspect
current_dir = os.path.dirname(
os.path.abspath(inspect.getfile(inspect.currentframe())))
parent_dir = os.path.dirname(current_dir)
sys.path.insert(0,os.path.dirname(parent_dir))
from pelicun.control import *
from pelicun.uq import mvn_orthotope_density as mvn_od
from pelicun.tests.test_pelicun import prob_allclose, prob_approx
# -----------------------------------------------------------------------------
# FEMA_P58_Assessment
# -----------------------------------------------------------------------------
def test_FEMA_P58_Assessment_central_tendencies():
"""
Perform a loss assessment with customized inputs that reduce the
dispersion of calculation parameters to negligible levels. This allows us
to test the results against pre-defined reference values in spite of the
randomness involved in the calculations.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())[0]
assert RV_EDP.theta[0] == pytest.approx(0.5 * g)
assert RV_EDP.theta[1] == pytest.approx(0.5 * g * 1e-6, abs=1e-7)
assert RV_EDP._distribution == 'lognormal'
# QNT
assert A._QNT_dict is None
#RV_QNT = A._RV_dict['QNT']
#assert RV_QNT is None
# FRG
RV_FRG = list(A._FF_dict.values())
thetas, betas = np.array([rv.theta for rv in RV_FRG]).T
assert_allclose(thetas, np.array([0.444, 0.6, 0.984]) * g, rtol=0.01)
assert_allclose(betas, np.array([0.3, 0.4, 0.5]), rtol=0.01)
rho = RV_FRG[0].RV_set.Rho()
assert_allclose(rho, np.ones((3, 3)), rtol=0.01)
assert np.all([rv.distribution == 'lognormal' for rv in RV_FRG])
# RED
RV_RED = list(A._DV_RED_dict.values())
mus, sigmas = np.array([rv.theta for rv in RV_RED]).T
assert_allclose(mus, np.ones(2), rtol=0.01)
assert_allclose(sigmas, np.array([1e-4, 1e-4]), rtol=0.01)
rho = RV_RED[0].RV_set.Rho()
assert_allclose(rho, np.array([[1, 0], [0, 1]]), rtol=0.01)
assert np.all([rv.distribution == 'normal' for rv in RV_RED])
assert_allclose (RV_RED[0].truncation_limits, [0., 2.], rtol=0.01)
assert_allclose (RV_RED[1].truncation_limits, [0., 4.], rtol=0.01)
# INJ
RV_INJ = list(A._DV_INJ_dict.values())
mus, sigmas = np.array([rv.theta for rv in RV_INJ]).T
assert_allclose(mus, np.ones(4), rtol=0.01)
assert_allclose(sigmas, np.ones(4) * 1e-4, rtol=0.01)
rho = RV_INJ[0].RV_set.Rho()
rho_target = np.zeros((4, 4))
np.fill_diagonal(rho_target, 1.)
assert_allclose(rho, rho_target, rtol=0.01)
assert np.all([rv.distribution == 'normal' for rv in RV_INJ])
assert_allclose(RV_INJ[0].truncation_limits, [0., 10./3.], rtol=0.01)
assert_allclose(RV_INJ[1].truncation_limits, [0., 10./3.], rtol=0.01)
assert_allclose(RV_INJ[2].truncation_limits, [0., 10.], rtol=0.01)
assert_allclose(RV_INJ[3].truncation_limits, [0., 10.], rtol=0.01)
# REP
RV_REP = list(A._DV_REP_dict.values())
thetas, betas = np.array([rv.theta for rv in RV_REP]).T
assert_allclose(thetas, np.ones(6), rtol=0.01)
assert_allclose(betas, np.ones(6) * 1e-4, rtol=0.01)
rho = RV_REP[0].RV_set.Rho()
rho_target = np.zeros((6, 6))
np.fill_diagonal(rho_target, 1.)
assert_allclose(rho, rho_target, rtol=0.01)
assert np.all([rv.distribution == 'lognormal' for rv in RV_REP])
# ------------------------------------------------------------------------
A.define_loss_model()
# QNT (deterministic)
QNT = A._FG_dict['T0001.001']._performance_groups[0]._quantity
assert QNT == pytest.approx(50., rel=0.01)
A.calculate_damage()
# ------------------------------------------------ check damage calculation
# TIME
T_check = A._TIME.describe().T.loc[['hour','month','weekday?'],:]
assert_allclose(T_check['mean'], np.array([11.5, 5.5, 5. / 7.]), rtol=0.05)
assert_allclose(T_check['min'], np.array([0., 0., 0.]), rtol=0.01)
assert_allclose(T_check['max'], np.array([23., 11., 1.]), rtol=0.01)
assert_allclose(T_check['50%'], np.array([12., 5., 1.]), atol=1.0)
assert_allclose(T_check['count'], np.array([10000., 10000., 10000.]),
rtol=0.01)
# POP
P_CDF = A._POP.describe(np.arange(1, 27) / 27.).iloc[:, 0].values[4:]
vals, counts = np.unique(P_CDF, return_counts=True)
assert_allclose(vals, np.array([0., 2.5, 5., 10.]), rtol=0.01)
assert_allclose(counts, np.array([14, 2, 7, 5]), atol=1)
# COL
COL_check = A._COL.describe().T
assert COL_check['mean'].values[0] == pytest.approx(0.5, rel=0.05)
assert len(A._ID_dict['non-collapse']) == pytest.approx(5000, rel=0.05)
assert len(A._ID_dict['collapse']) == pytest.approx(5000, rel=0.05)
# DMG
DMG_check = A._DMG.describe().T
assert_allclose(DMG_check['mean'], np.array([17.074, 17.074, 7.9361]),
rtol=0.1, atol=1.0)
assert_allclose(DMG_check['min'], np.zeros(3), rtol=0.01)
assert_allclose(DMG_check['max'], np.ones(3) * 50.0157, rtol=0.05)
# ------------------------------------------------------------------------
A.calculate_losses()
# -------------------------------------------------- check loss calculation
# RED
DV_RED = A._DV_dict['red_tag'].describe().T
assert_allclose(DV_RED['mean'], np.array([0.341344, 0.1586555]), rtol=0.1)
# INJ - collapse
DV_INJ_C = deepcopy(A._COL[['INJ-0', 'INJ-1']])
DV_INJ_C.dropna(inplace=True)
NC_count = DV_INJ_C.describe().T['count'][0]
assert_allclose(NC_count, np.ones(2) * 5000, rtol=0.05)
# lvl 1
vals, counts = np.unique(DV_INJ_C.iloc[:, 0].values, return_counts=True)
assert_allclose(vals, np.array([0., 2.5, 5., 10.]) * 0.1, rtol=0.01)
assert_allclose(counts / NC_count, np.array([14, 2, 7, 5]) / 28., atol=0.01, rtol=0.1)
# lvl 2
vals, counts = np.unique(DV_INJ_C.iloc[:, 1].values, return_counts=True)
assert_allclose(vals, np.array([0., 2.5, 5., 10.]) * 0.9, rtol=0.01)
assert_allclose(counts / NC_count, np.array([14, 2, 7, 5]) / 28., atol=0.01, rtol=0.1)
# INJ - non-collapse
DV_INJ_NC = deepcopy(A._DV_dict['injuries'])
DV_INJ_NC[0].dropna(inplace=True)
assert_allclose(DV_INJ_NC[0].describe().T['count'], np.ones(2) * 5000,
rtol=0.05)
# lvl 1 DS2
I_CDF = DV_INJ_NC[0].iloc[:, 0]
I_CDF = np.around(I_CDF, decimals=3)
vals, counts = np.unique(I_CDF, return_counts=True)
assert_allclose(vals, np.array([0., 0.075, 0.15, 0.3]), rtol=0.01)
target_prob = np.array(
[0.6586555, 0., 0., 0.] + 0.3413445 * np.array([14, 2, 7, 5]) / 28.)
assert_allclose(counts / NC_count, target_prob, atol=0.01, rtol=0.1)
# lvl 1 DS3
I_CDF = DV_INJ_NC[0].iloc[:, 1]
I_CDF = np.around(I_CDF, decimals=3)
vals, counts = np.unique(I_CDF, return_counts=True)
assert_allclose(vals, np.array([0., 0.075, 0.15, 0.3]), rtol=0.01)
target_prob = np.array(
[0.8413445, 0., 0., 0.] + 0.1586555 * np.array([14, 2, 7, 5]) / 28.)
assert_allclose(counts / NC_count, target_prob, atol=0.01, rtol=0.1)
# lvl 2 DS2
I_CDF = DV_INJ_NC[1].iloc[:, 0]
I_CDF = np.around(I_CDF, decimals=3)
vals, counts = np.unique(I_CDF, return_counts=True)
assert_allclose(vals, np.array([0., 0.025, 0.05, 0.1]), rtol=0.01)
target_prob = np.array(
[0.6586555, 0., 0., 0.] + 0.3413445 * np.array([14, 2, 7, 5]) / 28.)
assert_allclose(counts / NC_count, target_prob, atol=0.01, rtol=0.1)
# lvl2 DS3
I_CDF = DV_INJ_NC[1].iloc[:, 1]
I_CDF = np.around(I_CDF, decimals=3)
vals, counts = np.unique(I_CDF, return_counts=True)
assert_allclose(vals, np.array([0., 0.025, 0.05, 0.1]), rtol=0.01)
target_prob = np.array(
[0.8413445, 0., 0., 0.] + 0.1586555 * np.array([14, 2, 7, 5]) / 28.)
assert_allclose(counts / NC_count, target_prob, atol=0.01, rtol=0.1)
# REP
assert len(A._ID_dict['non-collapse']) == len(A._ID_dict['repairable'])
assert len(A._ID_dict['irreparable']) == 0
# cost
DV_COST = A._DV_dict['rec_cost']
# DS1
C_CDF = DV_COST.iloc[:, 0]
C_CDF = np.around(C_CDF / 10., decimals=0) * 10.
vals, counts = np.unique(C_CDF, return_counts=True)
assert_allclose(vals, [0, 2500], rtol=0.01)
t_prob = 0.3413445
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# DS2
C_CDF = DV_COST.iloc[:, 1]
C_CDF = np.around(C_CDF / 100., decimals=0) * 100.
vals, counts = np.unique(C_CDF, return_counts=True)
assert_allclose(vals, [0, 25000], rtol=0.01)
t_prob = 0.3413445
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# DS3
C_CDF = DV_COST.iloc[:, 2]
C_CDF = np.around(C_CDF / 1000., decimals=0) * 1000.
vals, counts = np.unique(C_CDF, return_counts=True)
assert_allclose(vals, [0, 250000], rtol=0.01)
t_prob = 0.1586555
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# time
DV_TIME = A._DV_dict['rec_time']
# DS1
T_CDF = DV_TIME.iloc[:, 0]
T_CDF = np.around(T_CDF, decimals=1)
vals, counts = np.unique(T_CDF, return_counts=True)
assert_allclose(vals, [0, 2.5], rtol=0.01)
t_prob = 0.3413445
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# DS2
T_CDF = DV_TIME.iloc[:, 1]
T_CDF = np.around(T_CDF, decimals=0)
vals, counts = np.unique(T_CDF, return_counts=True)
assert_allclose(vals, [0, 25], rtol=0.01)
t_prob = 0.3413445
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# DS3
T_CDF = DV_TIME.iloc[:, 2]
T_CDF = np.around(T_CDF / 10., decimals=0) * 10.
vals, counts = np.unique(T_CDF, return_counts=True)
assert_allclose(vals, [0, 250], rtol=0.01)
t_prob = 0.1586555
assert_allclose(counts / NC_count, [1. - t_prob, t_prob], rtol=0.1)
# ------------------------------------------------------------------------
A.aggregate_results()
# ------------------------------------------------ check result aggregation
S = A._SUMMARY
SD = S.describe().T
assert_allclose(S[('event time', 'month')], A._TIME['month'] + 1)
assert_allclose(S[('event time', 'weekday?')], A._TIME['weekday?'])
assert_allclose(S[('event time', 'hour')], A._TIME['hour'])
assert_allclose(S[('inhabitants', '')], A._POP.iloc[:, 0])
assert SD.loc[('collapses', 'collapsed'), 'mean'] == pytest.approx(0.5,
rel=0.05)
assert SD.loc[('collapses', 'mode'), 'mean'] == 0.
assert SD.loc[('collapses', 'mode'), 'count'] == pytest.approx(5000,
rel=0.05)
assert SD.loc[('red tagged', ''), 'mean'] == pytest.approx(0.5, rel=0.05)
assert SD.loc[('red tagged', ''), 'count'] == pytest.approx(5000, rel=0.05)
for col in ['irreparable', 'cost impractical', 'time impractical']:
assert SD.loc[('reconstruction', col), 'mean'] == 0.
assert SD.loc[('reconstruction', col), 'count'] == pytest.approx(5000,
rel=0.05)
RC = deepcopy(S.loc[:, ('reconstruction', 'cost')])
RC_CDF = np.around(RC / 1000., decimals=0) * 1000.
vals, counts = np.unique(RC_CDF, return_counts=True)
assert_allclose(vals, np.array([0, 2., 3., 25., 250., 300.]) * 1000.)
t_prob1 = 0.3413445 / 2.
t_prob2 = 0.1586555 / 2.
assert_allclose(counts / 10000.,
[t_prob2, t_prob1 / 2., t_prob1 / 2., t_prob1, t_prob2,
0.5], atol=0.01, rtol=0.1)
RT = deepcopy(S.loc[:, ('reconstruction', 'time-parallel')])
RT_CDF = np.around(RT, decimals=0)
vals, counts = np.unique(RT_CDF, return_counts=True)
assert_allclose(vals, np.array([0, 2., 3., 25., 250., 300.]))
t_prob1 = 0.3413445 / 2.
t_prob2 = 0.1586555 / 2.
assert_allclose(counts / 10000.,
[t_prob2, t_prob1 / 2., t_prob1 / 2., t_prob1, t_prob2,
0.5], atol=0.01, rtol=0.1)
assert_allclose(S.loc[:, ('reconstruction', 'time-parallel')],
S.loc[:, ('reconstruction', 'time-sequential')])
CAS = deepcopy(S.loc[:, ('injuries', 'sev1')])
CAS_CDF = np.around(CAS, decimals=3)
vals, counts = np.unique(CAS_CDF, return_counts=True)
assert_allclose(vals, [0, 0.075, 0.15, 0.25, 0.3, 0.5, 1.])
assert_allclose(counts / 10000.,
np.array([35, 1, 3.5, 2, 2.5, 7, 5]) / 56., atol=0.01,
rtol=0.1)
CAS = deepcopy(S.loc[:, ('injuries', 'sev2')])
CAS_CDF = np.around(CAS, decimals=3)
vals, counts = np.unique(CAS_CDF, return_counts=True)
assert_allclose(vals, [0, 0.025, 0.05, 0.1, 2.25, 4.5, 9.])
assert_allclose(counts / 10000.,
np.array([35, 1, 3.5, 2.5, 2, 7, 5]) / 56., atol=0.01,
rtol=0.1)
def test_FEMA_P58_Assessment_EDP_uncertainty_basic():
"""
Perform a loss assessment with customized inputs that focus on testing the
methods used to estimate the multivariate lognormal distribution of EDP
values. Besides the fitting, this test also evaluates the propagation of
EDP uncertainty through the analysis. Dispersions in other calculation
parameters are reduced to negligible levels. This allows us to test the
results against pre-defined reference values in spite of the randomness
involved in the calculations.
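The reference probabilities below are orthotope integrals of the fitted
multivariate lognormal EDP distribution, evaluated with mvn_od
(mvn_orthotope_density) over the relevant damage-state bounds.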
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_2.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_2.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())
thetas, betas = np.array([rv.theta for rv in RV_EDP]).T
assert_allclose(thetas, [9.80665, 12.59198, 0.074081, 0.044932], rtol=0.02)
assert_allclose(betas, [0.25, 0.25, 0.3, 0.4], rtol=0.02)
rho = RV_EDP[0].RV_set.Rho()
rho_target = [
[1.0, 0.6, 0.3, 0.3],
[0.6, 1.0, 0.3, 0.3],
[0.3, 0.3, 1.0, 0.7],
[0.3, 0.3, 0.7, 1.0]]
assert_allclose(rho, rho_target, atol=0.05)
assert np.all([rv.distribution == 'lognormal' for rv in RV_EDP])
# ------------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# ------------------------------------------------ check damage calculation
# COL
COL_check = A._COL.describe().T
col_target = 1.0 - mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer(
[0.3, 0.4], [0.3, 0.4]),
upper=np.log([0.1, 0.1]))[0]
assert COL_check['mean'].values[0] == pytest.approx(col_target, rel=0.1)
# DMG
DMG_check = [len(np.where(A._DMG.iloc[:, i] > 0.0)[0]) / 10000. for i in
range(8)]
DMG_1_PID = mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([0.05488, 1e-6]), upper=np.log([0.1, 0.1]))[
0]
DMG_2_PID = mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([1e-6, 0.05488]), upper=np.log([0.1, 0.1]))[
0]
DMG_1_PFA = mvn_od(np.log([0.074081, 9.80665]),
np.array([[1, 0.3], [0.3, 1]]) * np.outer([0.3, 0.25],
[0.3, 0.25]),
lower=np.log([1e-6, 9.80665]),
upper=np.log([0.1, np.inf]))[0]
DMG_2_PFA = mvn_od(np.log([0.074081, 12.59198]),
np.array([[1, 0.3], [0.3, 1]]) * np.outer([0.3, 0.25],
[0.3, 0.25]),
lower=np.log([1e-6, 9.80665]),
upper=np.log([0.1, np.inf]))[0]
assert DMG_check[0] == pytest.approx(DMG_check[1], rel=0.01)
assert DMG_check[2] == pytest.approx(DMG_check[3], rel=0.01)
assert DMG_check[4] == pytest.approx(DMG_check[5], rel=0.01)
assert DMG_check[6] == pytest.approx(DMG_check[7], rel=0.01)
assert DMG_check[0] == pytest.approx(DMG_1_PID, rel=0.10)
assert DMG_check[2] == pytest.approx(DMG_2_PID, rel=0.10)
assert DMG_check[4] == pytest.approx(DMG_1_PFA, rel=0.10)
assert DMG_check[6] == pytest.approx(DMG_2_PFA, rel=0.10)
# ------------------------------------------------------------------------
A.calculate_losses()
# -------------------------------------------------- check loss calculation
# COST
DV_COST = A._DV_dict['rec_cost']
DV_TIME = A._DV_dict['rec_time']
C_target = [0., 250., 1250.]
T_target = [0., 0.25, 1.25]
# PG 1011 and 1012
P_target = [
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([1e-6, 1e-6]), upper=np.log([0.05488, 0.1]))[0],
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([0.05488, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
]
for i in [0, 1]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(P_target, P_test, atol=0.02)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# PG 1021 and 1022
P_target = [
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([1e-6, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log([0.074081, 0.044932]),
np.array([[1, 0.7], [0.7, 1]]) * np.outer([0.3, 0.4],
[0.3, 0.4]),
lower=np.log([1e-6, 0.05488]), upper=np.log([0.05488, 0.1]))[0],
]
for i in [2, 3]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(P_target, P_test, atol=0.02)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# PG 2011 and 2012
P_target = [
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 1e-6, 1e-6]),
upper=np.log([0.1, 9.80665, np.inf]))[0],
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 9.80665, 9.80665]),
upper=np.log([0.1, np.inf, np.inf]))[0],
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 9.80665, 1e-6]),
upper=np.log([0.1, np.inf, 9.80665]))[0],
]
for i in [4, 5]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(P_target, P_test, atol=0.02)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# PG 2021 and 2022
P_target = [
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 1e-6, 1e-6]),
upper=np.log([0.1, np.inf, 9.80665]))[0],
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 9.80665, 9.80665]),
upper=np.log([0.1, np.inf, np.inf]))[0],
mvn_od(np.log([0.074081, 9.80665, 12.59198]),
np.array([[1.0, 0.3, 0.3], [0.3, 1.0, 0.6],
[0.3, 0.6, 1.0]]) * np.outer([0.3, 0.25, 0.25],
[0.3, 0.25, 0.25]),
lower=np.log([1e-6, 1e-6, 9.80665]),
upper=np.log([0.1, 9.80665, np.inf]))[0],
]
for i in [6, 7]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(P_target, P_test, atol=0.02)
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
# RED TAG
RED_check = A._DV_dict['red_tag'].describe().T
RED_check = (RED_check['mean'] * RED_check['count'] / 10000.).values
assert RED_check[0] == pytest.approx(RED_check[1], rel=0.01)
assert RED_check[2] == pytest.approx(RED_check[3], rel=0.01)
assert RED_check[4] == pytest.approx(RED_check[5], rel=0.01)
assert RED_check[6] == pytest.approx(RED_check[7], rel=0.01)
assert RED_check[0] == pytest.approx(DMG_1_PID, rel=0.10)
assert RED_check[2] == pytest.approx(DMG_2_PID, rel=0.10)
assert RED_check[4] == pytest.approx(DMG_1_PFA, rel=0.10)
assert RED_check[6] == pytest.approx(DMG_2_PFA, rel=0.10)
DMG_on = np.where(A._DMG > 0.0)[0]
RED_on = np.where(A._DV_dict['red_tag'] > 0.0)[0]
assert_allclose(DMG_on, RED_on)
# ------------------------------------------------------------------------
A.aggregate_results()
# ------------------------------------------------ check result aggregation
P_no_RED_target = mvn_od(np.log([0.074081, 0.044932, 9.80665, 12.59198]),
np.array(
[[1.0, 0.7, 0.3, 0.3], [0.7, 1.0, 0.3, 0.3],
[0.3, 0.3, 1.0, 0.6],
[0.3, 0.3, 0.6, 1.0]]) * np.outer(
[0.3, 0.4, 0.25, 0.25],
[0.3, 0.4, 0.25, 0.25]),
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log(
[0.05488, 0.05488, 9.80665, 9.80665]))[0]
S = A._SUMMARY
SD = S.describe().T
P_no_RED_test = (1.0 - SD.loc[('red tagged', ''), 'mean']) * SD.loc[
    ('red tagged', ''), 'count'] / 10000.
assert P_no_RED_target == prob_approx(P_no_RED_test, 0.04)
def test_FEMA_P58_Assessment_EDP_uncertainty_detection_limit():
"""
Perform a loss assessment with customized inputs that focus on testing the
methods used to estimate the multivariate lognormal distribution of EDP
values. Besides the fitting, this test also evaluates the propagation of
EDP uncertainty through the analysis. Dispersions in other calculation
parameters are reduced to negligible levels. This allows us to test the
results against pre-defined reference values in spite of the randomness
involved in the calculations.
This test differs from the basic case in having unreliable EDP values above
a certain limit - a typical feature of interstory drifts in dynamic
simulations. Such cases should not be a problem if the limits can be
estimated and specified as detection limits in the input file.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_3.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_3.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())
thetas, betas = np.array([rv.theta for rv in RV_EDP]).T
EDP_theta_test = thetas
EDP_beta_test = betas
EDP_theta_target = [9.80665, 12.59198, 0.074081, 0.044932]
EDP_beta_target = [0.25, 0.25, 0.3, 0.4]
assert_allclose(EDP_theta_test, EDP_theta_target, rtol=0.025)
assert_allclose(EDP_beta_test, EDP_beta_target, rtol=0.1)
rho = RV_EDP[0].RV_set.Rho()
EDP_rho_test = rho
EDP_rho_target = [
[1.0, 0.6, 0.3, 0.3],
[0.6, 1.0, 0.3, 0.3],
[0.3, 0.3, 1.0, 0.7],
[0.3, 0.3, 0.7, 1.0]]
EDP_COV_test = EDP_rho_test * np.outer(EDP_beta_test, EDP_beta_test)
assert_allclose(EDP_rho_test, EDP_rho_target, atol=0.15)
assert np.all([rv.distribution == 'lognormal' for rv in RV_EDP])
# ------------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# ------------------------------------------------ check damage calculation
# COL
COL_check = A._COL.describe().T
col_target = 1.0 - mvn_od(np.log(EDP_theta_test[2:]),
EDP_COV_test[2:, 2:],
upper=np.log([0.1, 0.1]))[0]
assert COL_check['mean'].values[0] == prob_approx(col_target, 0.03)
# DMG
DMG_check = [len(np.where(A._DMG.iloc[:, i] > 0.0)[0]) / 10000.
for i in range(8)]
DMG_1_PID = mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 1e-6]),
upper=np.log([0.1, 0.1]))[0]
DMG_2_PID = mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 0.05488]),
upper=np.log([0.1, 0.1]))[0]
DMG_1_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0]
DMG_2_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 9.80665, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0]
assert DMG_check[0] == pytest.approx(DMG_check[1], rel=0.01)
assert DMG_check[2] == pytest.approx(DMG_check[3], rel=0.01)
assert DMG_check[4] == pytest.approx(DMG_check[5], rel=0.01)
assert DMG_check[6] == pytest.approx(DMG_check[7], rel=0.01)
assert DMG_check[0] == prob_approx(DMG_1_PID, 0.03)
assert DMG_check[2] == prob_approx(DMG_2_PID, 0.03)
assert DMG_check[4] == prob_approx(DMG_1_PFA, 0.03)
assert DMG_check[6] == prob_approx(DMG_2_PFA, 0.03)
# ------------------------------------------------------------------------
A.calculate_losses()
# -------------------------------------------------- check loss calculation
# COST
DV_COST = A._DV_dict['rec_cost']
DV_TIME = A._DV_dict['rec_time']
C_target = [0., 250., 1250.]
T_target = [0., 0.25, 1.25]
# PG 1011 and 1012
P_target = [
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 1e-6]), upper=np.log([0.05488, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
]
for i in [0, 1]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# PG 1021 and 1022
P_target = [
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 0.05488]), upper=np.log([0.05488, 0.1]))[0],
]
for i in [2, 3]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# PG 2011 and 2012
P_target = [
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([9.80665, np.inf, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 9.80665, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, 9.80665, 0.1, 0.1]))[0],
]
for i in [4, 5]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# PG 2021 and 2022
P_target = [
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, 9.80665, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 9.80665, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 9.80665, 1e-6, 1e-6]),
upper=np.log([9.80665, np.inf, 0.1, 0.1]))[0],
]
for i in [6, 7]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# RED TAG
RED_check = A._DV_dict['red_tag'].describe().T
RED_check = (RED_check['mean'] * RED_check['count'] / 10000.).values
assert RED_check[0] == pytest.approx(RED_check[1], rel=0.01)
assert RED_check[2] == pytest.approx(RED_check[3], rel=0.01)
assert RED_check[4] == pytest.approx(RED_check[5], rel=0.01)
assert RED_check[6] == pytest.approx(RED_check[7], rel=0.01)
assert RED_check[0] == prob_approx(DMG_1_PID, 0.03)
assert RED_check[2] == prob_approx(DMG_2_PID, 0.03)
assert RED_check[4] == prob_approx(DMG_1_PFA, 0.03)
assert RED_check[6] == prob_approx(DMG_2_PFA, 0.03)
DMG_on = np.where(A._DMG > 0.0)[0]
RED_on = np.where(A._DV_dict['red_tag'] > 0.0)[0]
assert_allclose(DMG_on, RED_on)
# ------------------------------------------------------------------------
A.aggregate_results()
# ------------------------------------------------ check result aggregation
P_no_RED_target = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([9.80665, 9.80665, 0.05488, 0.05488]))[0]
S = A._SUMMARY
SD = S.describe().T
P_no_RED_test = ((1.0 - SD.loc[('red tagged', ''), 'mean'])
* SD.loc[('red tagged', ''), 'count'] / 10000.)
assert P_no_RED_target == prob_approx(P_no_RED_test, 0.04)
def test_FEMA_P58_Assessment_EDP_uncertainty_failed_analyses():
"""
Perform a loss assessment with customized inputs that focus on testing the
methods used to estimate the multivariate lognormal distribution of EDP
values. Besides the fitting, this test also evaluates the propagation of
EDP uncertainty through the analysis. Dispersions in other calculation
parameters are reduced to negligible levels. This allows us to test the
results against pre-defined reference values in spite of the randomness
involved in the calculations.
Here we use EDP results with unique values assigned to failed analyses.
In particular, PID=1.0 and PFA=100.0 are used when an analysis fails.
These values shall be handled by detection limits of 10 and 100 for PID
and PFA, respectively.
"""
base_input_path = 'resources/'
DL_input = base_input_path + 'input data/' + "DL_input_test_4.json"
EDP_input = base_input_path + 'EDP data/' + "EDP_table_test_4.out"
A = FEMA_P58_Assessment()
A.read_inputs(DL_input, EDP_input, verbose=False)
A.define_random_variables()
# -------------------------------------------------- check random variables
# EDP
RV_EDP = list(A._EDP_dict.values())
thetas, betas = np.array([rv.theta for rv in RV_EDP]).T
EDP_theta_test = thetas
EDP_beta_test = betas
EDP_theta_target = [9.80665, 12.59198, 0.074081, 0.044932]
EDP_beta_target = [0.25, 0.25, 0.3, 0.4]
assert_allclose(EDP_theta_test, EDP_theta_target, rtol=0.025)
assert_allclose(EDP_beta_test, EDP_beta_target, rtol=0.1)
rho = RV_EDP[0].RV_set.Rho()
EDP_rho_test = rho
EDP_rho_target = [
[1.0, 0.6, 0.3, 0.3],
[0.6, 1.0, 0.3, 0.3],
[0.3, 0.3, 1.0, 0.7],
[0.3, 0.3, 0.7, 1.0]]
EDP_COV_test = EDP_rho_test * np.outer(EDP_beta_test, EDP_beta_test)
assert_allclose(EDP_rho_test, EDP_rho_target, atol=0.15)
assert np.all([rv.distribution == 'lognormal' for rv in RV_EDP])
# ------------------------------------------------------------------------
A.define_loss_model()
A.calculate_damage()
# ------------------------------------------------ check damage calculation
# COL
COL_check = A._COL.describe().T
col_target = 1.0 - mvn_od(np.log(EDP_theta_test[2:]),
EDP_COV_test[2:,2:],
upper=np.log([0.1, 0.1]))[0]
assert COL_check['mean'].values[0] == prob_approx(col_target, 0.03)
# DMG
DMG_check = [len(np.where(A._DMG.iloc[:, i] > 0.0)[0]) / 10000.
for i in range(8)]
DMG_1_PID = mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:,2:],
lower=np.log([0.05488, 1e-6]),
upper=np.log([0.1, 0.1]))[0]
DMG_2_PID = mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 0.05488]),
upper=np.log([0.1, 0.1]))[0]
DMG_1_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 1e-6, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0]
DMG_2_PFA = mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 9.80665, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0]
assert DMG_check[0] == pytest.approx(DMG_check[1], rel=0.01)
assert DMG_check[2] == pytest.approx(DMG_check[3], rel=0.01)
assert DMG_check[4] == pytest.approx(DMG_check[5], rel=0.01)
assert DMG_check[6] == pytest.approx(DMG_check[7], rel=0.01)
assert DMG_check[0] == prob_approx(DMG_1_PID, 0.03)
assert DMG_check[2] == prob_approx(DMG_2_PID, 0.03)
assert DMG_check[4] == prob_approx(DMG_1_PFA, 0.03)
assert DMG_check[6] == prob_approx(DMG_2_PFA, 0.03)
# ------------------------------------------------------------------------
A.calculate_losses()
# -------------------------------------------------- check loss calculation
# COST
DV_COST = A._DV_dict['rec_cost']
DV_TIME = A._DV_dict['rec_time']
C_target = [0., 250., 1250.]
T_target = [0., 0.25, 1.25]
# PG 1011 and 1012
P_target = [
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 1e-6]), upper=np.log([0.05488, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
]
for i in [0, 1]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# PG 1021 and 1022
P_target = [
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 1e-6]), upper=np.log([0.1, 0.05488]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([0.05488, 0.05488]), upper=np.log([0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test[2:]), EDP_COV_test[2:, 2:],
lower=np.log([1e-6, 0.05488]), upper=np.log([0.05488, 0.1]))[0],
]
for i in [2, 3]:
C_test, P_test = np.unique(
np.around(DV_COST.iloc[:, i].values / 10., decimals=0) * 10.,
return_counts=True)
C_test = C_test[np.where(P_test > 10)]
T_test, P_test = np.unique(
np.around(DV_TIME.iloc[:, i].values * 100., decimals=0) / 100.,
return_counts=True)
T_test = T_test[np.where(P_test > 10)]
P_test = P_test[np.where(P_test > 10)]
P_test = P_test / 10000.
assert_allclose(C_target, C_test, rtol=0.001)
assert_allclose(T_target, T_test, rtol=0.001)
prob_allclose(P_target, P_test, 0.04)
# PG 2011 and 2012
P_target = [
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([1e-6, 1e-6, 1e-6, 1e-6]),
upper=np.log([9.80665, np.inf, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 9.80665, 1e-6, 1e-6]),
upper=np.log([np.inf, np.inf, 0.1, 0.1]))[0],
mvn_od(np.log(EDP_theta_test), EDP_COV_test,
lower=np.log([9.80665, 1e-6, 1e-6, 1e-6])
"""
Module for PypeIt extraction code
.. include:: ../include/links.rst
"""
import copy
import numpy as np
import scipy
from matplotlib import pyplot as plt
from IPython import embed
from astropy import stats
from pypeit import msgs
from pypeit import utils
from pypeit import specobj
from pypeit import specobjs
from pypeit import tracepca
from pypeit import bspline
from pypeit.display import display
from pypeit.core import pydl
from pypeit.core import pixels
from pypeit.core import arc
from pypeit.core import fitting
from pypeit.core import procimg
from pypeit.core.trace import fit_trace
from pypeit.core.moment import moment1d
def extract_optimal(sciimg, ivar, mask, waveimg, skyimg, thismask, oprof, box_radius,
spec, min_frac_use=0.05, base_var=None, count_scale=None, noise_floor=None):
"""
Perform optimal (profile-weighted) extraction of a single object using the
provided object profile.
The specobj object is changed in place with the boxcar and optimal
dictionaries being filled with the extraction parameters.
Parameters
----------
sciimg : float `numpy.ndarray`_, shape (nspec, nspat)
Science frame
ivar : float `numpy.ndarray`_, shape (nspec, nspat)
Inverse variance of science frame. Can be a model or deduced from the
image itself.
mask : boolean `numpy.ndarray`_, shape (nspec, nspat)
Good-pixel mask indicating which pixels should or should not be
used. Good pixels = True, Bad Pixels = False
waveimg : float `numpy.ndarray`_, shape (nspec, nspat)
Wavelength image.
skyimg : float `numpy.ndarray`_, shape (nspec, nspat)
Image containing our model of the sky
thismask : boolean `numpy.ndarray`_, shape (nspec, nspat)
Image indicating which pixels are on the slit/order in question.
True=Good.
oprof : float `numpy.ndarray`_, shape (nspec, nspat)
Image containing the profile of the object that we are extracting.
box_radius : :obj:`float`
Size of boxcar window in floating point pixels in the spatial direction.
spec : :class:`~pypeit.specobj.SpecObj`
This is the container that holds object, trace, and extraction
information for the object in question. This routine operates one object
at a time. **This object is altered in place!**
min_frac_use : :obj:`float`, optional
If the sum of the object profile across the spatial direction is less than
this value, the optimal extraction of that spectral pixel is masked
because the majority of the object profile has been masked.
base_var : `numpy.ndarray`_, shape is (nspec, nspat), optional
The "base-level" variance in the data set by the detector properties and
the image processing steps. See
:func:`~pypeit.core.procimg.base_variance`.
count_scale : :obj:`float`, `numpy.ndarray`_, optional
A scale factor, :math:`s`, that *has already been applied* to the
provided science image. For example, if the image has been flat-field
corrected, this is the inverse of the flat-field counts. If None, set
to 1. If a single float, assumed to be constant across the full image.
If an array, the shape must match ``base_var``. The variance will be 0
wherever :math:`s \leq 0`, modulo the provided ``adderr``. This is one
of the components needed to construct the model variance; see
``model_noise``.
noise_floor : :obj:`float`, optional
A fraction of the counts to add to the variance, which has the effect of
ensuring that the S/N is never greater than ``1/noise_floor``; see
:func:`~pypeit.core.procimg.variance_model`. If None, no noise floor is
added.
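Notes
-----
A sketch of the profile-weighted (Horne-style) combination evaluated below,
with M the good-pixel mask, P the normalized object profile, w the inverse
variance, I the science image and S the sky model:
:math:`f_{opt} = \sum_x M P w (I - S) / \sum_x M P^2 w` and
:math:`\sigma^2_{opt} = \sum_x M P / \sum_x M P^2 w`.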
"""
# Setup
imgminsky = sciimg - skyimg
nspat = imgminsky.shape[1]
nspec = imgminsky.shape[0]
spec_vec = np.arange(nspec)
spat_vec = np.arange(nspat)
# TODO This makes no sense for difference imaging? Not sure we need NIVAR anyway
var_no = None if base_var is None \
else procimg.variance_model(base_var, counts=skyimg, count_scale=count_scale,
noise_floor=noise_floor)
ispec, ispat = np.where(oprof > 0.0)
# Exit gracefully if we have no positive object profiles, since that means something was wrong with object fitting
if not np.any(oprof > 0.0):
msgs.warn('Object profile is zero everywhere. This aperture is junk.')
return
mincol = np.min(ispat)
maxcol = np.max(ispat) + 1
nsub = maxcol - mincol
mask_sub = mask[:,mincol:maxcol]
thismask_sub = thismask[:, mincol:maxcol]
wave_sub = waveimg[:,mincol:maxcol]
ivar_sub = np.fmax(ivar[:,mincol:maxcol],0.0) # enforce positivity since these are used as weights
vno_sub = None if var_no is None else np.fmax(var_no[:,mincol:maxcol],0.0)
base_sub = None if base_var is None else base_var[:,mincol:maxcol]
img_sub = imgminsky[:,mincol:maxcol]
sky_sub = skyimg[:,mincol:maxcol]
oprof_sub = oprof[:,mincol:maxcol]
# enforce normalization and positivity of object profiles
norm = np.nansum(oprof_sub,axis = 1)
norm_oprof = np.outer(norm, np.ones(nsub))
oprof_sub = np.fmax(oprof_sub/norm_oprof, 0.0)
ivar_denom = np.nansum(mask_sub*oprof_sub, axis=1)
mivar_num = np.nansum(mask_sub*ivar_sub*oprof_sub**2, axis=1)
mivar_opt = mivar_num/(ivar_denom + (ivar_denom == 0.0))
flux_opt = np.nansum(mask_sub*ivar_sub*img_sub*oprof_sub, axis=1)/(mivar_num + (mivar_num == 0.0))
# Optimally extracted noise variance (sky + read noise) only. Since
# this variance is not the same as that used for the weights, we
# don't get the usual cancellation. Additional denom factor is the
# analog of the numerator in Horne's variance formula. Note that we
# are only weighting by the profile (ivar_sub=1) because
# otherwise the result depends on the signal (bad).
nivar_num = np.nansum(mask_sub*oprof_sub**2, axis=1) # Uses unit weights
if vno_sub is None:
nivar_opt = None
else:
nvar_opt = ivar_denom * np.nansum(mask_sub * vno_sub * oprof_sub**2, axis=1) \
/ (nivar_num**2 + (nivar_num**2 == 0.0))
nivar_opt = 1.0/(nvar_opt + (nvar_opt == 0.0))
# Optimally extract sky and (read noise)**2 in a similar way
sky_opt = ivar_denom*(np.nansum(mask_sub*sky_sub*oprof_sub**2, axis=1))/(nivar_num**2 + (nivar_num**2 == 0.0))
if base_var is None:
base_opt = None
else:
base_opt = ivar_denom * np.nansum(mask_sub * base_sub * oprof_sub**2, axis=1) \
/ (nivar_num**2 + (nivar_num**2 == 0.0))
base_opt = np.sqrt(base_opt)
import sys
import numpy as np
import matplotlib.pyplot as plt
from wordcloud import WordCloud, STOPWORDS, ImageColorGenerator
from PIL import Image, ImageDraw
from datetime import datetime
import random
from collections import OrderedDict
from parser import Parser
their_name: str = ""
def main():
global their_name
parser = Parser()
name: str = sys.argv[2] # name of JSON file
platform: str = sys.argv[1] # platform of messages
if platform == "messenger":
parser.parse_messenger(name)
elif platform == "whatsapp":
parser.parse_whatsapp(name)
me_message_count: int = parser.me_message_count
them_message_count: int = parser.them_message_count
me_word_count: int = parser.me_word_count
them_word_count: int = parser.them_word_count
my_val: dict = parser.my_val
their_val: dict = parser.their_val
total: dict = parser.total
messages: list = parser.messages
their_name = parser.their_name
###########################################################
############# Plotting ####################################
###########################################################
messages_per_person(me_message_count, them_message_count)
words_per_person(me_word_count, them_word_count)
# Exclusion list of basic words
exclusions: list = ["the", "to", "so", "do", "can", "are", "of", "on", "is", "that", "on", "is", "just", "in",
"it", "a", "it's", "and", "at", "for", "was", "but", "be", "as", "too", "this", "or", "did",
"with", "its", "i", "you", "u", "have", "if", "me", "he", "her", "your", "not"]
# Lists of the keys and values from the dict whose counts exceed 1000 and are not in the exclusion list
keys: list = [key for (key, value) in total.items() if value > 1000 and key not in exclusions]
values: list = [value for (key, value) in total.items() if value > 1000 and key not in exclusions]
# Sort the lists based on the values
keys = [x for _, x in sorted(zip(values, keys), reverse=True)]
values.sort(reverse=True)
values = np.array(values)
##### Resize image ##
fig_size = plt.rcParams["figure.figsize"]
fig_size[0] = 15
fig_size[1] = 10
plt.rcParams["figure.figsize"] = fig_size
#####################
my_values: list = []
their_values: list = []
for key in keys:
if key in my_val:
my_values.append(my_val[key])
else:
my_values.append(0)
if key in their_val:
their_values.append(their_val[key])
else:
their_values.append(0)
if platform == "messenger":
plot_bar_me(keys, my_values)
plot_bar_them(keys, their_values)
gen_start(messages[-1]["timestamp_ms"])
gen_wordcloud(total, exclusions)
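# Usage sketch (script and file names are illustrative):
# python analysis.py messenger message_1.json
# python analysis.py whatsapp chat.txt
# where sys.argv[1] selects the parser and sys.argv[2] is the export to analyse.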
# Produces the bar graph of your usage frequency for the most used words
def plot_bar_me(keys: list, values: list):
fig, ax = plt.subplots(facecolor="black")
y_pos: list = np.arange(len(keys))
ax.barh(y_pos, values, align="center", color="blue")
ax.set_yticks(y_pos)
ax.set_yticklabels("")
ax.invert_yaxis()
ax.set_xlabel("Frequency")
ax.set_clip_on(False)
ax.spines['bottom'].set_color("white")
ax.xaxis.label.set_color("white")
ax.tick_params(axis='x', colors="white")
plt.savefig("{}/me.svg".format(their_name), bbox_inches="tight", facecolor=fig.get_facecolor(), transparent=True)
# Produces the bar graph of their usage frequency for the most used words
def plot_bar_them(keys: list, values: list):
fig, ax = plt.subplots(facecolor="black")
y_pos: list = np.arange(len(keys))
ax.barh(y_pos, values, align="center", color="deeppink")
ax.set_yticks(y_pos)
ax.set_yticklabels(keys)
ax.invert_yaxis()
ax.invert_xaxis()
ax.set_xlabel("Frequency")
ax.set_clip_on(False)
ax.spines['bottom'].set_color("white")
ax.spines['left'].set_color("white")
ax.xaxis.label.set_color("white")
ax.tick_params(axis='x', colors="white")
ax.tick_params(axis='y', colors="white")
plt.savefig("{}/them.svg".format(their_name), bbox_inches="tight", facecolor=fig.get_facecolor(), transparent=True)
# Generate the wordcloud image from the input data
def gen_wordcloud(total: dict, exclusions: list):
cloud_dict: dict = OrderedDict()
for (k, v) in total.items():
# if v > 100 and k not in exclusions:
if v > 1 and k not in exclusions:
cloud_dict[k] = v
mask: np.array = np.array(Image.open("mother.png"))
# mask: np.array = np.array(Image.open("mask.png"))
# wordcloud = WordCloud(width=1000, height=500, relative_scaling=1, mask=mask).generate_from_frequencies(cloud_dict)
wordcloud = WordCloud(width=2000, height=2000, relative_scaling=1, mask=mask, background_color="white").generate_from_frequencies(cloud_dict)
wordcloud.recolor(color_func=recolour, random_state=3)
plt.imshow(wordcloud, interpolation='bilinear')
plt.axis("off")
wordcloud.to_file("{}/wordcloud.png".format(their_name))
# Recolour function to change the wordcloud text colours
# The RGB components are sampled from ranges lying between two chosen colours
def recolour(word, font_size, position, orientation, random_state=None, **kwargs):
# red: int = random.randint(0, 255)
# green: int = random.randint(0, 20)
# blue: int = random.randint(147, 255)
red: int = random.randint(0, 32)
green: int = random.randint(100, 178)
blue: int = random.randint(0, 170)
return "rgb({0}, {1}, {2})".format(red, green, blue)
# Used as a helper function to calculate the pie chart
def func(pct, allvals):
absolute = int(pct / 100. * np.sum(allvals))
"""Various utilities"""
from __future__ import annotations
import os
import pathlib
import re
import shutil
from datetime import datetime, timedelta
import geopandas as gpd
import geoutils as gu
import numpy as np
import pandas as pd
import xdem
def get_satellite_type(dem_path):
"""Parse the satellite type from the filename"""
basename = os.path.basename(dem_path)
if re.match(r"DEM\S*", basename):
sat_type = "ASTER"
elif re.match(r"\S*dem_mcf\S*", basename) is not None:
sat_type = "TDX"
else:
raise ValueError("Could not identify satellite type")
return sat_type
def decyear_to_date_time(decyear: float, leapyear=True, fannys_corr=False) -> datetime:
"""
Convert a decimal year to a datetime object.
If leapyear is set to True, use the actual number of days in the year; otherwise, use the average value of 365.25.
"""
# Get integer year and decimals
year = int(np.trunc(decyear))
decimals = decyear - year
# Convert to date and time
base = datetime(year, 1, 1)
ndays = base.replace(year=base.year + 1) - base
# Calculate final date, taking into account leap years or average 365.25 days
if leapyear:
date_time = base + timedelta(seconds=ndays.total_seconds() * decimals)
else:
date_time = base + timedelta(seconds=365.25 * 24 * 3600 * decimals)
# Apply a correction to correctly reverse Fanny's decyear which have ~1 day shift
if fannys_corr:
date_time -= timedelta(seconds=86399.975157)
return date_time
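# Example (illustrative): 2000 is a leap year with 366 days, so with leapyear=True
# decyear_to_date_time(2000.5) lands 183 days after 2000-01-01 00:00, i.e. 2000-07-02 00:00.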
def date_time_to_decyear(date_time: datetime, leapyear=True) -> float:
"""
Convert a datetime object to a decimal year.
If leapyear is set to True, use the actual number of days in the year; otherwise, use the average value of 365.25.
"""
base = datetime(date_time.year, 1, 1)
ddate = date_time - base
if leapyear:
ndays = (datetime(date_time.year + 1, 1, 1) - base).days
else:
ndays = 365.25
decyear = date_time.year + ddate.total_seconds() / (ndays * 24 * 3600)
return decyear
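# date_time_to_decyear is the inverse mapping: with the default leapyear=True,
# date_time_to_decyear(datetime(2000, 7, 2)) returns 2000 + 183/366 = 2000.5,
# so the two conversions round-trip.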
def fannys_convert_date_time_to_decimal_date(date_time):
"""
WARNING: this function is flawed, see https://github.com/adehecq/ragmac_xdem/pull/18.
Function used by <NAME> for decimal year conversion, from ragmac_xdem/data/raw/convert_dates.py.
Used only for checking that we're transforming the date back correctly.
----
This function converts a date and a time to a decimal date value
Inputs:
- date_time: datetime object
Outputs:
- decimal_date_float: float
"""
hourdec = (date_time.hour + date_time.minute / 60.0 + date_time.second / 3600.0) / 24.0
doy = date_time.timetuple().tm_yday
decimal_date = date_time.year + (doy + hourdec) / 365.25
decimal_date = float("{:.8f}".format(decimal_date))
return decimal_date
def get_aster_date(fname) -> datetime:
"""Parse the date of an ASTER DEM from the filename"""
# Extract string containing decimal year
basename = os.path.basename(fname)
decyear = float(basename[4:17])
# Convert to datetime
return decyear_to_date_time(decyear, leapyear=False, fannys_corr=True)
def get_tdx_date(fname: str) -> datetime:
"""Parse the date of a TDX DEM from the filename"""
# Extract string containing date and time
basename = os.path.basename(fname)
datetime_str = basename[:17]
# Convert to datetime
return datetime.strptime(datetime_str, "%Y-%m-%d_%H%M%S")
def get_dems_date(dem_path_list: list[str]) -> list:
"""
Returns a list of dates from a list of DEM paths.
:param dem_path_list: List of path to DEMs
:returns: The list of dates in datetime format
"""
dates = []
for dem_path in dem_path_list:
basename = os.path.basename(dem_path)
sat_type = get_satellite_type(dem_path)
# Get date
if sat_type == "ASTER":
dates.append(get_aster_date(dem_path))
elif sat_type == "TDX":
dates.append(get_tdx_date(dem_path))
return np.asarray(dates)
def select_dems_by_date(dem_path_list: list[str], date1: str, date2: str, sat_type: str) -> list:
"""
Returns the list of files which date falls within date1 and date 2 (included)
:param dem_path_list: List of path to DEMs
:param date1: Start date in ISO format YYYY-MM-DD
:param date2: End date in ISO format YYYY-MM-DD
:param sat_type: Either 'ASTER' or 'TDX'
:returns: The list of indexes that match the criteria
"""
if sat_type == "ASTER":
dates = np.asarray([get_aster_date(dem_file) for dem_file in dem_path_list])
elif sat_type == "TDX":
dates = np.asarray([get_tdx_date(dem_file) for dem_file in dem_path_list])
else:
raise ValueError("sat_type must be 'ASTER' or 'TDX'")
date1 = datetime.fromisoformat(date1)
date2 = datetime.fromisoformat(date2)
return np.where((date1 <= dates) & (dates <= date2))[0]
def best_dem_cover(dem_path_list: list, init_stats: pd.Series) -> list[str, float]:
"""
From a list of DEMs, returns the one with the best ROI coverage.
:params dem_path_list: list of DEMs path to be considered
:params init_stats: a pd.Series containing the statistics of all DEMs as returned by dem_postprocessing.calculate_init_stats_parallel.
:returns: path to the best DEM, ROI coverage
"""
# Extract stats for selected DEMs
stats_subset = init_stats.loc[np.isin(init_stats["dem_path"], dem_path_list)]
# Select highest ROI coverage
best = stats_subset.sort_values(by="roi_cover_orig").iloc[-1]
return best.dem_path, best.roi_cover_orig
def list_pairs(validation_dates):
"""
For a set of ndates dates, return a list of indexes and IDs for all possible unique pairs.
For example, for ndates=3 -> [(0, 1), (0,2), (1,2)]
"""
ndates = len(validation_dates)
indexes = []
pair_ids = []
for k1 in range(ndates):
for k2 in range(k1 + 1, ndates):
indexes.append((k1, k2))
date1 = validation_dates[k1]
date2 = validation_dates[k2]
pair_ids.append(f"{date1[:4]}_{date2[:4]}") # year1_year2)
return indexes, pair_ids
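# Example (illustrative): for validation_dates = ["2000-02-11", "2012-10-01", "2019-08-15"],
# list_pairs returns indexes = [(0, 1), (0, 2), (1, 2)] and
# pair_ids = ["2000_2012", "2000_2019", "2012_2019"].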
def dems_selection(
dem_path_list: list[str],
mode: str = None,
validation_dates: list[str] = None,
dt: float = -1,
months: list[int] = np.arange(12) + 1,
init_stats: pd.Series = None,
) -> list[list[str]]:
"""
Return a list of lists of DEM paths that fit the selection.
Selection modes include None, 'close', 'best' or 'subperiod'.
If None, return all DEMs.
If any other mode is set, `dt` and `validation_dates` must be set.
If 'close' is set, optionally `months` can be set. Returns all DEMs within dt days around each validation date, and within the selected months.
If 'subperiod' is set, returns all DEMs within each possible subperiods from pairs of validation_dates, +/- dt days.
If 'best' is set, 'init_stats' must be provided. Select DEMs based on the 'close' selection, but only returns a single DEM with the highest ROI coverage.
:param dem_path_list: List containing paths to all DEMs to be considered
:param mode: Any of None, 'close', 'subperiod' or 'best'.
:param validation_dates: List of validation dates for the experiment, dates expressed as 'yyyy-mm-dd'
:param dt: Number of days allowed around each validation date
:param months: A list of months to be selected (numbered 1 to 12). Default is all months.
:params init_stats: a pd.Series containing the statistics of all DEMs as returned by dem_postprocessing.calculate_init_stats_parallel.
:returns: List containing lists of DEM paths for each validation date. Same length as validation dates, or as the number of possible pair combinations for mode 'subperiod'.
"""
if mode is None:
print(f"Found {len(dem_path_list)} DEMs")
return [dem_path_list]
elif mode == "close" or mode == "best" or mode == "subperiod":
# check that optional arguments are set
assert validation_dates is not None, "`validation_dates` must be set"
assert dt >= 0, "dt must be set to >= 0 value"
# Get input DEM dates
dems_dates = get_dems_date(dem_path_list)
dems_months = np.asarray([date.month for date in dems_dates])
output_list = []
# Extract DEMs within all subperiods +/- buffer
if mode == "subperiod":
pairs, pair_ids = list_pairs(validation_dates)
for k1, k2 in pairs:
date1 = datetime.fromisoformat(validation_dates[k1]) - timedelta(dt)
date2 = datetime.fromisoformat(validation_dates[k2]) + timedelta(dt)
matching_dates = np.where((date1 <= dems_dates) & (dems_dates <= date2) & np.isin(dems_months, months))[
0
]
output_list.append(dem_path_list[matching_dates])
print(f"For period {validation_dates[k1]} - {validation_dates[k2]} found {len(matching_dates)} DEMs")
return output_list
# Compare to each validation date
for date_str in validation_dates:
date = datetime.fromisoformat(date_str)
date1 = date - timedelta(dt)
date2 = date + timedelta(dt)
matching_dates = np.where((date1 <= dems_dates) & (dems_dates <= date2) & np.isin(dems_months, months))[0]
output_list.append(dem_path_list[matching_dates])
if mode == "close":
for date, group in zip(validation_dates, output_list):
print(f"For date {date} found {len(group)} DEMs")
return output_list
else:
assert init_stats is not None, "`init_stats` must be provided for mode 'best'"
final_dem_list = []
for group in output_list:
selected_dem, _ = best_dem_cover(group, init_stats)
final_dem_list.append(
[
selected_dem,
]
)
return final_dem_list
else:
raise ValueError(f"Mode {mode} not recognized")
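# Usage sketch (paths and dates are illustrative; dem_path_list should support fancy
# indexing, e.g. a numpy array of paths):
# groups = dems_selection(np.asarray(dem_paths), mode="close",
# validation_dates=["2000-02-11", "2012-10-01"], dt=365)
# -> one list of DEM paths per validation date, each within +/- 365 days of that date.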
def load_ref_and_masks(case_paths: dict) -> list:
"""
Loads the reference DEM, outlines and masks of the ROI and stable terrain, from the dictionary provided by files.get_data_paths.
:returns:
- ref_dem (xdem.DEM object), all_outlines (gu.Vector object), roi_outlines (gu.Vector object), roi_mask (np.ndarray), stable_mask (np.ndarray)
"""
# Load reference DEM
ref_dem = xdem.DEM(case_paths["raw_data"]["ref_dem_path"])
# Load all outlines
all_outlines = gu.geovector.Vector(case_paths["raw_data"]["rgi_path"])
# Load selected glacier outline
roi_outlines = gu.geovector.Vector(case_paths["raw_data"]["selected_path"])
# Create masks
roi_mask = roi_outlines.create_mask(ref_dem)
stable_mask = ~all_outlines.create_mask(ref_dem)
return ref_dem, all_outlines, roi_outlines, roi_mask, stable_mask
"""
@author: friedrichknuth
"""
def OGGM_get_centerline(rgi_id, crs=None, return_longest_segment=False):
from oggm import cfg, graphics, utils, workflow
cfg.initialize(logging_level="CRITICAL")
rgi_ids = [rgi_id]
cfg.PATHS["working_dir"] = utils.gettempdir(dirname="OGGM-centerlines", reset=True)
# We start from prepro level 3 with all data ready - note the url here
base_url = (
"https://cluster.klima.uni-bremen.de/~oggm/gdirs/oggm_v1.4/L3-L5_files/CRU/centerlines/qc3/pcp2.5/no_match/"
)
gdirs = workflow.init_glacier_directories(rgi_ids, from_prepro_level=3, prepro_border=40, prepro_base_url=base_url)
gdir_cl = gdirs[0]
center_lines = gdir_cl.read_pickle("centerlines")
p = pathlib.Path("./rgi_tmp/")
p.mkdir(parents=True, exist_ok=True)
utils.write_centerlines_to_shape(gdir_cl, path="./rgi_tmp/tmp.shp")
gdf = gpd.read_file("./rgi_tmp/tmp.shp")
shutil.rmtree("./rgi_tmp/")
if crs:
gdf = gdf.to_crs(crs)
if return_longest_segment:
gdf = gdf[gdf["LE_SEGMENT"] == gdf["LE_SEGMENT"].max()]
return gdf
def get_largest_glacier_from_shapefile(shapefile, crs=None, get_longest_segment=False):
gdf = gpd.read_file(shapefile)
gdf = gdf[gdf["Area"] == gdf["Area"].max()]
if crs:
gdf = gdf.to_crs(crs)
return gdf
def extract_linestring_coords(linestring):
"""
Function to extract x, y coordinates from linestring object
Input:
shapely.geometry.linestring.LineString
Returns:
[x: np.array,y: np.array]
"""
x = []
y = []
for coords in linestring.coords:
x.append(coords[0])
y.append(coords[1])
return [np.array(x), np.array(y)]
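# Example (illustrative): for shapely.geometry.LineString([(0, 0), (1, 2)]),
# extract_linestring_coords returns [np.array([0., 1.]), np.array([0., 2.])].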
""" TODO: Break out these augmentations into submodules for easier reference.
TODO: Rewrite this code to be briefer. Take advantage of common python class structures
"""
import numpy as np
from scipy.sparse import csr_matrix
from deepneuro.utilities.util import add_parameter
class Augmentation(object):
def __init__(self, **kwargs):
# Instance Options
add_parameter(self, kwargs, 'data_groups', [])
# Repetition Options
add_parameter(self, kwargs, 'multiplier', None)
add_parameter(self, kwargs, 'total', None)
# Derived Parameters
self.output_shape = None
self.initialization = False
self.iteration = 0
self.data_groups = {data_group: None for data_group in self.data_groups}
self.augmentation_string = '_copy_'
self.load(kwargs)
return
def load(self, kwargs):
return
def set_multiplier(self, multiplier):
self.multiplier = multiplier
def augment(self, augmentation_num=0):
for label, data_group in self.data_groups.items():
data_group.augmentation_cases[augmentation_num + 1] = data_group.augmentation_cases[augmentation_num]
def initialize_augmentation(self):
if not self.initialization:
self.initialization = True
def iterate(self):
if self.multiplier is None:
return
self.iteration += 1
if self.iteration == self.multiplier:
self.iteration = 0
def reset(self, augmentation_num):
return
def append_data_group(self, data_group):
self.data_groups[data_group.label] = data_group
# Aliasing
Copy = Augmentation
class Flip_Rotate_2D(Augmentation):
""" TODO: extend to be more flexible and useful.
Ponder about how best to apply to multiple dimensions
"""
def load(self, kwargs):
# Flip and Rotate options
add_parameter(self, kwargs, 'flip', True)
add_parameter(self, kwargs, 'rotate', True)
add_parameter(self, kwargs, 'flip_axis', 2)
add_parameter(self, kwargs, 'rotate_axis', (1, 2))
# TODO: This is incredibly over-elaborate, return to fix.
self.transforms_list = []
if self.flip:
self.flip_list = [False, True]
else:
self.flip_list = [False]
if self.rotate:
self.rotations_90 = [0, 1, 2, 3]
else:
self.rotations_90 = [0]
self.available_transforms = np.array(np.meshgrid(self.flip_list, self.rotations_90)).T.reshape(-1, 2)
self.total_transforms = self.available_transforms.shape[0]
self.augmentation_string = '_rotate2D_'
def initialize_augmentation(self):
if not self.initialization:
for label, data_group in self.data_groups.items():
# Dealing with the time dimension.
if len(data_group.get_shape()) < 5:
self.flip_axis = 1
else:
self.flip_axis = -4
self.initialization = True
def augment(self, augmentation_num=0):
for label, data_group in self.data_groups.items():
if self.available_transforms[self.iteration % self.total_transforms, 0]:
data_group.augmentation_cases[augmentation_num + 1] = np.flip(data_group.augmentation_cases[augmentation_num], self.flip_axis)
else:
data_group.augmentation_cases[augmentation_num + 1] = data_group.augmentation_cases[augmentation_num]
if self.available_transforms[self.iteration % self.total_transforms, 1]:
data_group.augmentation_cases[augmentation_num + 1] = np.rot90(data_group.augmentation_cases[augmentation_num], self.available_transforms[self.iteration % self.total_transforms, 1], axes=self.rotate_axis)  # column 1 of available_transforms holds the number of 90-degree rotations
class Shift_Squeeze_Intensities(Augmentation):
""" TODO: extend to be more flexible and useful.
Ponder about how best to apply to multiple dimensions
"""
def load(self, kwargs):
# Flip and Rotate options
add_parameter(self, kwargs, 'shift', True)
add_parameter(self, kwargs, 'squeeze', True)
add_parameter(self, kwargs, 'shift_amount', [-.5, .5])
add_parameter(self, kwargs, 'squeeze_factor', [.7, 1.3])
# TODO: This is incredibly over-elaborate, return to fix.
self.transforms_list = []
if self.shift:
self.shift_list = [False, True]
else:
self.shift_list = [False]
if self.squeeze:
self.squeeze_list = [False, True]
else:
self.squeeze_list = [False]
self.available_transforms = np.array(np.meshgrid(self.shift_list, self.squeeze_list)).T.reshape(-1, 2)
self.total_transforms = self.available_transforms.shape[0]
self.augmentation_string = '_shift_squeeze_'
def augment(self, augmentation_num=0):
for label, data_group in self.data_groups.items():
if self.available_transforms[self.iteration % self.total_transforms, 0]:
data_group.augmentation_cases[augmentation_num + 1] = data_group.augmentation_cases[augmentation_num] + np.random.uniform(self.shift_amount[0], self.shift_amount[1])
else:
data_group.augmentation_cases[augmentation_num + 1] = data_group.augmentation_cases[augmentation_num]
if self.available_transforms[self.iteration % self.total_transforms, 1]:  # column 1 flags the squeeze transform
data_group.augmentation_cases[augmentation_num + 1] = data_group.augmentation_cases[augmentation_num] * np.random.uniform(self.squeeze_factor[0], self.squeeze_factor[1])
else:
data_group.augmentation_cases[augmentation_num + 1] = data_group.augmentation_cases[augmentation_num]
class Flip_Rotate_3D(Augmentation):
def load(self, kwargs):
"""
"""
# Flip and Rotate options
add_parameter(self, kwargs, 'rotation_axes', [1, 2, 3])
# Derived Parameters
self.rotation_generator = {}
self.augmentation_num = 0
def initialize_augmentation(self):
if not self.initialization:
for label, data_group in self.data_groups.items():
self.rotation_generator[label] = self.rotations24(data_group.augmentation_cases[0])
self.initialization = True
def rotations24(self, array):
while True:
# imagine shape is pointing in axis 0 (up)
# 4 rotations about axis 0
for i in range(4):
yield self.rot90_3d(array, i, self.rotation_axes[0])
# rotate 180 about axis 1, now shape is pointing down in axis 0
# 4 rotations about axis 0
rotated_array = self.rot90_3d(array, 2, axis=self.rotation_axes[1])
for i in range(4):
yield self.rot90_3d(rotated_array, i, self.rotation_axes[0])
# rotate 90 or 270 about axis 1, now shape is pointing in axis 2
# 8 rotations about axis 2
rotated_array = self.rot90_3d(array, axis=self.rotation_axes[1])
for i in range(4):
yield self.rot90_3d(rotated_array, i, self.rotation_axes[2])
rotated_array = self.rot90_3d(array, -1, axis=self.rotation_axes[1])
for i in range(4):
yield self.rot90_3d(rotated_array, i, self.rotation_axes[2])
# rotate about axis 2, now shape is pointing in axis 1
# 8 rotations about axis 1
rotated_array = self.rot90_3d(array, axis=self.rotation_axes[2])
for i in range(4):
yield self.rot90_3d(rotated_array, i, self.rotation_axes[1])
rotated_array = self.rot90_3d(array, -1, axis=self.rotation_axes[2])
for i in range(4):
yield self.rot90_3d(rotated_array, i, self.rotation_axes[1])
def rot90_3d(self, m, k=1, axis=2):
"""Rotate an array by 90 degrees in the counter-clockwise direction around the given axis"""
m = np.swapaxes(m, 2, axis)
m = np.rot90(m, k)
m = np.swapaxes(m, 2, axis)
return m
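# Note: one full cycle of the rotations24 generator above yields the 24
# orientation-preserving symmetries of a cube (4 rotations about each of the
# 6 possible "up" directions), composed from 90-degree rotations about the
# configured rotation_axes.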
def augment(self, augmentation_num=0):
# Hacky -- the rotation generator is weird here.
if augmentation_num != self.augmentation_num:
self.augmentation_num = augmentation_num
for label, data_group in self.data_groups.items():
self.rotation_generator[label] = self.rotations24(data_group.augmentation_cases[self.augmentation_num])
for label, data_group in self.data_groups.items():
data_group.augmentation_cases[augmentation_num + 1] = next(self.rotation_generator[label])
class ExtractPatches(Augmentation):
def load(self, kwargs):
# Patch Parameters
add_parameter(self, kwargs, 'patch_shape', None)
add_parameter(self, kwargs, 'patch_extraction_conditions', None)
add_parameter(self, kwargs, 'patch_region_conditions', None)
add_parameter(self, kwargs, 'patch_dimensions', {})
# Derived Parameters
self.patch_regions = []
self.patches = None
self.patch_corner = None
self.patch_slice = None
self.leading_dims = {}
self.input_shape = {}
self.output_shape = {} # Redundant
self.augmentation_string = '_patch_'
def initialize_augmentation(self):
""" There are some batch dimension problems with output_shape here. Hacky fixes for now, but revisit. TODO
"""
if not self.initialization:
# A weird way to proportionally divvy up patch conditions.
# TODO: Rewrite
self.condition_list = [None] * (self.multiplier)
self.region_list = [None] * (self.multiplier)
if self.patch_extraction_conditions is not None:
start_idx = 0
for condition_idx, patch_extraction_condition in enumerate(self.patch_extraction_conditions):
end_idx = start_idx + int(np.ceil(patch_extraction_condition[1] * self.multiplier))
self.condition_list[start_idx:end_idx] = [condition_idx] * (end_idx - start_idx)
start_idx = end_idx
if self.patch_region_conditions is not None:
start_idx = 0
for condition_idx, patch_region_condition in enumerate(self.patch_region_conditions):
end_idx = start_idx + int(np.ceil(patch_region_condition[1] * self.multiplier))
self.region_list[start_idx:end_idx] = [condition_idx] * (end_idx - start_idx)
start_idx = end_idx
for label, data_group in self.data_groups.items():
self.input_shape[label] = data_group.get_shape()
if label not in list(self.patch_dimensions.keys()):
# If no provided patch dimensions, just presume the format is [batch, patch_dimensions, channel]
# self.patch_dimensions[label] = [-4 + x for x in xrange(len(self.input_shape[label]) - 1)]
self.patch_dimensions[label] = [x + 1 for x in range(len(self.input_shape[label]) - 1)]
# This is a little goofy.
self.output_shape[label] = np.array(self.input_shape[label])
# self.output_shape[label][self.patch_dimensions[label]] = list(self.patch_shape)
self.output_shape[label][[x - 1 for x in self.patch_dimensions[label]]] = list(self.patch_shape)
self.output_shape[label] = tuple(self.output_shape[label])
# Batch dimension correction, revisit
# self.patch_dimensions[label] = [x + 1 for x in self.patch_dimensions[label]]
self.initialization = True
def iterate(self):
super(ExtractPatches, self).iterate()
self.generate_patch_corner()
def reset(self, augmentation_num=0):
self.patch_regions = []
region_input_data = {label: self.data_groups[label].augmentation_cases[augmentation_num] for label in list(self.data_groups.keys())}
if self.patch_region_conditions is not None:
for region_condition in self.patch_region_conditions:
# self.patch_regions += [np.where(region_condition[0](region_input_data))]
self.patch_regions += self.get_indices_sparse(region_condition[0](region_input_data))
return
def augment(self, augmentation_num=0):
# Any more sensible way to deal with this case?
if self.patches is None:
self.generate_patch_corner(augmentation_num)
for label, data_group in self.data_groups.items():
# A bit lengthy. Also unnecessarily rebuffers patches
data_group.augmentation_cases[augmentation_num + 1] = self.patches[label]
def generate_patch_corner(self, augmentation_num=0):
""" Think about how one could do this, say, with 3D and 4D volumes at the same time.
Also, patching across the modality dimension..? Interesting..
"""
# TODO: Escape clause in case acceptable patches cannot be found.
# acceptable_patch = False
if self.patch_region_conditions is None:
corner_idx = None
else:
region = self.patch_regions[self.region_list[self.iteration]]
# TODO: Make errors like these more ubiquitous.
if len(region[0]) == 0:
# raise ValueError('The region ' + str(self.patch_region_conditions[self.region_list[self.iteration]][0]) + ' has no voxels to select patches from. Please modify your patch-sampling region')
# Tempfix -- Eek
region = self.patch_regions[self.region_list[1]]
if len(region[0]) == 0:
print('emergency brain region..')
region = np.where(self.data_groups['input_data'].augmentation_cases[augmentation_num] != 0)
self.patch_regions[self.region_list[0]] = region
corner_idx = np.random.randint(len(region[0]))
self.patches = {}
# Pad edge patches.
for label, data_group in self.data_groups.items():
# TODO: Some redundancy here
if corner_idx is None:
corner = np.array([np.random.randint(0, self.input_shape[label][i]) for i in range(len(self.input_shape[label]))])[self.patch_dimensions[label]]
else:
corner = np.array([d[corner_idx] for d in region])[self.patch_dimensions[label]]
patch_slice = [slice(None)] * (len(self.input_shape[label]) + 1)
# Will run into problems with odd-shaped patches.
for idx, patch_dim in enumerate(self.patch_dimensions[label]):
patch_slice[patch_dim] = slice(max(0, corner[idx] - self.patch_shape[idx] // 2), corner[idx] + self.patch_shape[idx] // 2, 1)  # integer division keeps slice bounds as ints
input_shape = self.data_groups[label].augmentation_cases[augmentation_num].shape
self.patches[label] = self.data_groups[label].augmentation_cases[augmentation_num][tuple(patch_slice)]
# More complicated padding needed for center-voxel based patches.
pad_dims = [(0, 0)] * len(self.patches[label].shape)
for idx, patch_dim in enumerate(self.patch_dimensions[label]):
pad = [0, 0]
if corner[idx] > input_shape[patch_dim] - self.patch_shape[idx] // 2:
pad[1] = self.patch_shape[idx] // 2 - (input_shape[patch_dim] - corner[idx])
if corner[idx] < self.patch_shape[idx] // 2:
pad[0] = self.patch_shape[idx] // 2 - corner[idx]
pad_dims[patch_dim] = tuple(pad)
self.patches[label] = np.lib.pad(self.patches[label], tuple(pad_dims), 'edge')
return
def compute_M(self, data):
# Magic, vectorized sparse matrix calculation method to replace np.where
# https://stackoverflow.com/questions/33281957/faster-alternative-to-numpy-where
cols = np.arange(data.size)
return csr_matrix((cols, (data.ravel(), cols)), shape=(data.max() + 1, data.size))
def get_indices_sparse(self, data):
# Magic, vectorized sparse matrix calculation method to replace np.where
# https://stackoverflow.com/questions/33281957/faster-alternative-to-numpy-where
M = self.compute_M(data)
return [np.unravel_index(row.data, data.shape) for row in M]
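# Sketch of the behaviour: for data = np.array([[0, 1], [1, 0]]), get_indices_sparse
# returns one tuple of index arrays per integer label (here 0 and 1), roughly
# equivalent to [np.where(data == v) for v in range(data.max() + 1)] but built in a
# single pass over the array.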
class MaskData(Augmentation):
def load(self, kwargs):
# Add functionality for masking multiple axes.
# Mask Parameters
add_parameter(self, kwargs, 'mask_channels', {})
add_parameter(self, kwargs, 'num_masked', 1)
add_parameter(self, kwargs, 'masked_value', -10)
add_parameter(self, kwargs, 'random_sample', True)
# Derived Parameters
self.input_shape = {}
self.augmentation_string = '_mask_'
def initialize_augmentation(self):
if not self.initialization:
for label, data_group in self.data_groups.items():
self.mask_channels[label] = np.array(self.mask_channels[label])
# self.input_shape[label] = data_group.get_shape()
# if label not in self.mask_channels.keys():
# self.mask_channels[label] = np.arange(self.input_shape[label][-1])
# else:
# self.mask_channels[label] = np.arange(self.input_shape[label][self.mask_channels[label] + 1])
self.initialization = True
def iterate(self):
super(MaskData, self).iterate()
def augment(self, augmentation_num=0):
for label, data_group in self.data_groups.items():
if self.random_sample:
channels = np.random.choice(self.mask_channels[label], self.num_masked, replace=False)
else:
idx = [x % len(self.mask_channels[label]) for x in range(self.iteration, self.iteration + self.num_masked)]
channels = self.mask_channels[label][idx]
# Currently only works if applied to channels; revisit
masked_data = np.copy(data_group.augmentation_cases[augmentation_num])
# for channel in channels:
masked_data[..., channels] = self.masked_value
data_group.augmentation_cases[augmentation_num + 1] = masked_data
data_group.augmentation_strings[augmentation_num + 1] = data_group.augmentation_strings[augmentation_num] + self.augmentation_string + str(channels).strip('[]').replace(' ', '')
class ChooseData(Augmentation):
def load(self, kwargs):
# Add functionality for choosing multiple axes
# Choose Parameters
add_parameter(self, kwargs, 'axis', {})
add_parameter(self, kwargs, 'choices', None)
add_parameter(self, kwargs, 'num_chosen', 1)
add_parameter(self, kwargs, 'random_sample', True)
# Derived Parameters
self.input_shape = {}
self.augmentation_string = '_choose_'
def initialize_augmentation(self):
if not self.initialization:
self.choices = np.array(self.choices)
for label, data_group in self.data_groups.items():
input_shape = data_group.get_shape()
self.output_shape[label] = np.array(input_shape)
self.output_shape[label][self.axis[label]] = self.num_chosen
self.output_shape[label] = tuple(self.output_shape[label])
self.initialization = True
def iterate(self):
super(ChooseData, self).iterate()
def augment(self, augmentation_num=0):
choice = None # This is messed up
for label, data_group in self.data_groups.items():
# Wrote this function while half-asleep; revisit
input_data = data_group.augmentation_cases[augmentation_num]
if self.choices is None:
choices = np.arange(input_data.shape[self.axis[label]])
else:
choices = self.choices
if choice is None:
if self.random_sample:
choice = np.random.choice(choices, self.num_chosen, replace=False)
else:
idx = [x % len(choices) for x in range(self.iteration, self.iteration + self.num_chosen)]
choice = choices[idx]
# Temporary
if input_data.shape[-1] == 6:
choice = choice.tolist()
choice = list(range(4)) + choice
choice_slice = [slice(None)] * (len(input_data.shape))
choice_slice[self.axis[label]] = choice
# Currently only works if applied to channels; revisit
data_group.augmentation_cases[augmentation_num + 1] = input_data[tuple(choice_slice)]
data_group.augmentation_strings[augmentation_num + 1] = data_group.augmentation_strings[augmentation_num] + self.augmentation_string + str(choice).strip('[]').replace(' ', '')
class Downsample(Augmentation):
def load(self, kwargs):
# A lot of this functionality is vague and messy, revisit
# Downsample Parameters
add_parameter(self, kwargs, 'channel', 0)
add_parameter(self, kwargs, 'axes', {})
add_parameter(self, kwargs, 'factor', 2)
add_parameter(self, kwargs, 'random_sample', True)
add_parameter(self, kwargs, 'num_downsampled', 1)
self.input_shape = {}
self.augmentation_string = '_resample_'
def initialize_augmentation(self):
if not self.initialization:
for label, data_group in self.data_groups.items():
self.input_shape[label] = data_group.get_shape()
self.initialization = True
def iterate(self):
super(Downsample, self).iterate()
def augment(self, augmentation_num=0):
for label, data_group in self.data_groups.items():
if self.random_sample:
axes = np.random.choice(self.axes[label], self.num_downsampled, replace=False)
else:
idx = [x % len(self.axes[label]) for x in range(self.iteration, self.iteration + self.num_downsampled)]
axes = np.array(self.axes[label])[idx]
resampled_data = np.copy(data_group.augmentation_cases[augmentation_num])
# This module has been generated automatically from space group information
# obtained from the Computational Crystallography Toolbox
#
"""
Space groups
This module contains a list of all the 230 space groups that can occur in
a crystal. The variable space_groups contains a dictionary that maps
space group numbers and space group names to the corresponding space
group objects.
.. moduleauthor:: <NAME> <<EMAIL>>
"""
#-----------------------------------------------------------------------------
# Copyright (C) 2013 The Mosaic Development Team
#
# Distributed under the terms of the BSD License. The full license is in
# the file LICENSE.txt, distributed as part of this software.
#-----------------------------------------------------------------------------
import numpy as N
class SpaceGroup(object):
"""
Space group
All possible space group objects are created in this module. Other
modules should access these objects through the dictionary
space_groups rather than create their own space group objects.
"""
def __init__(self, number, symbol, transformations):
"""
:param number: the number assigned to the space group by
international convention
:type number: int
:param symbol: the Hermann-Mauguin space-group symbol as used
in PDB and mmCIF files
:type symbol: str
:param transformations: a list of space group transformations,
each consisting of a tuple of three
integer arrays (rot, tn, td), where
rot is the rotation matrix and tn/td
are the numerator and denominator of the
translation vector. The transformations
are defined in fractional coordinates.
:type transformations: list
"""
self.number = number
self.symbol = symbol
self.transformations = transformations
self.transposed_rotations = N.array([N.transpose(t[0])
for t in transformations])
self.phase_factors = N.exp(N.array([(-2j*N.pi*t[1])/t[2]
for t in transformations]))
def __repr__(self):
return "SpaceGroup(%d, %s)" % (self.number, repr(self.symbol))
def __len__(self):
"""
:return: the number of space group transformations
:rtype: int
"""
return len(self.transformations)
def symmetryEquivalentMillerIndices(self, hkl):
"""
:param hkl: a set of Miller indices
:type hkl: Scientific.N.array_type
:return: a tuple (miller_indices, phase_factor) of two arrays
of length equal to the number of space group
transformations. miller_indices contains the Miller
indices of each reflection equivalent by symmetry to the
reflection hkl (including hkl itself as the first element).
phase_factor contains the phase factors that must be applied
to the structure factor of reflection hkl to obtain the
structure factor of the symmetry equivalent reflection.
:rtype: tuple
"""
hkls = N.dot(self.transposed_rotations, hkl)
p = N.multiply.reduce(self.phase_factors**hkl, -1)
return hkls, p
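# Usage sketch (illustrative): groups can be looked up by number or by Hermann-Mauguin
# symbol once the dictionary below is populated, e.g.
# sg = space_groups['P 21 21 21']  # equivalently space_groups[19]
# hkls, phases = sg.symmetryEquivalentMillerIndices(N.array([1, 2, 3]))
# len(hkls) == len(sg)  # one equivalent reflection (and phase factor) per transformation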
space_groups = {}
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(1, 'P 1', transformations)
space_groups[1] = sg
space_groups['P 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(2, 'P -1', transformations)
space_groups[2] = sg
space_groups['P -1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(3, 'P 1 2 1', transformations)
space_groups[3] = sg
space_groups['P 1 2 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(4, 'P 1 21 1', transformations)
space_groups[4] = sg
space_groups['P 1 21 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(5, 'C 1 2 1', transformations)
space_groups[5] = sg
space_groups['C 1 2 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(6, 'P 1 m 1', transformations)
space_groups[6] = sg
space_groups['P 1 m 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(7, 'P 1 c 1', transformations)
space_groups[7] = sg
space_groups['P 1 c 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(8, 'C 1 m 1', transformations)
space_groups[8] = sg
space_groups['C 1 m 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(9, 'C 1 c 1', transformations)
space_groups[9] = sg
space_groups['C 1 c 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(10, 'P 1 2/m 1', transformations)
space_groups[10] = sg
space_groups['P 1 2/m 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(11, 'P 1 21/m 1', transformations)
space_groups[11] = sg
space_groups['P 1 21/m 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(12, 'C 1 2/m 1', transformations)
space_groups[12] = sg
space_groups['C 1 2/m 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(13, 'P 1 2/c 1', transformations)
space_groups[13] = sg
space_groups['P 1 2/c 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,-1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(14, 'P 1 21/c 1', transformations)
space_groups[14] = sg
space_groups['P 1 21/c 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,-1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(15, 'C 1 2/c 1', transformations)
space_groups[15] = sg
space_groups['C 1 2/c 1'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(16, 'P 2 2 2', transformations)
space_groups[16] = sg
space_groups['P 2 2 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(17, 'P 2 2 21', transformations)
space_groups[17] = sg
space_groups['P 2 2 21'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(18, 'P 21 21 2', transformations)
space_groups[18] = sg
space_groups['P 21 21 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(19, 'P 21 21 21', transformations)
space_groups[19] = sg
space_groups['P 21 21 21'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(20, 'C 2 2 21', transformations)
space_groups[20] = sg
space_groups['C 2 2 21'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(21, 'C 2 2 2', transformations)
space_groups[21] = sg
space_groups['C 2 2 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(22, 'F 2 2 2', transformations)
space_groups[22] = sg
space_groups['F 2 2 2'] = sg
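# Note: for centred lattices the table lists every operation explicitly rather than
# storing centring vectors separately; F 2 2 2 above repeats its four point-group
# operations once for each centring translation (0,0,0), (0,1/2,1/2), (1/2,0,1/2)
# and (1/2,1/2,0), giving 16 entries in total.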
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(23, 'I 2 2 2', transformations)
space_groups[23] = sg
space_groups['I 2 2 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(24, 'I 21 21 21', transformations)
space_groups[24] = sg
space_groups['I 21 21 21'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(25, 'P m m 2', transformations)
space_groups[25] = sg
space_groups['P m m 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(26, 'P m c 21', transformations)
space_groups[26] = sg
space_groups['P m c 21'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(27, 'P c c 2', transformations)
space_groups[27] = sg
space_groups['P c c 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(28, 'P m a 2', transformations)
space_groups[28] = sg
space_groups['P m a 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(29, 'P c a 21', transformations)
space_groups[29] = sg
space_groups['P c a 21'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(30, 'P n c 2', transformations)
space_groups[30] = sg
space_groups['P n c 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(31, 'P m n 21', transformations)
space_groups[31] = sg
space_groups['P m n 21'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(32, 'P b a 2', transformations)
space_groups[32] = sg
space_groups['P b a 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(33, 'P n a 21', transformations)
space_groups[33] = sg
space_groups['P n a 21'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(34, 'P n n 2', transformations)
space_groups[34] = sg
space_groups['P n n 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(35, 'C m m 2', transformations)
space_groups[35] = sg
space_groups['C m m 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(36, 'C m c 21', transformations)
space_groups[36] = sg
space_groups['C m c 21'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(37, 'C c c 2', transformations)
space_groups[37] = sg
space_groups['C c c 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(38, 'A m m 2', transformations)
space_groups[38] = sg
space_groups['A m m 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(39, 'A b m 2', transformations)
space_groups[39] = sg
space_groups['A b m 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(40, 'A m a 2', transformations)
space_groups[40] = sg
space_groups['A m a 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(41, 'A b a 2', transformations)
space_groups[41] = sg
space_groups['A b a 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(42, 'F m m 2', transformations)
space_groups[42] = sg
space_groups['F m m 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,3,3])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,3,3])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([3,1,3])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([3,1,3])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([3,3,1])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([3,3,1])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(43, 'F d d 2', transformations)
space_groups[43] = sg
space_groups['F d d 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(44, 'I m m 2', transformations)
space_groups[44] = sg
space_groups['I m m 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(45, 'I b a 2', transformations)
space_groups[45] = sg
space_groups['I b a 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(46, 'I m a 2', transformations)
space_groups[46] = sg
space_groups['I m a 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(47, 'P m m m', transformations)
space_groups[47] = sg
space_groups['P m m m'] = sg
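# From space group 47 (P m m m) onward the orthorhombic groups are centrosymmetric:
# each block also lists the inversion [-1,0,0,0,-1,0,0,0,-1] and the mirror/glide
# operations obtained by combining it with the rotations above.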
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,-1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(48, 'P n n n :2', transformations)
space_groups[48] = sg
space_groups['P n n n :2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(49, 'P c c m', transformations)
space_groups[49] = sg
space_groups['P c c m'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(50, 'P b a n :2', transformations)
space_groups[50] = sg
space_groups['P b a n :2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(51, 'P m m a', transformations)
space_groups[51] = sg
space_groups['P m m a'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,-1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,-1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(52, 'P n n a', transformations)
space_groups[52] = sg
space_groups['P n n a'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(53, 'P m n a', transformations)
space_groups[53] = sg
space_groups['P m n a'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(54, 'P c c a', transformations)
space_groups[54] = sg
space_groups['P c c a'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(55, 'P b a m', transformations)
space_groups[55] = sg
space_groups['P b a m'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,-1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(56, 'P c c n', transformations)
space_groups[56] = sg
space_groups['P c c n'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,-1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(57, 'P b c m', transformations)
space_groups[57] = sg
space_groups['P b c m'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,-1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,-1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(58, 'P n n m', transformations)
space_groups[58] = sg
space_groups['P n n m'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(59, 'P m m n :2', transformations)
space_groups[59] = sg
space_groups['P m m n :2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,-1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(60, 'P b c n', transformations)
space_groups[60] = sg
space_groups['P b c n'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,-1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(61, 'P b c a', transformations)
space_groups[61] = sg
space_groups['P b c a'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,-1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(62, 'P n m a', transformations)
space_groups[62] = sg
space_groups['P n m a'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,-1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,-1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(63, 'C m c m', transformations)
space_groups[63] = sg
space_groups['C m c m'] = sg
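# Space group 64: C m c a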
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,-1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,-1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(64, 'C m c a', transformations)
space_groups[64] = sg
space_groups['C m c a'] = sg
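# Space group 65: C m m m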
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(65, 'C m m m', transformations)
space_groups[65] = sg
space_groups['C m m m'] = sg
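# Space group 66: C c c m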
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,-1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,-1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(66, 'C c c m', transformations)
space_groups[66] = sg
space_groups['C c c m'] = sg
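# Space group 67: C m m a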
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(67, 'C m m a', transformations)
space_groups[67] = sg
space_groups['C m m a'] = sg
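# Space group 68: C c c a :2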
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,-1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,-1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(68, 'C c c a :2', transformations)
space_groups[68] = sg
space_groups['C c c a :2'] = sg
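# Space group 69: F m m m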
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(69, 'F m m m', transformations)
space_groups[69] = sg
space_groups['F m m m'] = sg
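# Space group 70: F d d d :2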
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([4,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([4,4,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,-1])
trans_den = N.array([1,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([4,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,0])
trans_den = N.array([4,4,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,3,3])
trans_den = N.array([1,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,3])
trans_den = N.array([4,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,3,1])
trans_den = N.array([4,4,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,1,1])
trans_den = N.array([4,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,1,1])
trans_den = N.array([4,4,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,3])
trans_den = N.array([2,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([3,0,3])
trans_den = N.array([4,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([3,1,1])
trans_den = N.array([4,4,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,-1,1])
trans_den = N.array([2,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([4,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,-1,1])
trans_den = N.array([4,4,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,3,1])
trans_den = N.array([2,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([3,1,1])
trans_den = N.array([4,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([3,3,0])
trans_den = N.array([4,4,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,-1])
trans_den = N.array([2,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,-1])
trans_den = N.array([4,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([4,4,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(70, 'F d d d :2', transformations)
space_groups[70] = sg
space_groups['F d d d :2'] = sg
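# Space group 71: I m m m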
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(71, 'I m m m', transformations)
space_groups[71] = sg
space_groups['I m m m'] = sg
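# Space group 72: I b a m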
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(72, 'I b a m', transformations)
space_groups[72] = sg
space_groups['I b a m'] = sg
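# Space group 73: I b c a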
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(73, 'I b c a', transformations)
space_groups[73] = sg
space_groups['I b c a'] = sg
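# Space group 74: I m m a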
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(74, 'I m m a', transformations)
space_groups[74] = sg
space_groups['I m m a'] = sg
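# Space group 75: P 4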
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(75, 'P 4', transformations)
space_groups[75] = sg
space_groups['P 4'] = sg
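# Space group 76: P 41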
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,3])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(76, 'P 41', transformations)
space_groups[76] = sg
space_groups['P 41'] = sg
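# Space group 77: P 42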
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(77, 'P 42', transformations)
space_groups[77] = sg
space_groups['P 42'] = sg
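# Space group 78: P 43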
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,3])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(78, 'P 43', transformations)
space_groups[78] = sg
space_groups['P 43'] = sg
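# Space group 79: I 4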
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(79, 'I 4', transformations)
space_groups[79] = sg
space_groups['I 4'] = sg
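# Space group 80: I 41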
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,3])
trans_den = N.array([2,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,3])
trans_den = N.array([2,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,5])
trans_den = N.array([1,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,5])
trans_den = N.array([1,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(80, 'I 41', transformations)
space_groups[80] = sg
space_groups['I 41'] = sg
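# Space group 81: P -4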
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(81, 'P -4', transformations)
space_groups[81] = sg
space_groups['P -4'] = sg
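# Space group 82: I -4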
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(82, 'I -4', transformations)
space_groups[82] = sg
space_groups['I -4'] = sg
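# Space group 83: P 4/m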
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(83, 'P 4/m', transformations)
space_groups[83] = sg
space_groups['P 4/m'] = sg
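# Space group 84: P 42/m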
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,-1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(84, 'P 42/m', transformations)
space_groups[84] = sg
space_groups['P 42/m'] = sg
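# Space group 85: P 4/n :2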
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,0])
trans_den = N.array([2,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(85, 'P 4/n :2', transformations)
space_groups[85] = sg
space_groups['P 4/n :2'] = sg
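# Space group 86: P 42/n :2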
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,-1])
trans_den = N.array([1,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,0,-1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(86, 'P 42/n :2', transformations)
space_groups[86] = sg
space_groups['P 42/n :2'] = sg
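# Space group 87: I 4/m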
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(87, 'I 4/m', transformations)
space_groups[87] = sg
space_groups['I 4/m'] = sg
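# Space group 88: I 41/a :2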
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,3,3])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,-3,-3])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([-1,-1,-1])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,-1,0])
trans_den = N.array([1,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([3,5,5])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([3,3,3])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,-1,-1])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([4,4,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,0,1])
trans_den = N.array([2,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(88, 'I 41/a :2', transformations)
space_groups[88] = sg
space_groups['I 41/a :2'] = sg
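# Space group 89: P 4 2 2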
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(89, 'P 4 2 2', transformations)
space_groups[89] = sg
space_groups['P 4 2 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,0])
trans_den = N.array([2,2,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(90, 'P 4 21 2', transformations)
space_groups[90] = sg
space_groups['P 4 21 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,3])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,3])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(91, 'P 41 2 2', transformations)
space_groups[91] = sg
space_groups['P 41 2 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,3])
trans_den = N.array([2,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,3])
trans_den = N.array([2,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(92, 'P 41 21 2', transformations)
space_groups[92] = sg
space_groups['P 41 21 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(93, 'P 42 2 2', transformations)
space_groups[93] = sg
space_groups['P 42 2 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(94, 'P 42 21 2', transformations)
space_groups[94] = sg
space_groups['P 42 21 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,3])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,3])
trans_den = N.array([1,1,4])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(95, 'P 43 2 2', transformations)
space_groups[95] = sg
space_groups['P 43 2 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,3])
trans_den = N.array([2,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,3])
trans_den = N.array([2,2,4])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,1])
trans_den = N.array([1,1,2])
transformations.append((rot, trans_num, trans_den))
sg = SpaceGroup(96, 'P 43 21 2', transformations)
space_groups[96] = sg
space_groups['P 43 21 2'] = sg
transformations = []
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([0,0,0])
trans_den = N.array([1,1,1])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,-1,0,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([1,0,0,0,-1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,1,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([-1,0,0,0,-1,0,0,0,1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,1,0,1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
transformations.append((rot, trans_num, trans_den))
rot = N.array([0,-1,0,-1,0,0,0,0,-1])
rot.shape = (3, 3)
trans_num = N.array([1,1,1])
trans_den = N.array([2,2,2])
from __future__ import division
import math
import time
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.autograd import Variable
import numpy as np
from skimage.transform import resize
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import visdom
class Visualizer():
"""
    Wraps the basic visdom operations; the native visdom interface is still
    available through `self.vis.function`.
"""
def __init__(self, env='default', **kwargs):
import visdom
self.vis = visdom.Visdom(env=env, use_incoming_socket=False, **kwargs)
        # index of the next point for each plot, i.e. its x-coordinate
        # e.g. stores ('loss', 23): the 23rd point of the 'loss' curve
self.index = {}
self.log_text = ''
def reinit(self, env='default', **kwargs):
"""
        Change the visdom configuration (re-creates the connection).
"""
self.vis = visdom.Visdom(env=env,use_incoming_socket=False, **kwargs)
return self
def plot_many(self, d):
"""
        Plot several values at once.
@params d: dict (name,value) i.e. ('loss',0.11)
"""
for k, v in d.items():
self.plot(k, v)
def img_many(self, d):
for k, v in d.items():
self.img(k, v)
def plot(self, name, y):
"""
self.plot('loss',1.00)
"""
x = self.index.get(name, 0)
        self.vis.line(Y=np.array([y]), X=np.array([x]),
                      win=name,
                      opts=dict(title=name),
                      update=None if x == 0 else 'append')
        self.index[name] = x + 1
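# Hedged usage sketch for the wrapper above; it assumes a visdom server is
# reachable on the default port and uses 'demo' as an arbitrary environment name.
if __name__ == '__main__':
    _vis = Visualizer(env='demo')
    for _step in range(10):
        _vis.plot('loss', 1.0 / (_step + 1))      # appends one point per call
    _vis.plot_many({'acc': 0.5, 'lr': 0.01})      # several curves at once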
import os
import subprocess
import pickle
import matplotlib
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
import scipy as sc
import pathlib
import threading
import concurrent.futures as cf
from scipy.signal import medfilt
import csv
import tikzplotlib
import encoders_comparison_tool as enc
import video_info as vi
from bj_delta import bj_delta, bj_delta_akima
# Colors in terminal
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKCYAN = '\033[96m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
useage_log_suffix = "_useage.log"
psnr_log_suffix = "-psnr_logfile.txt"
ssim_log_suffix = "-ssim_logfile.txt"
vmaf_log_suffix = "-vmaf_logfile.txt"
videofiles = []
codecs = ["av1", "svtav1", "vp9", "x264", "x265", "vvc"]
codecs_short = {"av1": "AV1", "svtav1": "SVT-AV1", "vp9": "VP9", "x264": "x264", "x265": "x265", "vvc": "VVenC",}
sequences = ["Netflix Aerial yuv420p10le 60fps",
"ShakeNDry yuv420p 30fps",
"SunBath yuv420p10le 50fps",
"Tree Shade yuv420p10le 30fps",
"Sintel2 yuv420p10le 24fps",
]
preset = ["preset"]
top_dir = "/run/media/ondra/video/test2/"
# top_dir = "/run/media/ondra/61597e72-9c9f-4edd-afab-110602521f55/test2/"
graphics_dir = "graphs/"
sequences_short = {
"Netflix Aerial yuv420p10le 60fps": "Aerial",
"ShakeNDry yuv420p 30fps": "ShakeNDry",
"SunBath yuv420p10le 50fps": "SunBath",
"Tree Shade yuv420p10le 30fps": "Tree Shade",
"Sintel2 yuv420p10le 24fps": "Sintel2",
}
series_labels = {
'av1-cpu-used_3-': "AV1 cpu-used 3",
'av1-cpu-used_4-': "AV1 cpu-used 4",
'av1-cpu-used_5-': "AV1 cpu-used 5",
'av1-cpu-used_6-': "AV1 cpu-used 6",
'svtav1-preset_3-': "SVT-AV1 preset 3",
'svtav1-preset_5-': "SVT-AV1 preset 5",
'svtav1-preset_7-': "SVT-AV1 preset 7",
'svtav1-preset_9-': "SVT-AV1 preset 9",
'svtav1-preset_11-': "SVT-AV1 preset 11",
'svtav1-preset_13-': "SVT-AV1 preset 13",
'vp9-rc_0-': "VP9 RC 0",
'vp9-cpu-used_0-': "VP9 cpu-used 0",
'vp9-cpu-used_2-': "VP9 cpu-used 2",
'vp9-cpu-used_4-': "VP9 cpu-used 4",
# 'x264-preset_ultrafast-': "x264 ultrafast",
'x264-preset_fast-': "x264 fast",
'x264-preset_medium-': "x264 medium",
'x264-preset_slow-': "x264 slow",
'x264-preset_veryslow-': "x264 veryslow",
'x264-preset_placebo-': "x264 placebo",
'x265-preset_ultrafast-': "x265 ultrafast",
'x265-preset_fast-': "x265 fast",
'x265-preset_medium-': "x265 medium",
'x265-preset_slow-': "x265 slow",
'x265-preset_veryslow-': "x265 veryslow",
'vvc-preset_faster-': "VVenC faster",
'vvc-preset_fast-': "VVenC fast",
'vvc-preset_medium-': "VVenC medium",
}
psnr_lim = {
"Netflix Aerial yuv420p10le 60fps": (33, 47),
"ShakeNDry yuv420p 30fps": (33, 44),
"Sintel2 yuv420p10le 24fps": (40, 60),
"SunBath yuv420p10le 50fps": (35, 55),
"Tree Shade yuv420p10le 30fps": (35, 45),
}
ssim_lim = {
"Netflix Aerial yuv420p10le 60fps": (0.9, 1),
"ShakeNDry yuv420p 30fps": (0.9, 0.98),
"Sintel2 yuv420p10le 24fps": (0.98, 1),
"SunBath yuv420p10le 50fps": (0.94, 1),
"Tree Shade yuv420p10le 30fps": (0.92, 0.99),
}
msssim_lim = {
"Netflix Aerial yuv420p10le 60fps": (0.9, 1),
"ShakeNDry yuv420p 30fps": (0.92, 1),
"Sintel2 yuv420p10le 24fps": (0.98, 1),
"SunBath yuv420p10le 50fps": (0.94, 1),
"Tree Shade yuv420p10le 30fps": (0.96, 1),
}
vmaf_lim = {
"Netflix Aerial yuv420p10le 60fps": (60, 100),
"ShakeNDry yuv420p 30fps": (70, 100),
"Sintel2 yuv420p10le 24fps": (70, 100),
"SunBath yuv420p10le 50fps": (70, 100),
"Tree Shade yuv420p10le 30fps": (80, 100),
}
bitrate_lim = {
"Netflix Aerial yuv420p10le 60fps": (0, 150),
"ShakeNDry yuv420p 30fps": (0, 200),
"Sintel2 yuv420p10le 24fps": (0, 45),
"SunBath yuv420p10le 50fps": (0, 150),
"Tree Shade yuv420p10le 30fps": (0, 200),
}
bitrate_lim_log = {
"Netflix Aerial yuv420p10le 60fps": (0.1, 1000),
"ShakeNDry yuv420p 30fps": (0.1, 1000),
"SunBath yuv420p10le 50fps": (0.1, 1000),
"Tree Shade yuv420p10le 30fps": (0.1, 1000),
"Sintel2 yuv420p10le 24fps": (0.1, 100),
}
processing_lim = {
"Netflix Aerial yuv420p10le 60fps": (0, 50000),
"ShakeNDry yuv420p 30fps": (0, 8000),
"SunBath yuv420p10le 50fps": (0, 5000),
"Tree Shade yuv420p10le 30fps": (0, 12000),
"Sintel2 yuv420p10le 24fps": (0, 12000),
}
processing_lim_log = {
"Netflix Aerial yuv420p10le 60fps": (1, 1000),
"ShakeNDry yuv420p 30fps": (1, 10000),
"SunBath yuv420p10le 50fps": (1, 1000),
"Tree Shade yuv420p10le 30fps": (1, 1000),
"Sintel2 yuv420p10le 24fps": (1, 1000),
}
cpu_time_lim = {
"Netflix Aerial yuv420p10le 60fps": (0, 200000),
"ShakeNDry yuv420p 30fps": (0, 60000),
"SunBath yuv420p10le 50fps": (0, 35000),
"Tree Shade yuv420p10le 30fps": (0, 70000),
"Sintel2 yuv420p10le 24fps": (0, 70000),
}
cpu_time_lim_log = {
"Netflix Aerial yuv420p10le 60fps": (0.1, 1000),
"ShakeNDry yuv420p 30fps": (0.1, 10000),
"SunBath yuv420p10le 50fps": (0.1, 1000),
"Tree Shade yuv420p10le 30fps": (0.1, 1000),
"Sintel2 yuv420p10le 24fps": (0.1, 1000),
}
cpu_fps_lim = {
"Netflix Aerial yuv420p10le 60fps": (0, 200),
"ShakeNDry yuv420p 30fps": (0, 200),
"SunBath yuv420p10le 50fps": (0, 200),
"Tree Shade yuv420p10le 30fps": (0, 200),
"Sintel2 yuv420p10le 24fps": (0, 200),
}
decode_fps_lim = {
"Netflix Aerial yuv420p10le 60fps": (0, None),
"ShakeNDry yuv420p 30fps": (0, 60),
"SunBath yuv420p10le 50fps": (0, 60),
"Tree Shade yuv420p10le 30fps": (0, 60),
"Sintel2 yuv420p10le 24fps": (0, 60),
}
BJ1_serie = "x264-preset_placebo-"
BD_xname = "avg_bitrate_mb"
BD_ynames = ["psnr_avg", "ssim_avg", "msssim_avg", "vmaf_avg"]
BD_names = []
for n in BD_ynames:
# BD_names.append("bd_" + n)
BD_names.append("bd_rate_" + n)
encode_excluded_states = ["measuring decode"]
speeds_table = {
"placebo": 0,
"slow": 3,
"slower": 2,
"veryslow": 1,
"medium": 4,
"fast": 5,
"faster": 6,
"veryfast": 7,
"superfast": 8,
"ultrafast": 9,
}
binaries = {
"ffprobe": "/usr/bin/ffprobe",
"ffmpeg": "/usr/bin/ffmpeg"
}
vi.set_defaults(binaries)
def video_stream_size(videofile_path):
if videofile_path.endswith(".266"):
return os.path.getsize(videofile_path[0:-4] + ".266") / 1024 #in KiB
log = videofile_path + ".stream_size"
if os.path.exists(log):
with open(log, "r") as f:
s = f.readline()
print("stream size hit!")
return float(s)
result = subprocess.run(
[
"ffmpeg",
"-hide_banner",
"-i", videofile_path,
"-map", "0:v:0",
"-c", "copy",
"-f", "null", "-"
],
capture_output=True,
text=True,
)
try:
size = (result.stderr.rsplit("\n")[-2].rsplit(" ")[0].rsplit(":")[1][0: -2])
s = float(size) # in KiB
with open(log, "w") as f:
f.write(str(s))
return s
except ValueError:
raise ValueError(result.stderr.rstrip("\n"))
def video_stream_length(videofile_path):
if videofile_path.endswith(".266"):
videofile = videofile_path[:-4] + ".mkv"
else:
videofile = videofile_path
log = videofile + ".stream_length"
if os.path.exists(log):
with open(log, "r") as f:
s = f.readline()
print("stream length hit!")
return float(s)
result = vi.video_length_seconds(videofile)
with open(log, "w") as f:
f.write(str(result))
return result
def video_stream_frames(videofile_path):
if videofile_path.endswith(".266"):
videofile = videofile_path[:-4] + ".mkv"
else:
videofile = videofile_path
log = videofile + ".stream_frames"
if os.path.exists(log):
with open(log, "r") as f:
s = f.readline()
print("stream framenum hit!")
return int(s)
result = vi.video_frames(videofile)
with open(log, "w") as f:
f.write(str(result))
return result
def series_label(key, sequence=None):
if sequence is None or sequence in key:
k = series_labels.keys()
for s in (s for s in k if s in key):
return series_labels[s]
raise KeyError
'''
def simple_plot(x, y, xlabel, ylabel, savefile, minxlim=True):
i1, ax1 = plt.subplots()
plt.plot(x, y)
ax1.set(xlabel=xlabel, ylabel=ylabel)
if minxlim:
ax1.set_xlim(left=min(x), right=max(x))
ax1.grid()
plt.savefig(f"{savefile}.svg")
plt.savefig(f"{savefile}.pgf")
tikzplotlib.save(f"{savefile}.tex")
plt.close(i1)
def composite_plot(mxy, mlegend, xlabel, ylabel, savefile, xlim=None, ylim=None):
i1, ax1 = plt.subplots()
i = enc.count()
for m in mxy:
t = zip(*m)
x, y = [list(t) for t in t]
plt.plot(x, y, label=mlegend[next(i)], marker="+")
ax1.set(xlabel=xlabel, ylabel=ylabel)
plt.legend()
if xlim is True:
ax1.set_xlim(left=min(x), right=max(x))
elif xlim is not None:
ax1.set_xlim(left=xlim[0], right=xlim[1])
if ylim is True:
ax1.set_ylim(bottom=min(y), top=max(y))
elif ylim is not None:
ax1.set_ylim(bottom=ylim[0], top=ylim[1])
ax1.grid()
p = os.path.split(savefile)
enc.create_dir(p[0] + '/svg/')
enc.create_dir(p[0] + '/png/')
enc.create_dir(p[0] + '/tex/')
plt.savefig(f"{p[0] + '/svg/' + p[1]}.svg")
plt.savefig(f"{p[0] + '/png/' + p[1]}.png")
tikzplotlib.save(f"{p[0] + '/tex/' + p[1]}.tex")
plt.close(i1)
def composite_plot_smooth(mxy, mlegend, xlabel, ylabel, savefile, xlim=None, ylim=None):
i1, ax1 = plt.subplots()
i = enc.count()
for m in mxy:
t = zip(*m)
x, y = [list(t) for t in t]
c = plt.scatter(x, y, label=mlegend[next(i)], marker="+")
colr = c.get_facecolor()[0]
lx = np.log(x)
p = sc.interpolate.Akima1DInterpolator(lx, y)
x_smooth = np.linspace(min(x), max(x), 1000)
y_smooth = p(np.log(x_smooth))
plt.plot(x_smooth, y_smooth, color=colr)
ax1.set(xlabel=xlabel, ylabel=ylabel)
plt.legend()
if xlim is True:
ax1.set_xlim(left=x.min(), right=x.max())
elif xlim is not None:
ax1.set_xlim(left=xlim[0], right=xlim[1])
if ylim is True:
ax1.set_ylim(bottom=y.min(), top=y.max())
elif ylim is not None:
ax1.set_ylim(bottom=ylim[0], top=ylim[1])
ax1.grid()
p = os.path.split(savefile)
enc.create_dir(p[0] + '/svg/')
enc.create_dir(p[0] + '/png/')
enc.create_dir(p[0] + '/tex/')
plt.savefig(f"{p[0] + '/svg/' + p[1]}.svg")
plt.savefig(f"{p[0] + '/png/' + p[1]}.png")
tikzplotlib.save(f"{p[0] + '/tex/' + p[1]}.tex")
plt.close(i1)
'''
def plot_graphs(data, sequence=None, codec=None):
if sequence is None and codec is None:
out = graphics_dir
elif sequence is None:
out = graphics_dir + codec + "/"
elif codec is None:
out = graphics_dir + sequences_short[sequence] + "/"
else:
out = graphics_dir + sequences_short[sequence] + "/" + codec + "/"
    lower_right = 4  # matplotlib legend 'loc' code for lower-right placement
d = df_to_plot(data, "avg_bitrate_mb", "psnr_avg")
composite_plot(d, "Bitrate [Mbit/s]", "PSNR (YUV) [dB]", out + "psnr", xlim=bitrate_lim[sequence], ylim=psnr_lim[sequence], legend_loc=lower_right)
composite_plot(d, "Bitrate [Mbit/s]", "PSNR (YUV) [dB]", out + "psnr_log", ylim=psnr_lim[sequence], xlog=True, legend_loc=lower_right)
d = df_to_plot(data, "avg_bitrate_mb", "ssim_avg")
composite_plot(d, "Bitrate [Mbit/s]", "SSIM", out + "ssim", xlim=bitrate_lim[sequence], ylim=ssim_lim[sequence], legend_loc=lower_right)
# composite_plot(d, "Bitrate [Mbit/s]", "SSIM", out + "ssim_log", ylim=ssim_lim[sequence], xlog=True, legend_loc=lower_right)
d = df_to_plot(data, "avg_bitrate_mb", "msssim_avg")
composite_plot(d, "Bitrate [Mbit/s]", "MS-SSIM", out + "msssim", xlim=bitrate_lim[sequence], ylim=msssim_lim[sequence], legend_loc=lower_right)
# composite_plot(d, "Bitrate [Mbit/s]", "MS-SSIM", out + "msssim_log", ylim=msssim_lim[sequence], xlog=True, legend_loc=lower_right)
d = df_to_plot(data, "avg_bitrate_mb", "vmaf_avg")
composite_plot(d, "Bitrate [Mbit/s]", "VMAF", out + "vmaf", xlim=bitrate_lim[sequence], ylim=vmaf_lim[sequence], legend_loc=lower_right)
# composite_plot(d, "Bitrate [Mbit/s]", "VMAF", out + "vmaf_log", ylim=vmaf_lim[sequence], xlog=True, legend_loc=lower_right)
d = df_to_plot(data, "avg_bitrate_mb", "decode_time_fps")
composite_plot(d, "Bitrate [Mbit/s]", "Rychlost dekódování [frame/s]", out + "decode", ylim=(0, None), xlim=bitrate_lim_log[sequence], xlog=True)
d = df_to_plot(data, "avg_bitrate_mb", "total_time_fps")
composite_plot(d, "Bitrate [Mbit/s]", "Procesorový čas [s/frame]", out + "encode", ylim=(0.1, None), xlim=bitrate_lim_log[sequence], xlog=True, ylog=True)
def df_to_plot(data, x_name, y_name):
tables = [t[[x_name, y_name]].rename(columns={x_name: "x", y_name: "y"}).sort_values(by="x") for t in list(data["table"])]
l = list(data["label"])
s = list(data["speed"])
lt = zip(l, tables, s)
for m in lt:
setattr(m[1], "label", m[0])
setattr(m[1], "speed", m[2])
return tables
def df_to_plot2(data, x_name, y_name):
tables = [data[[x_name, y_name]].rename(columns={x_name: "x", y_name: "y"}).loc[data["codec"] == s].sort_values(by="x") for s in codecs]
lt = zip(codecs, tables)
for m in lt:
setattr(m[1], "label", codecs_short[m[0]])
return tables
#def composite_plot(data, xlabel, ylabel, savefile, xlim=None, ylim=None, log_inter=True, xlog=False, ylog=False, smooth=True, xlogscalar=False, ylogscalar=False, legend_loc=None, tikz_before=True):
#i1, ax1 = plt.subplots()
#if not (xlog or ylog):
#tikz_before = False
#if xlog:
#ax1.set_xscale('log')
#ax1.grid(True, which="both")
#if xlogscalar:
#ax1.xaxis.set_major_formatter(matplotlib.ticker.ScalarFormatter())
#else:
#ax1.set_xscale('linear')
#ax1.grid(True)
#if ylog:
#ax1.set_yscale('log')
#ax1.grid(True, which="both")
#if ylogscalar:
#ax1.yaxis.set_major_formatter(matplotlib.ticker.ScalarFormatter())
#else:
#ax1.set_yscale('linear')
#ax1.grid(True)
#for table in data:
#if smooth:
#c = plt.scatter(table.x, table.y, label=table.label, marker="+")
#colr = c.get_facecolor()[0]
#if log_inter:
#lx = np.log(table.x)
#p = sc.interpolate.Akima1DInterpolator(lx, table.y)
#x_smooth = np.logspace(np.log10(min(table.x)), np.log10(max(table.x)), 200)
#else:
#lx = table.x
#p = sc.interpolate.Akima1DInterpolator(lx, table.y)
#x_smooth = np.linspace(min(table.x), max(table.x), 200)
#y_smooth = p(np.log(x_smooth))
#plt.plot(x_smooth, y_smooth, color=colr)
#else:
#plt.plot(table.x, table.y, label=table.label, marker="+")
#ax1.set(xlabel=xlabel, ylabel=ylabel)
#if legend_loc is None:
#ax1.legend()
#else:
#ax1.legend(loc=legend_loc)
#if xlim is True:
#ax1.set_xlim(left=table.x.min(), right=table.x.max())
#elif xlim is not None:
#ax1.set_xlim(left=xlim[0], right=xlim[1])
#if ylim is True:
#ax1.set_ylim(bottom=table.y.min(), top=table.y.max())
#elif ylim is not None:
#ax1.set_ylim(bottom=ylim[0], top=ylim[1])
#p = os.path.split(savefile)
#enc.create_dir(p[0] + '/svg/')
#enc.create_dir(p[0] + '/png/')
#enc.create_dir(p[0] + '/tex/')
#if tikz_before:
#tikzplotlib.save(f"{p[0] + '/tex/' + p[1]}.tex")
#plt.savefig(f"{p[0] + '/svg/' + p[1]}.svg")
#plt.savefig(f"{p[0] + '/png/' + p[1]}.png")
#if not tikz_before:
#tikzplotlib.save(f"{p[0] + '/tex/' + p[1]}.tex")
#plt.close(i1)
def composite_plot(data, xlabel, ylabel, savefile, xlim=None, ylim=None, log_inter=True, xlog=False, ylog=False, smooth=True, xlogscalar=False, ylogscalar=False, legend_loc=None, tikz_before=True):
plt.figure()
plt.axis()
if not (xlog or ylog):
tikz_before = False
if xlog:
plt.xscale('log')
plt.grid(True, which="both")
# if xlogscalar:
# plt.xaxis.set_major_formatter(matplotlib.ticker.ScalarFormatter())
else:
plt.xscale('linear')
plt.grid(True)
if ylog:
plt.yscale('log')
plt.grid(True, which="both")
# if ylogscalar:
# plt.yaxis.set_major_formatter(matplotlib.ticker.ScalarFormatter())
else:
plt.yscale('linear')
plt.grid(True)
for table in data:
if smooth:
c = plt.scatter(table.x, table.y, label=table.label, marker="+")
colr = c.get_facecolor()[0]
if log_inter:
lx = np.log(table.x)
p = sc.interpolate.Akima1DInterpolator(lx, table.y)
x_smooth = np.logspace(np.log10(min(table.x)), np.log10(max(table.x)), 200)
else:
lx = table.x
p = sc.interpolate.Akima1DInterpolator(lx, table.y)
x_smooth = np.linspace(min(table.x), max(table.x), 200)
y_smooth = p(np.log(x_smooth))
plt.plot(x_smooth, y_smooth, color=colr)
else:
plt.plot(table.x, table.y, label=table.label, marker="+")
plt.xlabel(xlabel)
plt.ylabel(ylabel)
plt.legend(loc=legend_loc)
if xlim is True:
plt.xlim(left=table.x.min(), right=table.x.max())
elif xlim is not None:
plt.xlim(left=xlim[0], right=xlim[1])
if ylim is True:
plt.ylim(bottom=table.y.min(), top=table.y.max())
elif ylim is not None:
plt.ylim(bottom=ylim[0], top=ylim[1])
p = os.path.split(savefile)
enc.create_dir(p[0] + '/svg/')
enc.create_dir(p[0] + '/png/')
enc.create_dir(p[0] + '/tex/')
if tikz_before:
tikzplotlib.save(f"{p[0] + '/tex/' + p[1]}.tex")
plt.savefig(f"{p[0] + '/svg/' + p[1]}.svg")
plt.savefig(f"{p[0] + '/png/' + p[1]}.png")
if not tikz_before:
tikzplotlib.save(f"{p[0] + '/tex/' + p[1]}.tex")
plt.close()
def df_to_latex_table(values, save_path):
pass
def calc_bj(mxy_o, mlegend_o, bd_metric_legend, bd_rate_legend):
mxy = mxy_o.copy()
mlegend = mlegend_o.copy()
xy1 = mxy[mlegend.index(BJ1_serie)]
t1 = zip(*xy1)
x1, y1 = [list(t1) for t1 in t1]
mxy.remove(xy1)
mlegend.remove(BJ1_serie)
i = enc.count()
for m in mxy:
t = zip(*m)
x, y = [list(t) for t in t]
bd_metric = bj_delta(x1, y1, x, y, mode=0)
bd_rate = bj_delta(x1, y1, x, y, mode=1)
l = mlegend[next(i)]
print(f"{l}: BD-{bd_metric_legend}: {bd_metric}%")
print(f"{l}: BD-{bd_rate_legend}: {bd_rate}%")
def formatter1(x):
    s = ('%1.2f' % x).replace(".", ",") + r"\,\%"
return s
def formatter2(x):
    s = ('%1.2f' % x).replace(".", ",") + r"\%"
    if x > 0:
        s = r"\cellcolor{red!25}" + s
    elif x < 0:
        s = r"\cellcolor{green!25}" + s
return s
def calc_bj_cross_to_table(mxy_o, mlegend_o, bd_metric_legend, bd_rate_legend):
table_metric = pd.DataFrame(np.zeros((len(mlegend_o), len(mlegend_o))), columns=mlegend_o, index=mlegend_o)
table_rate = pd.DataFrame(np.zeros((len(mlegend_o), len(mlegend_o))), columns=mlegend_o, index=mlegend_o)
for mleg in mlegend_o:
mxy = mxy_o.copy()
mlegend = mlegend_o.copy()
xy1 = mxy[mlegend.index(mleg)]
t1 = zip(*xy1)
x1, y1 = [list(t1) for t1 in t1]
mxy.remove(xy1)
mlegend.remove(mleg)
i = enc.count()
for m in mxy:
t = zip(*m)
x, y = [list(t) for t in t]
bd_metric = bj_delta(x1, y1, x, y, mode=0)
bd_rate = bj_delta(x1, y1, x, y, mode=1)
l = mlegend[next(i)]
table_metric.loc[l, mleg] = bd_metric
table_rate.loc[l, mleg] = bd_rate
# print(table_metric.to_latex(float_format="%.2f", decimal=","))
# print(table_rate.to_latex(float_format="%.2f"))
return table_metric, table_rate
'''
def calc_bj_akima(dftable, x_name, y_name, bd_metric_legend, bd_rate_legend):
xy1 = mxy[mlegend.index(BJ1_serie)]
t1 = zip(*xy1)
x1, y1 = [list(t1) for t1 in t1]
mxy.remove(xy1)
mlegend.remove(BJ1_serie)
i = enc.count()
for m in mxy:
t = zip(*m)
x, y = [list(t) for t in t]
bd_metric = bj_delta_akima(x1, y1, x, y, mode=0)
bd_rate = bj_delta_akima(x1, y1, x, y, mode=1)
l = mlegend[next(i)]
print(f"{l}: BD-{bd_metric_legend}: {bd_metric}%")
print(f"{l}: BD-{bd_rate_legend}: {bd_rate}%")
'''
def calc_bj_akima(data, x_name, y_name, bd_metric_legend, bd_rate_legend):
df = data.copy()
for t in df.itertuples():
t.table.rename(columns={x_name: "x", y_name: "y"}).sort_values(by="x")
df
bd_metric = bj_delta_akima(x1, y1, x, y, mode=0)
bd_rate = bj_delta_akima(x1, y1, x, y, mode=1)
def read_table_kcolv(logpath):
with open(logpath, "r") as f:
firstline = next(f).rstrip(" \n")
columns = []
for x in firstline.rsplit(" "):
columns.append(x.rsplit(":")[0])
r = range(len(columns))
table = pd.read_table(logpath, names=columns, usecols=list(r), sep=" ",
converters={k: lambda x: (x.rsplit(":")[1]) for k in r})
return table.apply(pd.to_numeric)
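# Minimal self-contained demonstration of the "key:value" column format that
# read_table_kcolv expects (one frame per line, e.g. from ffmpeg's psnr filter).
# The sample lines and the temporary path are made up for illustration.
def _demo_read_table_kcolv(tmp_path="/tmp/_kcolv_demo.txt"):
    with open(tmp_path, "w") as f:
        f.write("n:1 mse_avg:3.21 psnr_avg:43.06\n")
        f.write("n:2 mse_avg:3.40 psnr_avg:42.81\n")
    t = read_table_kcolv(tmp_path)
    return t.psnr_avg.mean()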
class PSNR_values:
def __init__(self, logpath):
self.logpath = logpath
table = read_table_kcolv(self.logpath)
self.n = table.n
self.mse_avg = table.mse_avg
self.mse_y = table.mse_y
self.mse_u = table.mse_u
self.mse_v = table.mse_v
self.psnr_avg = table.psnr_avg
self.psnr_y = table.psnr_y
self.psnr_u = table.psnr_u
self.psnr_v = table.psnr_v
self.mse_avg_avg = np.average(self.mse_avg)
self.mse_y_avg = np.average(self.mse_y)
self.mse_u_avg = np.average(self.mse_u)
self.mse_v_avg = np.average(self.mse_v)
self.psnr_avg_avg = np.average(self.psnr_avg)
self.psnr_y_avg = np.average(self.psnr_y)
self.psnr_u_avg = np.average(self.psnr_u)
self.psnr_v_avg = np.average(self.psnr_v)
class SSIM_values:
def __init__(self, logpath):
self.logpath = logpath
names = ("n", "Y", "U", "V", "All", "unnorm")
table = pd.read_table(self.logpath, names=names, sep=" ",
converters={k: lambda x: (x.rsplit(":")[1]) for k in range(5)})
table.unnorm = table.unnorm.str.slice(start=1, stop=-1)
table = table.apply(pd.to_numeric)
self.n = table.n
self.Y = table.Y
self.U = table.U
self.V = table.V
self.All = table.All
self.unnorm = table.unnorm # unnorm = 10*log10(1-All)
self.Y_avg = np.average(self.Y)
self.U_avg = np.average(self.U)
self.V_avg = np.average(self.V)
self.All_avg = np.average(self.All)
self.unnorm_avg = np.average(self.unnorm)
class VMAF_values:
def __init__(self, logpath):
self.logpath = logpath
table = pd.read_table(logpath, sep=",")
table = table.loc[:, ~table.columns.str.contains('^Unnamed')]
self.table = table
self.vmaf_avg = table.vmaf.mean()
class Useage_values:
def __init__(self, logpath):
self.logpath = logpath
with open(logpath, "r") as log:
firstline = next(log)
self.row_names = firstline.rsplit(",")[0:-1]
table = pd.read_csv(self.logpath)
self.table = table
self.state_names = list(table.state.unique())
total_time = 0
total_cpu_time = 0
for state in [x for x in self.state_names if x not in encode_excluded_states]:
for row in self.row_names:
if row == "state":
pass
else:
arr = np.array(table[row][table.index[table['state'] == state]])
setattr(self, state + "_" + row, arr)
cpu_time_user = getattr(self, state + "_cpu_time_user")
cpu_time_user = np.append(np.array([0]), cpu_time_user)
cpu_time_system = getattr(self, state + "_cpu_time_system")
cpu_time_system = np.append(np.array([0]), cpu_time_system)
cpu_time_total = cpu_time_user + cpu_time_system
setattr(self, state + "_cpu_time_total", cpu_time_total)
cpu_time_diff = np.ediff1d(cpu_time_total)
time = np.append(np.array([0]), getattr(self, state + "_time"))
time_diff = np.ediff1d(time)
cpu_percent_calc = cpu_time_diff / time_diff
setattr(self, state + "_cpu_percent_calc", cpu_percent_calc)
total_time += time[-1]
total_cpu_time += cpu_time_total[-1]
self.total_time = total_time
self.total_cpu_time = total_cpu_time
        cpu_time_diff = np.ediff1d(np.append(np.array([0]),
"""Implementation of unuspervised and supervised Fourier feature selection algorithms
"""
from sklearn.base import BaseEstimator, ClassifierMixin
import numpy as np
from itertools import chain, combinations
import sys
import compute_fourier_coeff_supervised
import compute_norms_features_unsupervised
import math
# Generates the set of all subsets with the size of each subset as maximum k
def powerset(iterable, k):
"powerset([1,2,3]) --> () (1,) (2,) (3,) (1,2) (1,3) (2,3) (1,2,3)"
s = list(iterable)
return chain.from_iterable(combinations(s, r) for r in range(1, k + 1))
class OptionsUnsupervisedFourierFS:
def __init__(self, max_depth, cluster_sizes, selection_thresholds, norm_epsilon, shuffle, preranking):
self.max_depth = max_depth
self.cluster_sizes = cluster_sizes
self.selection_thresholds = selection_thresholds # same as n_redundant_threshold
self.norm_epsilon = norm_epsilon
self.shuffle = shuffle
self.preranking = preranking
def UnsupervisedFourierFS_helper(X_nmlzd, depth, input_features, options):
X_nmlzd_depth = X_nmlzd[:, input_features]
d = len(input_features)
n_clusters = math.ceil(d / options.cluster_sizes[depth])
if n_clusters == 0:
print("Error : n_clusters is zero!")
sys.exit(2)
    clusters = np.linspace(0, d, n_clusters + 1, dtype=int)
nonredundant_Features = []
for i in range(1, len(clusters)):
features_cluster = np.arange(clusters[i - 1], clusters[i])
X_cluster = X_nmlzd_depth[:, features_cluster]
sel_feats_norm2 = compute_norms_features_unsupervised.estimate_A(X_cluster,
depth+1,
options.norm_epsilon[depth])
# import pdb; pdb.set_trace()
sel_feats_norm2 = np.array(sel_feats_norm2)
sel_feats_norm2 = sel_feats_norm2 ** 2
Sorted_Feature_Indx = (-sel_feats_norm2).argsort()
sel_feats_norm2_sorted = sel_feats_norm2[Sorted_Feature_Indx]
cum_orthogonalization = np.cumsum(sel_feats_norm2_sorted) / sum(sel_feats_norm2_sorted)
nonredundant_Orth = sum(cum_orthogonalization < options.selection_thresholds[depth])
sel_feats_indices_local = Sorted_Feature_Indx[:nonredundant_Orth]
nonredundant_Features.extend(features_cluster[sel_feats_indices_local])
return nonredundant_Features
def UnsupervisedFourierFS(X, options):
'''
The main function for unsupervised Fourier feature selection algorithm (UFFS)
Arguments:
X: the input data with columns as features and rows correspond to data samples
mean_emp: vector of empirical mean of each features
std_emp: vector of empirical standard deviation of each features
output_all_depth: if it's set to false, only output UFFS selected features for t=3
Otherwise, output selected features for t=1, t=2, and t=3
'''
# mask = (np.std(X, ddof=1, axis=0) > 1e-5)
# orig_features = np.arange(X.shape[1])
# valid_features = orig_features[mask]
# X = X[:, valid_features]
mean_emp = np.mean(X, axis=0)
    std_emp = np.std(X, ddof=1, axis=0)
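# Hedged usage sketch (not from the original file): the depth-wise parameter values
# below are illustrative placeholders, not settings recommended by the authors.
def _example_uffs(X):
    opts = OptionsUnsupervisedFourierFS(
        max_depth=3,
        cluster_sizes=[64, 64, 64],              # features examined per cluster at each depth
        selection_thresholds=[0.95, 0.95, 0.95],
        norm_epsilon=[0.001, 0.001, 0.001],
        shuffle=False,
        preranking='non')                        # value assumed for demonstration
    return UnsupervisedFourierFS(X, opts)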
import casadi as ca
import numpy as np
import sys
sys.path.insert(0, '../../../../../python/pyecca')
import matplotlib.pyplot as plt
import pyecca.lie.so3 as so3
from pyecca.util import rk4
def u_to_fin(u):
ail = u[1]
elv = u[2]
rdr = u[3]
# top, left, down right
return ca.vertcat(ail - rdr, ail - elv, ail + rdr, ail + elv)
def rocket_equations(jit=True):
x = ca.SX.sym('x', 14)
u = ca.SX.sym('u', 4)
p = ca.SX.sym('p', 16)
t = ca.SX.sym('t')
dt = ca.SX.sym('dt')
# State: x
# body frame: Forward, Right, Down
omega_b = x[0:3] # inertial angular velocity expressed in body frame
r_nb = x[3:7] # modified rodrigues parameters
v_b = x[7:10] # inertial velocity expressed in body components
p_n = x[10:13] # positon in nav frame
m_fuel = x[13] # mass
# Input: u
m_dot = ca.if_else(m_fuel > 0, u[0], 0)
fin = u_to_fin(u)
# Parameters: p
g = p[0] # gravity
Jx = p[1] # moment of inertia
Jy = p[2]
Jz = p[3]
Jxz = p[4]
ve = p[5]
l_fin = p[6]
w_fin = p[7]
CL_alpha = p[8]
CL0 = p[9]
CD0 = p[10]
K = p[11]
s_fin = p[12]
rho = p[13]
m_empty = p[14]
l_motor = p[15]
# Calculations
m = m_empty + m_fuel
J_b = ca.SX.zeros(3, 3)
J_b[0, 0] = Jx + m_fuel*l_motor**2
J_b[1, 1] = Jy + m_fuel*l_motor**2
J_b[2, 2] = Jz
J_b[0, 2] = J_b[2, 0] = Jxz
C_nb = so3.Dcm.from_mrp(r_nb)
g_n = ca.vertcat(0, 0, g)
v_n = ca.mtimes(C_nb, v_b)
# aerodynamics
VT = ca.norm_2(v_b)
q = 0.5*rho*ca.dot(v_b, v_b)
fins = {
'top': {
'fwd': [1, 0, 0],
'up': [0, 1, 0],
'angle': fin[0]
},
'left': {
'fwd': [1, 0, 0],
'up': [0, 0, -1],
'angle': fin[1]
},
'down': {
'fwd': [1, 0, 0],
'up': [0, -1, 0],
'angle': fin[2]
},
'right': {
'fwd': [1, 0, 0],
'up': [0, 0, 1],
'angle': fin[3]
},
}
rel_wind_dir = v_b/VT
# build fin lift/drag forces
vel_tol = 1e-3
FA_b = ca.vertcat(0, 0, 0)
MA_b = ca.vertcat(0, 0, 0)
for key, data in fins.items():
fwd = data['fwd']
up = data['up']
angle = data['angle']
U = ca.dot(fwd, v_b)
W = ca.dot(up, v_b)
side = ca.cross(fwd, up)
alpha = ca.if_else(ca.fabs(U) > vel_tol, -ca.atan(W/U), 0)
perp_wind_dir = ca.cross(side, rel_wind_dir)
norm_perp = ca.norm_2(perp_wind_dir)
perp_wind_dir = ca.if_else(ca.fabs(norm_perp) > vel_tol,
perp_wind_dir/norm_perp, up)
CL = CL0 + CL_alpha*(alpha + angle)
CD = CD0 + K*(CL - CL0)**2
# model stall as no lift if above 23 deg.
L = ca.if_else(ca.fabs(alpha)<0.4, CL*q*s_fin, 0)
D = CD*q*s_fin
FAi_b = L*perp_wind_dir - D*rel_wind_dir
FA_b += FAi_b
MA_b += ca.cross(-l_fin*fwd - w_fin*side, FAi_b)
FA_b = ca.if_else(ca.fabs(VT) > vel_tol, FA_b, ca.SX.zeros(3))
MA_b = ca.if_else(ca.fabs(VT) > vel_tol, MA_b, ca.SX.zeros(3))
# propulsion
FP_b = ca.vertcat(m_dot*ve, 0, 0)
# force and momental total
F_b = FA_b + FP_b + ca.mtimes(C_nb.T, m*g_n)
M_b = MA_b
force_moment = ca.Function(
'force_moment', [x, u, p], [F_b, M_b], ['x', 'u', 'p'], ['F_b', 'M_b'])
# right hand side
rhs = ca.Function('rhs', [x, u, p], [ca.vertcat(
ca.mtimes(ca.inv(J_b), M_b - ca.cross(omega_b, ca.mtimes(J_b, omega_b))),
so3.Mrp.kinematics(r_nb, omega_b),
F_b/m - ca.cross(omega_b, v_b),
ca.mtimes(C_nb, v_b), -m_dot)], ['x', 'u', 'p'], ['rhs'], {'jit': jit})
# prediction
t0 = ca.SX.sym('t0')
h = ca.SX.sym('h')
x0 = ca.SX.sym('x', 14)
x1 = rk4(lambda t, x: rhs(x, u, p), t0, x0, h)
x1[3:7] = so3.Mrp.shadow_if_necessary(x1[3:7])
predict = ca.Function('predict', [x0, u, p, t0, h], [x1], {'jit': jit})
def schedule(t, start, ty_pairs):
val = start
for ti, yi in ty_pairs:
val = ca.if_else(t > ti, yi, val)
return val
# reference trajectory
pitch_d = 1.0
euler = so3.Euler.from_mrp(r_nb) # roll, pitch, yaw
pitch = euler[1]
# control
u_control = ca.SX.zeros(4)
# these controls are just test controls to make sure the fins are working
u_control[0] = 0.1 # mass flow rate
u_control[1] = 0
u_control[2] = (pitch - 1)
u_control[3] = 0
control = ca.Function('control', [x, p, t, dt], [u_control],
['x', 'p', 't', 'dt'], ['u'])
# initialize
pitch_deg = ca.SX.sym('pitch_deg')
omega0_b = ca.vertcat(0, 0, 0)
r0_nb = so3.Mrp.from_euler(ca.vertcat(0, pitch_deg*ca.pi/180, 0))
v0_b = ca.vertcat(0, 0, 0)
p0_n = ca.vertcat(0, 0, 0)
m0_fuel = 0.8
# x: omega_b, r_nb, v_b, p_n, m_fuel
x0 = ca.vertcat(omega0_b, r0_nb, v0_b, p0_n, m0_fuel)
# g, Jx, Jy, Jz, Jxz, ve, l_fin, w_fin, CL_alpha, CL0, CD0, K, s, rho, m_emptpy, l_motor
p0 = [9.8, 0.05, 1.0, 1.0, 0.0, 350, 1.0, 0.05, 2*np.pi, 0, 0.01, 0.01, 0.05, 1.225, 0.2, 1.0]
initialize = ca.Function('initialize', [pitch_deg], [x0, p0])
return {
'rhs': rhs,
'predict': predict,
'control': control,
'initialize': initialize,
'force_moment': force_moment,
'x': x,
'u': u,
'p': p
}
return rhs, x, u, p
def analyze_data(data):
plt.figure(figsize=(20, 20))
plt.subplot(331)
plt.title('fuel')
plt.plot(data['t'], data['x'][:, 13])
plt.xlabel('t, sec')
plt.ylabel('mass, kg')
plt.grid()
plt.subplot(332)
#plt.title('velocity')
plt.plot(data['t'], data['x'][:, 7], label='x')
plt.plot(data['t'], data['x'][:, 8], label='y')
plt.plot(data['t'], data['x'][:, 9], label='z')
plt.xlabel('t, sec')
plt.ylabel('body velocity, m/s')
plt.grid()
plt.legend()
plt.subplot(333)
euler = np.array(
[np.array(ca.DM(so3.Euler.from_mrp(x))).reshape(-1) for x in data['x'][:, 3:7]])
plt.plot(data['t'], np.rad2deg(euler[:, 0]), label='roll')
plt.plot(data['t'], np.rad2deg(euler[:, 1]), label='pitch')
plt.plot(data['t'], np.rad2deg(euler[:, 2]), label='yaw')
plt.legend()
plt.grid()
plt.xlabel('t, sec')
plt.ylabel('euler angles, deg')
#plt.title('euler')
plt.subplot(334)
#plt.title('angular velocity')
plt.plot(data['t'], data['x'][:, 0], label='x')
plt.plot(data['t'], data['x'][:, 1], label='y')
plt.plot(data['t'], data['x'][:, 2], label='z')
plt.xlabel('t, sec')
plt.ylabel('angular velocity, rad/s')
plt.grid()
plt.legend()
plt.subplot(335)
#plt.title('trajectory [side]')
plt.plot(data['x'][:, 10], -data['x'][:, 12])
plt.xlabel('North, m')
plt.ylabel('Altitude, m')
plt.axis('equal')
plt.grid()
plt.subplot(336)
#plt.title('trajectory [top]')
plt.plot(data['x'][:, 11], data['x'][:, 10])
plt.xlabel('East, m')
plt.ylabel('North, m')
plt.axis('equal')
plt.grid()
plt.subplot(337)
#plt.title('control input')
plt.plot(data['t'], data['u'][:, 0], label='mdot')
plt.plot(data['t'], data['u'][:, 1], label='aileron')
plt.plot(data['t'], data['u'][:, 2], label='elevator')
plt.plot(data['t'], data['u'][:, 3], label='rudder')
plt.xlabel('t, sec')
plt.ylabel('control')
plt.legend()
plt.grid()
def simulate(rocket, x0, p0, dt=0.005, t0=0, tf=5):
"""
An integrator using a fixed step runge-kutta approach.
"""
x = x0
u = rocket['control'](x0, p0, t0, dt)
data = {
't': [],
'x': [],
'u': []
}
    for t in np.arange(t0, tf, dt):
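        # Sketch of the rest of the loop and of the return value (the original
        # body is truncated in this listing): recompute the control, take one RK4
        # step, log the sample, then stack the lists into arrays for analyze_data.
        u = rocket['control'](x, p0, t, dt)
        x = rocket['predict'](x, u, p0, t, dt)
        data['t'].append(t)
        data['x'].append(np.array(x).reshape(-1))
        data['u'].append(np.array(u).reshape(-1))
    for k in data:
        data[k] = np.array(data[k])
    return data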
from csbdeep.data import PadAndCropResizer, PercentileNormalizer, NoResizer
from csbdeep.internals.predict import Progress, total_n_tiles, tile_iterator_1d, to_tensor, from_tensor
from csbdeep.models import CARE
from csbdeep.utils import _raise, axes_check_and_normalize, axes_dict
import warnings
import numpy as np
import tensorflow as tf
class CryoCARE(CARE):
def train(self, train_dataset, val_dataset, epochs=None, steps_per_epoch=None):
"""Train the neural network with the given data.
Parameters
----------
X : :class:`numpy.ndarray`
Array of source images.
Y : :class:`numpy.ndarray`
Array of target images.
validation_data : tuple(:class:`numpy.ndarray`, :class:`numpy.ndarray`)
Tuple of arrays for source and target validation images.
epochs : int
Optional argument to use instead of the value from ``config``.
steps_per_epoch : int
Optional argument to use instead of the value from ``config``.
Returns
-------
``History`` object
See `Keras training history <https://keras.io/models/model/#fit>`_.
"""
axes = axes_check_and_normalize('S' + self.config.axes, len(train_dataset.element_spec[0].shape) + 1)
ax = axes_dict(axes)
train_shape = (1,) + train_dataset.element_spec[0].shape
for a, div_by in zip(axes, self._axes_div_by(axes)):
n = train_shape[ax[a]]
print(ax[a], n)
if n % div_by != 0:
raise ValueError(
"training images must be evenly divisible by %d along axis %s"
" (which has incompatible size %d)" % (div_by, a, n)
)
if epochs is None:
epochs = self.config.train_epochs
if steps_per_epoch is None:
steps_per_epoch = self.config.train_steps_per_epoch
if not self._model_prepared:
self.prepare_for_training()
history = self.keras_model.fit(train_dataset.batch(self.config.train_batch_size),
validation_data=val_dataset.batch(self.config.train_batch_size),
epochs=epochs, steps_per_epoch=steps_per_epoch,
callbacks=self.callbacks, verbose=1)
if self.basedir is not None:
self.keras_model.save_weights(str(self.logdir / 'weights_last.h5'))
if self.config.train_checkpoint is not None:
print()
self._find_and_load_weights(self.config.train_checkpoint)
try:
# remove temporary weights
(self.logdir / 'weights_now.h5').unlink()
except FileNotFoundError:
pass
return history
def predict(self, even, odd, output, axes, normalizer=PercentileNormalizer(), resizer=PadAndCropResizer(), mean=0,
std=1, n_tiles=None):
"""Apply neural network to raw image to predict restored image.
Parameters
----------
img : :class:`numpy.ndarray`
Raw input image
axes : str
Axes of the input ``img``.
normalizer : :class:`csbdeep.data.Normalizer` or None
Normalization of input image before prediction and (potentially) transformation back after prediction.
resizer : :class:`csbdeep.data.Resizer` or None
If necessary, input image is resized to enable neural network prediction and result is (possibly)
resized to yield original image size.
n_tiles : iterable or None
Out of memory (OOM) errors can occur if the input image is too large.
To avoid this problem, the input image is broken up into (overlapping) tiles
that can then be processed independently and re-assembled to yield the restored image.
This parameter denotes a tuple of the number of tiles for every image axis.
Note that if the number of tiles is too low, it is adaptively increased until
OOM errors are avoided, albeit at the expense of runtime.
A value of ``None`` denotes that no tiling should initially be used.
Returns
-------
:class:`numpy.ndarray`
Returns the restored image. If the model is probabilistic, this denotes the `mean` parameter of
the predicted per-pixel Laplace distributions (i.e., the expected restored image).
Axes semantics are the same as in the input image. Only if the output is multi-channel and
the input image didn't have a channel axis, then output channels are appended at the end.
"""
self._predict_mean_and_scale(self._crop(even), self._crop(odd), self._crop(output), axes, normalizer, resizer=NoResizer(), mean=mean, std=std,
n_tiles=n_tiles)
def _crop(self, data):
div_by = self._axes_div_by('XYZ')
data_shape = data.shape
slices = ()
for i in range(3):
if data_shape[i] % div_by[i] == 0:
slices += (slice(None),)
else:
slices += (slice(0, -(data_shape[i]%div_by[i])),)
return data[slices]
def _predict_mean_and_scale(self, even, odd, output, axes, normalizer, resizer, mean, std, n_tiles=None):
"""Apply neural network to raw image to predict restored image.
See :func:`predict` for parameter explanations.
Returns
-------
tuple(:class:`numpy.ndarray`, :class:`numpy.ndarray` or None)
If model is probabilistic, returns a tuple `(mean, scale)` that defines the parameters
of per-pixel Laplace distributions. Otherwise, returns the restored image via a tuple `(restored,None)`
"""
print(even.shape)
normalizer, resizer = self._check_normalizer_resizer(normalizer, resizer)
# axes = axes_check_and_normalize(axes,img.ndim)
# different kinds of axes
# -> typical case: net_axes_in = net_axes_out, img_axes_in = img_axes_out
img_axes_in = axes_check_and_normalize(axes, even.ndim)
net_axes_in = self.config.axes
net_axes_out = axes_check_and_normalize(self._axes_out)
set(net_axes_out).issubset(set(net_axes_in)) or _raise(ValueError("different kinds of output than input axes"))
net_axes_lost = set(net_axes_in).difference(set(net_axes_out))
img_axes_out = ''.join(a for a in img_axes_in if a not in net_axes_lost)
# print(' -> '.join((img_axes_in, net_axes_in, net_axes_out, img_axes_out)))
tiling_axes = net_axes_out.replace('C', '') # axes eligible for tiling
_permute_axes = self._make_permute_axes(img_axes_in, net_axes_in, net_axes_out, img_axes_out)
# _permute_axes: (img_axes_in -> net_axes_in), undo: (net_axes_out -> img_axes_out)
even = _permute_axes(even)
odd = _permute_axes(odd)
# x has net_axes_in semantics
x_tiling_axis = tuple(axes_dict(net_axes_in)[a] for a in tiling_axes) # numerical axis ids for x
channel_in = axes_dict(net_axes_in)['C']
channel_out = axes_dict(net_axes_out)['C']
net_axes_in_div_by = self._axes_div_by(net_axes_in)
net_axes_in_overlaps = self._axes_tile_overlap(net_axes_in)
self.config.n_channel_in == even.shape[channel_in] or _raise(ValueError())
# TODO: refactor tiling stuff to make code more readable
def _total_n_tiles(n_tiles):
n_block_overlaps = [int(np.ceil(1. * tile_overlap / block_size)) for tile_overlap, block_size in
zip(net_axes_in_overlaps, net_axes_in_div_by)]
return total_n_tiles(even, n_tiles=n_tiles, block_sizes=net_axes_in_div_by,
n_block_overlaps=n_block_overlaps, guarantee='size')
_permute_axes_n_tiles = self._make_permute_axes(img_axes_in, net_axes_in)
# _permute_axes_n_tiles: (img_axes_in <-> net_axes_in) to convert n_tiles between img and net axes
def _permute_n_tiles(n, undo=False):
# hack: move tiling axis around in the same way as the image was permuted by creating an array
            return _permute_axes_n_tiles(np.empty(n, bool), undo=undo).shape
# to support old api: set scalar n_tiles value for the largest tiling axis
if np.isscalar(n_tiles) and int(n_tiles) == n_tiles and 1 <= n_tiles:
largest_tiling_axis = [i for i in np.argsort(even.shape) if i in x_tiling_axis][-1]
            _n_tiles = [n_tiles if i == largest_tiling_axis else 1 for i in range(even.ndim)]
n_tiles = _permute_n_tiles(_n_tiles, undo=True)
warnings.warn("n_tiles should be a tuple with an entry for each image axis")
print("Changing n_tiles to %s" % str(n_tiles))
if n_tiles is None:
n_tiles = [1] * even.ndim
try:
n_tiles = tuple(n_tiles)
even.ndim == len(n_tiles) or _raise(TypeError())
except TypeError:
raise ValueError("n_tiles must be an iterable of length %d" % even.ndim)
all(np.isscalar(t) and 1 <= t and int(t) == t for t in n_tiles) or _raise(
ValueError("all values of n_tiles must be integer values >= 1"))
n_tiles = tuple(map(int, n_tiles))
n_tiles = _permute_n_tiles(n_tiles)
(all(n_tiles[i] == 1 for i in range(even.ndim) if i not in x_tiling_axis) or
_raise(ValueError("entry of n_tiles > 1 only allowed for axes '%s'" % tiling_axes)))
# n_tiles_limited = self._limit_tiling(x.shape,n_tiles,net_axes_in_div_by)
# if any(np.array(n_tiles) != np.array(n_tiles_limited)):
# print("Limiting n_tiles to %s" % str(_permute_n_tiles(n_tiles_limited,undo=True)))
# n_tiles = n_tiles_limited
n_tiles = list(n_tiles)
# normalize & resize
even = resizer.before(even, net_axes_in, net_axes_in_div_by)
odd = resizer.before(odd, net_axes_in, net_axes_in_div_by)
done = False
progress = Progress(_total_n_tiles(n_tiles), 1)
c = 0
while not done:
try:
# raise tf.errors.ResourceExhaustedError(None,None,None) # tmp
pred = predict_tiled(self.keras_model, even, odd, output, [4 * (slice(None),)], 4 * (slice(None),),
mean=mean, std=std,
axes_in=net_axes_in, axes_out=net_axes_out,
n_tiles=n_tiles, block_sizes=net_axes_in_div_by,
tile_overlaps=net_axes_in_overlaps, pbar=progress)
output = pred
# x has net_axes_out semantics
done = True
progress.close()
except tf.errors.ResourceExhaustedError:
# TODO: how to test this code?
# n_tiles_prev = list(n_tiles) # make a copy
                tile_sizes_approx = np.array(even.shape)
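# Hedged usage sketch (not from the original project): the constructor signature is
# inherited from csbdeep's CARE (config, name, basedir); train_dataset and
# val_dataset are assumed to be tf.data.Datasets yielding (source, target) patches.
def _example_cryocare_training(config, train_dataset, val_dataset):
    model = CryoCARE(config, 'cryoCARE_demo', basedir='models')
    return model.train(train_dataset, val_dataset, epochs=1)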
from typing import List
import cv2
import torch
from albumentations import BasicTransform
from torch.nn import Module
import numpy as np
from pietoolbelt.tta import AbstractTTA
from pietoolbelt.viz import ColormapVisualizer
class SegmentationInference:
def __init__(self, model: Module):
self._model = model
self._transform = None
self._target_transform = None
self._tta = None
self._threshold = 0.5
self._vis = ColormapVisualizer([0.5, 0.5])
self._device = None
def set_device(self, device: str) -> 'SegmentationInference':
self._model = self._model.to(device)
self._device = device
return self
def set_data_transform(self, transform: BasicTransform) -> 'SegmentationInference':
self._transform = transform
return self
def set_target_transform(self, transform: BasicTransform) -> 'SegmentationInference':
self._target_transform = transform
return self
def set_tta(self, tta: List[AbstractTTA]) -> 'SegmentationInference':
self._tta = tta
return self
def _process_imag(self, image) -> np.ndarray:
data = np.swapaxes(image, 0, -1).astype(np.float32) / 128 - 1
        data = torch.from_numpy(np.expand_dims(data, axis=0))
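# Hedged usage sketch: the device string and the empty TTA list are illustrative
# only; a real albumentations transform would normally be supplied through
# set_data_transform() before running inference.
def _example_segmentation_inference(model):
    return (SegmentationInference(model)
            .set_device('cuda')
            .set_tta([]))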
import numpy as np
def istri_1(ppos, tri1):
"""
ISTRI-1 TRUE if (PPOS, TRIA) is 1-simplex triangulation.
"""
okay = True
#--------------------------------------- some simple checks!
if (not isinstance(ppos, np.ndarray)):
raise Exception("Invalid type: PPOS.")
if (not isinstance(tri1, np.ndarray)):
raise Exception("Invalid type: TRIA.")
if (ppos.ndim != +2):
raise Exception("Invalid PPOS ndims.")
if (ppos.shape[1] < +2):
raise Exception("Invalid PPOS shape.")
nump = ppos.shape[0]
if (tri1.ndim != +2):
raise Exception("Invalid TRIA ndims.")
if (tri1.shape[1] < +2):
raise Exception("Invalid TRIA shape.")
if (np.min(tri1[:, 0:1]) < +0 or
np.max(tri1[:, 0:1]) >= nump):
raise Exception("Invalid TRIA index.")
return okay
def istri_2(ppos, tri2):
"""
ISTRI-2 TRUE if (PPOS, TRIA) is 2-simplex triangulation.
"""
okay = True
#--------------------------------------- some simple checks!
if (not isinstance(ppos, np.ndarray)):
raise Exception("Invalid type: PPOS.")
if (not isinstance(tri2, np.ndarray)):
raise Exception("Invalid type: TRIA.")
if (ppos.ndim != +2):
raise Exception("Invalid PPOS ndims.")
if (ppos.shape[1] < +2):
raise Exception("Invalid PPOS shape.")
nump = ppos.shape[0]
if (tri2.ndim != +2):
raise Exception("Invalid TRIA ndims.")
if (tri2.shape[1] < +3):
raise Exception("Invalid TRIA shape.")
    if (np.min(tri2[:, 0:2]) < +0 or
            np.max(tri2[:, 0:2]) >= nump):
        raise Exception("Invalid TRIA index.")

    return okay
"""
Module for neural analysis
"""
import numpy as np
from typing import Any, Callable, Dict, List, NamedTuple, Optional, Tuple
def get_isi(spk_ts_list: list):
"""
Get inter-analysis interval of spikes
Parameters
----------
spk_ts_list : list
Returns
-------
isi : class object
class object for inter-spike intervals
"""
isi = np.array([], dtype=np.float64)
for spk in spk_ts_list:
isi = np.append(isi, np.diff(spk))
isi = ISI(isi) # return the class object
return isi
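# Example usage (an illustrative sketch; the spike-timestamp arrays are made up,
# and the ISI class is assumed to be defined elsewhere in this package):
# >>> spk_ts_list = [np.array([10., 12.5, 20.]), np.array([5., 9., 9.5])]
# >>> isi = get_isi(spk_ts_list)     # ISI object built from np.diff of each spike train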
def get_peth(evt_ts_list: list, spk_ts_list: list,
pre_evt_buffer=None, duration=None,
bin_size=None,
nb_bins=None
):
"""
Get peri-event histogram & firing rates
Parameters
----------
evt_ts_list : list
Timestamps for behavioral events (e.g., syllable onset/offsets)
spk_ts_list : list
Spike timestamps
pre_evt_buffer : int, default=None
Size of buffer window prior to the first event (in ms)
    duration : int, optional
        Duration of the peth (in ms); if specified, the peth is truncated to this duration
bin_size : int, default=None
Time bin size
nb_bins : int, default=None
Number of bins
Returns
-------
peth : np.ndarray
Peri-event time histograms
time_bin : np.ndarray
Time bin vector
parameter : dict
Parameters for draw peth
Notes
-----
    If pre_evt_buffer, bin_size, or nb_bins is not specified,
    the default values are taken from ..analysis.parameters
"""
from ..analysis.parameters import peth_parm
import copy
import math
parameter = peth_parm.copy()
if pre_evt_buffer is None:
pre_evt_buffer = parameter['buffer']
if bin_size is None:
bin_size = parameter['bin_size']
if nb_bins is None:
nb_bins = parameter['nb_bins']
time_bin = np.arange(0, nb_bins, bin_size) - pre_evt_buffer
peth = np.zeros((len(evt_ts_list), nb_bins)) # nb of trials x nb of time bins
for trial_ind, (evt_ts, spk_ts) in enumerate(zip(evt_ts_list, spk_ts_list)):
spk_ts_new = copy.deepcopy(spk_ts)
if not isinstance(evt_ts, np.float64):
# evt_ts = np.asarray(list(map(float, evt_ts))) + pre_evt_buffer
# spk_ts_new -= evt_ts[0]
evt_ts = np.asarray(list(map(float, evt_ts)))
spk_ts_new -= evt_ts[0]
spk_ts_new += pre_evt_buffer
else:
spk_ts_new -= evt_ts
spk_ts_new += pre_evt_buffer
for spk in spk_ts_new:
ind = math.ceil(spk / bin_size)
# print("spk = {}, bin index = {}".format(spk, ind)) # for debugging
if ind < 0: raise Exception("Index out of bound")
peth[trial_ind, ind] += 1
# Truncate the array leaving out only the portion of our interest
if duration:
ind = np.where(((0 - pre_evt_buffer) <= time_bin) & (time_bin < duration))[0]
peth = peth[:, ind[0]:ind[-1]+1]
time_bin = time_bin[ind[0]:ind[-1]+1]
return peth, time_bin, parameter
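# Example usage (a minimal sketch; event and spike times below are invented, and the
# unspecified buffer/bin-size arguments fall back to ..analysis.parameters as noted above):
# >>> evt_ts_list = [np.float64(1000.), np.float64(2500.)]                  # one onset per trial (ms)
# >>> spk_ts_list = [np.array([995., 1020., 1100.]), np.array([2510., 2600.])]
# >>> peth, time_bin, parm = get_peth(evt_ts_list, spk_ts_list)
# >>> peth.shape                                                            # (nb of trials, nb of time bins)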
def get_pcc(fr_array: np.ndarray) -> dict:
"""
Get pairwise cross-correlation
Parameters
----------
fr_array : np.ndarray
(trial x time_bin)
Returns
-------
pcc_dict : dict
"""
pcc_dict = {}
pcc_arr = np.array([])
for ind1, fr1 in enumerate(fr_array):
for ind2, fr2 in enumerate(fr_array):
if ind2 > ind1:
if np.linalg.norm((fr1 - fr1.mean()), ord=1) * np.linalg.norm((fr2 - fr2.mean()), ord=1):
if not np.isnan(np.corrcoef(fr1, fr2)[0, 1]):
pcc_arr = np.append(pcc_arr, np.corrcoef(fr1, fr2)[0, 1]) # get correlation coefficient
pcc_dict['array'] = pcc_arr
pcc_dict['mean'] = round(pcc_arr.mean(), 3)
return pcc_dict
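# Example usage (illustrative only; the firing-rate matrix is random data):
# >>> fr_array = np.random.rand(10, 200)        # 10 trials x 200 time bins
# >>> pcc = get_pcc(fr_array)
# >>> pcc['mean'], pcc['array'].shape           # mean correlation and all pairwise r values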
def jitter_spk_ts(spk_ts_list, shuffle_limit, reproducible=True):
"""
    Add a random temporal jitter to the spikes

    Parameters
    ----------
    spk_ts_list : list
        List of spike-timestamp arrays (one per trial)
    shuffle_limit : int
        Jitter range (in ms); each spike is shifted by a value drawn uniformly from -shuffle_limit to shuffle_limit
    reproducible : bool
        Make the results reproducible by setting the seed equal to the index
"""
spk_ts_jittered_list = []
for ind, spk_ts in enumerate(spk_ts_list):
np.random.seed()
if reproducible: # randomization seed
seed = ind
np.random.seed(seed) # make random jitter reproducible
else:
seed = np.random.randint(len(spk_ts_list), size=1)
np.random.seed(seed) # make random jitter reproducible
nb_spk = spk_ts.shape[0]
jitter = np.random.uniform(-shuffle_limit, shuffle_limit, nb_spk)
spk_ts_jittered_list.append(spk_ts + jitter)
return spk_ts_jittered_list
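# Example usage (illustrative; spike times are made up):
# >>> jittered = jitter_spk_ts([np.array([10., 20., 30.])], shuffle_limit=5)
# >>> jittered[0].shape                          # same number of spikes, each shifted by up to +/-5 ms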
def pcc_shuffle_test(ClassObject, PethInfo, plot_hist=False, alpha=0.05):
"""
    Run a statistical test to check whether the observed pairwise cross-correlation is significantly greater than the baseline obtained by spike-time shuffling
Parameters
----------
ClassObject : class object (e.g., NoteInfo, MotifInfo)
PethInfo : peth info class object
plot_hist : bool
Plot histogram of bootstrapped pcc values (False by default)
Returns
-------
p_sig : dict
True if the pcc is significantly above the baseline
"""
from ..analysis.parameters import peth_shuffle
from collections import defaultdict
from functools import partial
import scipy.stats as stats
import matplotlib.pyplot as plt
pcc_shuffle = defaultdict(partial(np.ndarray, 0))
for i in range(peth_shuffle['shuffle_iter']):
ClassObject.jitter_spk_ts(peth_shuffle['shuffle_limit'])
pi_shuffle = ClassObject.get_note_peth(shuffle=True) # peth object
pi_shuffle.get_fr() # get firing rates
pi_shuffle.get_pcc() # get pcc
for context, pcc in pi_shuffle.pcc.items():
pcc_shuffle[context] = np.append(pcc_shuffle[context], pcc['mean'])
# One-sample t-test (one-sided)
p_val = {}
p_sig = {}
for context in pcc_shuffle.keys():
(_, p_val[context]) = stats.ttest_1samp(a=pcc_shuffle[context], popmean=PethInfo.pcc[context]['mean'],
nan_policy='omit', alternative='less') # one-tailed t-test
for context, value in p_val.items():
p_sig[context] = value < alpha
# Plot histogram
if plot_hist:
from ..utils.draw import remove_right_top
fig, axes = plt.subplots(1, 2, figsize=(6, 3))
plt.suptitle('PCC shuffle distribution', y=.98, fontsize=10)
for axis, context in zip(axes, pcc_shuffle.keys()):
axis.set_title(context)
axis.hist(pcc_shuffle[context], color='k')
axis.set_xlim([-0.1, 0.6])
axis.set_xlabel('PCC'), axis.set_ylabel('Count')
if p_sig[context]:
axis.axvline(x=PethInfo.pcc[context]['mean'], color='r', linewidth=1, ls='--')
else:
axis.axvline(x=PethInfo.pcc[context]['mean'], color='k', linewidth=1, ls='--')
remove_right_top(axis)
plt.tight_layout()
plt.show()
return p_sig
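# Typical call pattern (a sketch, not a runnable test: `ni` stands for a NoteInfo
# object and `pi` for its PethInfo with pcc already computed; both names are illustrative):
# >>> pi = ni.get_note_peth(); pi.get_fr(); pi.get_pcc()
# >>> p_sig = pcc_shuffle_test(ni, pi, plot_hist=True)
# >>> p_sig                                      # e.g., {'U': True, 'D': False}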
class ClusterInfo:
def __init__(self, path, channel_nb, unit_nb, format='rhd', *name, update=False, time_unit='ms'):
"""
Load information about cluster
Parameters
----------
path : path
path that contains recording files for the cluster
channel_nb : int
number of the channel that recorded the cluster
unit_nb : int
number id of the cluster (needed because multiple neurons could have been recorded in the same session & channel)
format : str
'rhd' by default (Intan)
name : name of the cluster
e.g., ('096-g70r40-Predeafening-D07(20191106)-S03-Ch17-Cluster01')
update : bool
            If the cache does not exist, create a .npy cache file in the same folder so that the raw data does not have to be re-read every time the class is instantiated.
time_unit : str
'ms' by default
"""
from ..analysis.load import load_song
self.path = path
if channel_nb: # if a neuron was recorded
if len(str(channel_nb)) == 1:
self.channel_nb = 'Ch0' + str(channel_nb)
elif len(str(channel_nb)) == 2:
self.channel_nb = 'Ch' + str(channel_nb)
else:
self.channel_nb = 'Ch'
self.unit_nb = unit_nb
self.format = format
if name:
self.name = name[0]
else:
self.name = self.path
self._print_name()
# Load events
file_name = self.path / "ClusterInfo_{}_Cluster{}.npy".format(self.channel_nb, self.unit_nb)
if update or not file_name.exists(): # if .npy doesn't exist or want to update the file
song_info = load_song(self.path)
# Save cluster_info as a numpy object
np.save(file_name, song_info)
else:
song_info = np.load(file_name, allow_pickle=True).item()
# Set the dictionary values to class attributes
for key in song_info:
setattr(self, key, song_info[key])
# Load spike
if channel_nb and unit_nb:
self._load_spk(time_unit)
def __repr__(self): # print attributes
return str([key for key in self.__dict__.keys()])
def _print_name(self) -> None:
print('')
print('Load cluster {self.name}'.format(self=self))
def list_files(self, ext: str):
from ..utils.functions import list_files
return list_files(self.path, ext)
def _load_spk(self, time_unit, delimiter='\t') -> None:
"""
Load spike information
Parameters
----------
time_unit : str
time unit (e.g., 'ms')
delimiter : str
delimiter of the cluster file (tab (\t) by default)
Returns
-------
sets spk_wf, spk_ts, nb_spk as attributes
"""
spk_txt_file = list(self.path.glob('*' + self.channel_nb + '(merged).txt'))
if not spk_txt_file:
print("spk text file doesn't exist !")
return
spk_txt_file = spk_txt_file[0]
spk_info = np.loadtxt(spk_txt_file, delimiter=delimiter, skiprows=1) # skip header
# Select only the unit (there could be multiple isolated units in the same file)
if self.unit_nb: # if the unit number is specified
spk_info = spk_info[spk_info[:, 1] == self.unit_nb, :]
        spk_ts = spk_info[:, 2] # spike time stamps
        spk_wf = spk_info[:, 3:] # spike waveform
nb_spk = spk_wf.shape[0] # total number of spikes
self.spk_wf = spk_wf # individual waveforms
self.nb_spk = nb_spk # the number of spikes
# Units are in second by default, but convert to millisecond with the argument
if time_unit == 'ms':
spk_ts *= 1E3
        # Output spike timestamps per file in a list
spk_list = []
for file_start, file_end in zip(self.file_start, self.file_end):
spk_list.append(spk_ts[np.where((spk_ts >= file_start) & (spk_ts <= file_end))])
        self.spk_ts = spk_list # spike timestamps in ms
# print("spk_ts, spk_wf, nb_spk attributes added")
def analyze_waveform(self,
align_wf=True,
interpolate=True, interp_factor=None
):
"""
Perform waveform analysis
Parameters
----------
align_wf : bool
align all spike waveforms relative to the max location
interpolate : bool
Set to true if waveform interpolation is needed
interp_factor : int
Factor by which to increase the sampling frequency of the waveform
e.g., 100 if you want to increase the data points by 100 fold
"""
from ..analysis.functions import align_waveform, get_half_width
from ..analysis.parameters import sample_rate
if align_wf:
self.spk_wf = align_waveform(self.spk_wf)
def _get_spk_profile(wf_ts, avg_wf, interpolate=interpolate):
            spk_height = np.abs(np.max(avg_wf) - np.min(avg_wf)) # in microvolts
if interpolate:
spk_width = abs(((np.argmax(avg_wf) - np.argmin(avg_wf)) + 1)) * (
(1 / sample_rate[self.format]) / interp_factor) * 1E6 # in microseconds
else:
spk_width = abs(((np.argmax(avg_wf) - np.argmin(avg_wf)) + 1)) * (
1 / sample_rate[self.format]) * 1E6 # in microseconds
deflection_range, half_width = get_half_width(wf_ts, avg_wf) # get the half width from the peak deflection
return spk_height, spk_width, half_width, deflection_range
if not interp_factor:
from ..analysis.parameters import interp_factor
interp_factor = interp_factor
self.avg_wf = np.nanmean(self.spk_wf, axis=0)
self.wf_ts = np.arange(0, self.avg_wf.shape[0]) / sample_rate[self.format] * 1E3 # x-axis in ms
if interpolate: # interpolate the waveform to increase sampling frequency
from scipy import interpolate
f = interpolate.interp1d(self.wf_ts, self.avg_wf)
wf_ts_interp = np.arange(0, self.wf_ts[-1], ((self.wf_ts[1] - self.wf_ts[0]) * (1 / interp_factor)))
assert (np.diff(wf_ts_interp)[0] * interp_factor) == np.diff(self.wf_ts)[0]
avg_wf_interp = f(wf_ts_interp) # use interpolation function returned by `interp1d`
# Replace the original value with interpolated ones
self.wf_ts_interp = wf_ts_interp
self.avg_wf_interp = avg_wf_interp
spk_height, spk_width, half_width, deflection_range = _get_spk_profile(wf_ts_interp, avg_wf_interp)
else:
spk_height, spk_width, half_width, deflection_range = _get_spk_profile(self.wf_ts, self.avg_wf)
self.spk_height = round(spk_height, 3) # in microvolts
self.spk_width = round(spk_width, 3) # in microseconds
self.half_width = half_width
self.deflection_range = deflection_range # the range where half width was calculated
# print("avg_wf, spk_height (uv), spk_width (us), wf_ts (ms) added")
def get_conditional_spk(self) -> dict:
"""Get spike timestamps from different contexts"""
conditional_spk = {}
conditional_spk['U'] = [spk_ts for spk_ts, context in zip(self.spk_ts, self.contexts) if context == 'U']
conditional_spk['D'] = [spk_ts for spk_ts, context in zip(self.spk_ts, self.contexts) if context == 'D']
return conditional_spk
def get_correlogram(self, ref_spk_list, target_spk_list, normalize=False) -> dict:
"""Get auto- or cross-correlogram"""
from ..analysis.parameters import spk_corr_parm
import math
correlogram = {}
for social_context in set(self.contexts):
# Compute spk correlogram
corr_temp = np.zeros(len(spk_corr_parm['time_bin']))
for ref_spks, target_spks, context in zip(ref_spk_list, target_spk_list, self.contexts):
if context == social_context:
for ref_spk in ref_spks:
for target_spk in target_spks:
diff = target_spk - ref_spk # time difference between two spikes
if (diff) and (diff <= spk_corr_parm['lag'] and diff >= -spk_corr_parm['lag']):
if diff < 0:
ind = np.where(spk_corr_parm['time_bin'] <= -math.ceil(abs(diff)))[0][-1]
elif diff > 0:
ind = np.where(spk_corr_parm['time_bin'] >= math.ceil(diff))[0][0]
# print("diff = {}, bin index = {}".format(diff, spk_corr_parm['time_bin'][ind])) # for debugging
corr_temp[ind] += 1
# Make sure the array is symmetrical
first_half = np.fliplr([corr_temp[:int((spk_corr_parm['lag'] / spk_corr_parm['bin_size']))]])[0]
second_half = corr_temp[int((spk_corr_parm['lag'] / spk_corr_parm['bin_size'])) + 1:]
assert np.sum(first_half - second_half) == 0
# Normalize correlogram by the total sum (convert to probability density )
if normalize:
                corr_temp /= np.sum(corr_temp)
correlogram[social_context] = corr_temp
correlogram['parameter'] = spk_corr_parm # store parameters in the dictionary
return correlogram
def jitter_spk_ts(self, shuffle_limit, reproducible=True):
"""
Add a random temporal jitter to the spike
Parameters
----------
shuffle_limit : int
shuffling limit (in ms)
            e.g., if set to 5, a value drawn uniformly between -5 and 5 (ms) will be added to each spike timestamp
reproducible : bool
make the results reproducible by setting the seed as equal to index
"""
spk_ts_jittered_list = []
for ind, spk_ts in enumerate(self.spk_ts):
np.random.seed()
if reproducible: # randomization seed
seed = ind
np.random.seed(seed) # make random jitter reproducible
else:
seed = np.random.randint(len(self.spk_ts), size=1)
np.random.seed(seed) # make random jitter reproducible
nb_spk = spk_ts.shape[0]
jitter = np.random.uniform(-shuffle_limit, shuffle_limit, nb_spk)
spk_ts_jittered_list.append(spk_ts + jitter)
self.spk_ts_jittered = spk_ts_jittered_list
def get_jittered_corr(self) -> dict:
"""Get spike correlogram from time-jittered spikes"""
from ..analysis.parameters import corr_shuffle
from collections import defaultdict
correlogram_jitter = defaultdict(list)
for iter in range(corr_shuffle['shuffle_iter']):
self.jitter_spk_ts(corr_shuffle['shuffle_limit'])
corr_temp = self.get_correlogram(self.spk_ts_jittered, self.spk_ts_jittered)
# Combine correlogram from two contexts
for key, value in corr_temp.items():
if key != 'parameter':
try:
correlogram_jitter[key].append(value)
except:
correlogram_jitter[key] = value
# Convert to array
for key, value in correlogram_jitter.items():
correlogram_jitter[key] = (np.array(value))
return correlogram_jitter
def get_isi(self, add_premotor_spk=False):
"""
Get inter-spike interval
Parameters
----------
add_premotor_spk : bool
Add spikes from the premotor window for calculation
"""
isi_dict = {}
list_zip = zip(self.onsets, self.offsets, self.spk_ts)
if not add_premotor_spk:
            # Exclude spikes from the pre-motif buffer for this calculation
            # (pre-motor spikes are included in self.spk_ts by default)
spk_list = []
for onset, offset, spks in list_zip:
onset = np.asarray(list(map(float, onset)))
offset = np.asarray(list(map(float, offset)))
spk_list.append(spks[np.where((spks >= onset[0]) & (spks <= offset[-1]))])
for context1 in set(self.contexts):
if not add_premotor_spk:
spk_list_context = [spk_ts for spk_ts, context2 in zip(spk_list, self.contexts) if context2 == context1]
else:
spk_list_context = [spk_ts for spk_ts, context2 in zip(self.spk_ts, self.contexts) if
context2 == context1]
isi_dict[context1] = get_isi(spk_list_context)
return isi_dict
@property
def nb_files(self) -> dict:
"""
Return the number of files per context
Returns
-------
nb_files : dict
Number of files per context ('U', 'D', 'All')
"""
nb_files = {}
nb_files['U'] = len([context for context in self.contexts if context == 'U'])
nb_files['D'] = len([context for context in self.contexts if context == 'D'])
nb_files['All'] = nb_files['U'] + nb_files['D']
return nb_files
def nb_bouts(self, song_note: str) -> dict:
"""
Return the number of bouts per context
Parameters
----------
song_note : str
song motif syllables
Returns
-------
nb_bouts : dict
"""
from ..analysis.functions import get_nb_bouts
nb_bouts = {}
syllable_list = [syllable for syllable, context in zip(self.syllables, self.contexts) if context == 'U']
syllables = ''.join(syllable_list)
nb_bouts['U'] = get_nb_bouts(song_note, syllables)
syllable_list = [syllable for syllable, context in zip(self.syllables, self.contexts) if context == 'D']
syllables = ''.join(syllable_list)
nb_bouts['D'] = get_nb_bouts(song_note, syllables)
nb_bouts['All'] = nb_bouts['U'] + nb_bouts['D']
return nb_bouts
def nb_motifs(self, motif: str) -> dict:
"""
Return the number of motifs per context
Parameters
----------
        motif : str
Song motif (e.g., 'abcd')
Returns
-------
nb_motifs : dict
"""
from ..utils.functions import find_str
nb_motifs = {}
syllable_list = [syllable for syllable, context in zip(self.syllables, self.contexts) if context == 'U']
syllables = ''.join(syllable_list)
nb_motifs['U'] = len(find_str(syllables, motif))
syllable_list = [syllable for syllable, context in zip(self.syllables, self.contexts) if context == 'D']
syllables = ''.join(syllable_list)
nb_motifs['D'] = len(find_str(syllables, motif))
nb_motifs['All'] = nb_motifs['U'] + nb_motifs['D']
return nb_motifs
def get_note_info(self, target_note,
pre_buffer=0, post_buffer=0
):
"""
Obtain a class object (NoteInfo) for individual note
        spikes will be collected from (note onset - pre_buffer) to (note offset + post_buffer)
Parameters
----------
target_note : str
Get information from this note
pre_buffer : int
Amount of time buffer relative to the event onset (e.g., syllable onset)
post_buffer : int
            Amount of time buffer relative to the event offset (e.g., syllable offset)
Returns
-------
NoteInfo : class object
"""
from ..utils.functions import find_str
syllables = ''.join(self.syllables)
onsets = np.hstack(self.onsets)
offsets = np.hstack(self.offsets)
durations = np.hstack(self.durations)
contexts = ''
for i in range(len(self.contexts)): # concatenate contexts
contexts += self.contexts[i] * len(self.syllables[i])
ind = np.array(find_str(syllables, target_note)) # get note indices
        if not ind.any(): # skip if the note does not exist
return
note_onsets = np.asarray(list(map(float, onsets[ind])))
note_offsets = np.asarray(list(map(float, offsets[ind])))
note_durations = np.asarray(list(map(float, durations[ind])))
note_contexts = ''.join(np.asarray(list(contexts))[ind])
        # Get the note that immediately follows
next_notes = ''
for i in ind:
next_notes += syllables[i + 1]
# Get spike info
spk_ts = np.hstack(self.spk_ts)
note_spk_ts_list = []
for onset, offset in zip(note_onsets, note_offsets):
note_spk_ts_list.append(
spk_ts[np.where((spk_ts >= onset - pre_buffer) & (spk_ts <= offset + post_buffer))])
# Organize data into a dictionary
note_info = {
'note': target_note,
'next_notes' : next_notes,
'onsets': note_onsets,
'offsets': note_offsets,
'durations': note_durations,
'contexts': note_contexts,
'median_dur': np.median(note_durations, axis=0),
'spk_ts': note_spk_ts_list,
'path': self.path, # directory where the data exists
'pre_buffer' : pre_buffer,
'post_buffer' : post_buffer
}
return NoteInfo(note_info) # return note info
@property
def open_folder(self) -> None:
"""Open the data folder"""
from ..utils.functions import open_folder
open_folder(self.path)
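# Example usage (a sketch; the folder path, channel and unit numbers below are
# hypothetical placeholders for an actual recording session):
# >>> from pathlib import Path
# >>> ci = ClusterInfo(Path('path/to/cluster_dir'), channel_nb=17, unit_nb=1, format='rhd')
# >>> ci.analyze_waveform()                      # adds avg_wf, spk_height, spk_width, ...
# >>> ni = ci.get_note_info('a')                 # NoteInfo object for syllable 'a' (if present)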
class NoteInfo:
"""
Class for storing information about a single note syllable and its associated spikes
"""
def __init__(self, note_dict):
# Set the dictionary values to class attributes
for key in note_dict:
setattr(self, key, note_dict[key])
# Perform PLW (piecewise linear warping)
self.spk_ts_warp = self._piecewise_linear_warping()
def __repr__(self):
return str([key for key in self.__dict__.keys()])
def select_index(self, index) -> None:
"""
Select only the notes with the matching index
Parameters
----------
index : np.array or list
Note indices to keep
"""
if isinstance(index, list):
index = np.array(index)
self.contexts = ''.join(np.array(list(self.contexts))[index])
self.onsets, self.offsets, self.durations, self.spk_ts, self.spk_ts_warp \
= self.onsets[index], self.offsets[index], self.durations[index], self.spk_ts[index], self.spk_ts_warp[index]
def select_context(self, target_context : str,
keep_median_duration=True
) -> None:
"""
Select one context
Parameters
----------
target_context : str
'U' or 'D'
keep_median_duration : bool
            Normally the median note duration is calculated using all syllables regardless of the context;
            one may prefer to keep this median to reduce variability when calculating pcc.
            If set to False, a new median duration is calculated using only the selected notes.
"""
zipped_list = \
list(zip(self.contexts, self.next_notes, self.onsets, self.offsets, self.durations, self.spk_ts, self.spk_ts_warp))
zipped_list = list(filter(lambda x: x[0] == target_context, zipped_list)) # filter context
unzipped_object = zip(*zipped_list)
self.contexts, self.next_notes, self.onsets, self.offsets, self.durations, self.spk_ts, self.spk_ts_warp = \
list(unzipped_object)
self.contexts = ''.join(self.contexts)
self.next_notes = ''.join(self.next_notes)
self.onsets = np.array(self.onsets)
self.offsets = np.array(self.offsets)
self.durations = np.array(self.durations)
self.spk_ts = np.array(self.spk_ts)
self.spk_ts_warp = np.array(self.spk_ts_warp)
if not keep_median_duration:
            self.median_dur = np.median(self.durations, axis=0) # recompute from the selected notes
def get_entropy(self, normalize=True, mode='spectral'):
"""
Calculate syllable entropy from all renditions and get the average
Two versions : spectro-temporal entropy & spectral entropy
"""
from ..analysis.parameters import nb_note_crit
from ..analysis.functions import get_spectral_entropy, get_spectrogram
from ..utils.functions import find_str
entropy_mean = {}
entropy_var = {}
audio = AudioData(self.path)
for context in ['U', 'D']:
se_mean_arr = np.array([], dtype=np.float32)
se_var_arr = np.array([], dtype=np.float32)
ind = np.array(find_str(self.contexts, context))
if ind.shape[0] >= nb_note_crit:
for (start, end) in zip(self.onsets[ind], self.offsets[ind]):
timestamp, data = audio.extract([start, end]) # audio object
_, spect, _ = get_spectrogram(timestamp, data, audio.sample_rate)
se = get_spectral_entropy(spect, normalize=normalize, mode=mode)
if isinstance(se, dict):
se_mean_arr = np.append(se_mean_arr, se['mean']) # spectral entropy averaged over time bins per rendition
se_var_arr = np.append(se_var_arr, se['var']) # spectral entropy variance per rendition
else:
se_mean_arr = np.append(se_mean_arr, se) # spectral entropy time-resolved
entropy_mean[context] = round(se_mean_arr.mean(), 3)
entropy_var[context] = round(se_var_arr.mean(), 5)
if mode == 'spectro_temporal':
return entropy_mean, entropy_var
else: # spectral entropy (does not have entropy variance)
return entropy_mean
def _piecewise_linear_warping(self):
"""Perform piecewise linear warping per note"""
import copy
note_spk_ts_warp_list = []
for onset, duration, spk_ts in zip(self.onsets, self.durations, self.spk_ts):
spk_ts_new = copy.deepcopy(spk_ts)
ratio = self.median_dur / duration
origin = 0
spk_ts_temp, ind = spk_ts[spk_ts >= onset], np.where(spk_ts >= onset)
spk_ts_temp = ((ratio * ((spk_ts_temp - onset))) + origin) + onset
np.put(spk_ts_new, ind, spk_ts_temp) # replace original spk timestamps with warped timestamps
note_spk_ts_warp_list.append(spk_ts_new)
return note_spk_ts_warp_list
def get_note_peth(self, time_warp=True, shuffle=False, pre_evt_buffer=None, duration=None,
bin_size=None,
nb_bins=None
):
"""
Get peri-event time histograms for single syllable
Parameters
----------
time_warp : perform piecewise linear transform
shuffle : add jitter to spike timestamps
duration : duration of the peth
bin_size : size of single bin (in ms) (take values from peth_parm by default)
nb_bins : number of time bins (take values from peth_parm by default)
Returns
-------
PethInfo : class object
"""
peth_dict = {}
if shuffle:
peth, time_bin, peth_parm = \
get_peth(self.onsets, self.spk_ts_jittered,
pre_evt_buffer=pre_evt_buffer, duration=duration,
bin_size=bin_size,
nb_bins=nb_bins
)
else:
if time_warp: # peth calculated from time-warped spikes by default
# peth, time_bin = get_note_peth(self.onsets, self.spk_ts_warp, self.median_durations.sum()) # truncated version to fit the motif duration
peth, time_bin, peth_parm = \
get_peth(self.onsets, self.spk_ts_warp,
pre_evt_buffer=pre_evt_buffer, duration=duration,
bin_size = bin_size,
nb_bins = nb_bins
)
else:
peth, time_bin, peth_parm = \
get_peth(self.onsets, self.spk_ts,
pre_evt_buffer=pre_evt_buffer, duration=duration,
bin_size=bin_size,
nb_bins=nb_bins
)
peth_dict['peth'] = peth
peth_dict['time_bin'] = time_bin
peth_dict['parameters'] = peth_parm
peth_dict['contexts'] = self.contexts
peth_dict['median_duration'] = self.median_dur
return PethInfo(peth_dict) # return peth class object for further analysis
def jitter_spk_ts(self, shuffle_limit):
"""
        Add a random temporal jitter to the spikes
        This version limits the jittered timestamps to within the note window
"""
from ..analysis.parameters import pre_motor_win_size
spk_ts_jittered_list = []
list_zip = zip(self.onsets, self.offsets, self.spk_ts)
for ind, (onset, offset, spk_ts) in enumerate(list_zip):
# Find motif onset & offset
onset = float(onset) - pre_motor_win_size # start from the premotor window
jittered_spk = np.array([], dtype=np.float32)
for spk_ind, spk in enumerate(spk_ts):
while True:
jitter = np.random.uniform(-shuffle_limit, shuffle_limit, 1)
new_spk = spk + jitter
if onset < new_spk < offset:
jittered_spk = np.append(jittered_spk, spk + jitter)
break
spk_ts_jittered_list.append(jittered_spk)
self.spk_ts_jittered = spk_ts_jittered_list
@property
def nb_note(self) -> dict:
"""Return number of notes per context"""
from ..utils.functions import find_str
nb_note = {}
for context in ['U', 'D']:
nb_note[context] = len(find_str(self.contexts, context))
return nb_note
@property
def mean_fr(self) -> dict:
"""Return mean firing rates for the note (includes pre-motor window) per context"""
from ..analysis.parameters import nb_note_crit, pre_motor_win_size
from ..utils.functions import find_str
note_spk = {}
note_fr = {}
for context1 in ['U', 'D']:
if self.nb_note[context1] >= nb_note_crit:
note_spk[context1] = \
sum([len(spk) for context2, spk in zip(self.contexts, self.spk_ts) if context2 == context1])
note_fr[context1] = \
round(note_spk[context1] / ((self.durations[find_str(self.contexts, context1)] + pre_motor_win_size).sum() / 1E3), 3)
else:
note_fr[context1] = np.nan
return note_fr
# @property
# def open_folder(self) -> None:
# """Open the data folder"""
# from ..utils.functions import open_folder
#
# open_folder(self.path)
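# Example usage (a sketch; `ci` is assumed to be the ClusterInfo object from the
# example above and 'a' a hypothetical syllable label):
# >>> ni = ci.get_note_info('a', pre_buffer=50, post_buffer=50)
# >>> ni.select_context('U')                     # keep undirected renditions only
# >>> pi = ni.get_note_peth()                    # PethInfo object for this syllable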
class MotifInfo(ClusterInfo):
"""
Class object for motif information
child class of ClusterInfo
"""
def __init__(self, path, channel_nb, unit_nb, motif, format='rhd', *name, update=False):
super().__init__(path, channel_nb, unit_nb, format, *name, update=False)
self.motif = motif
if name:
self.name = name[0]
else:
self.name = str(self.path)
# Load motif info
file_name = self.path / "MotifInfo_{}_Cluster{}.npy".format(self.channel_nb, self.unit_nb)
if update or not file_name.exists(): # if .npy doesn't exist or want to update the file
motif_info = self._load_motif()
# Save info dict as a numpy object
np.save(file_name, motif_info)
else:
motif_info = np.load(file_name, allow_pickle=True).item()
# Set the dictionary values to class attributes
for key in motif_info:
setattr(self, key, motif_info[key])
# Delete un-used attributes
self._delete_attr()
def _delete_attr(self):
"""Delete un-used attributes/methods inheritied from the parent class """
delattr(self, 'spk_wf')
delattr(self, 'nb_spk')
delattr(self, 'file_start')
delattr(self, 'file_end')
def _load_motif(self):
"""Load motif info"""
from ..analysis.parameters import peth_parm
from ..utils.functions import find_str
# Store values here
file_list = []
spk_list = []
onset_list = []
offset_list = []
syllable_list = []
duration_list = []
context_list = []
list_zip = zip(self.files, self.spk_ts, self.onsets, self.offsets, self.syllables, self.contexts)
for file, spks, onsets, offsets, syllables, context in list_zip:
print('Loading... ' + file)
onsets = onsets.tolist()
offsets = offsets.tolist()
# Find motifs
motif_ind = find_str(syllables, self.motif)
            # Get syllable and spike time stamps
for ind in motif_ind:
# start (first syllable) and stop (last syllable) index of a motif
start_ind = ind
stop_ind = ind + len(self.motif) - 1
motif_onset = float(onsets[start_ind])
motif_offset = float(offsets[stop_ind])
# Includes pre-motor spikes
motif_spk = spks[np.where((spks >= motif_onset - peth_parm['buffer']) & (spks <= motif_offset))]
onsets_in_motif = onsets[start_ind:stop_ind + 1] # list of motif onset timestamps
offsets_in_motif = offsets[start_ind:stop_ind + 1] # list of motif offset timestamps
file_list.append(file)
spk_list.append(motif_spk)
duration_list.append(motif_offset - motif_onset)
onset_list.append(onsets_in_motif)
offset_list.append(offsets_in_motif)
syllable_list.append(syllables[start_ind:stop_ind + 1])
context_list.append(context)
# Organize event-related info into a single dictionary object
motif_info = {
'files': file_list,
'spk_ts': spk_list,
'onsets': onset_list,
'offsets': offset_list,
'durations': duration_list, # this is motif durations
'syllables': syllable_list,
'contexts': context_list,
'parameter': peth_parm
}
# Set the dictionary values to class attributes
for key in motif_info:
setattr(self, key, motif_info[key])
# Get duration
note_duration_list, median_duration_list = self.get_note_duration()
self.note_durations = note_duration_list
self.median_durations = median_duration_list
motif_info['note_durations'] = note_duration_list
motif_info['median_durations'] = median_duration_list
# Get PLW (piecewise linear warping)
spk_ts_warp_list = self.piecewise_linear_warping()
# self.spk_ts_warp = spk_ts_warp_list
motif_info['spk_ts_warp'] = spk_ts_warp_list
return motif_info
def select_context(self, target_context : str,
keep_median_duration=True
) -> None:
"""
Select one context
Parameters
----------
target_context : str
'U' or 'D'
keep_median_duration : bool
            Normally the median note duration is calculated using all syllables regardless of the context.
            One may prefer to keep this median to reduce variability when calculating pcc.
            If set to False, a new median duration is calculated using only the selected notes.
"""
zipped_list = \
list(zip(self.contexts, self.files, self.onsets, self.offsets, self.durations, self.spk_ts, self.spk_ts_warp, self.note_durations))
zipped_list = list(filter(lambda x: x[0] == target_context, zipped_list)) # filter context
unzipped_object = zip(*zipped_list)
self.contexts, self.files, self.onsets, self.offsets, self.durations, self.spk_ts, self.spk_ts_warp, self.note_durations = \
list(unzipped_object)
if not keep_median_duration:
_, self.median_durations = self.get_note_duration()
def get_note_duration(self):
"""
Calculate note & gap duration per motif
"""
note_durations = np.empty((len(self), len(self.motif) * 2 - 1))
list_zip = zip(self.onsets, self.offsets)
for motif_ind, (onset, offset) in enumerate(list_zip):
# Convert from string to array of floats
onset = np.asarray(list(map(float, onset)))
offset = np.asarray(list(map(float, offset)))
# Calculate note & interval duration
timestamp = [[onset, offset] for onset, offset in zip(onset, offset)]
timestamp = sum(timestamp, [])
for i in range(len(timestamp) - 1):
note_durations[motif_ind, i] = timestamp[i + 1] - timestamp[i]
# Get median duration
median_durations = np.median(note_durations, axis=0)
return note_durations, median_durations
def piecewise_linear_warping(self):
"""
        Perform piecewise linear warping on raw spike timestamps
        based on the median durations of each note and gap
"""
import copy
from ..utils.functions import extract_ind
spk_ts_warped_list = []
list_zip = zip(self.note_durations, self.onsets, self.offsets, self.spk_ts)
for motif_ind, (durations, onset, offset, spk_ts) in enumerate(list_zip): # per motif
onset = np.asarray(list(map(float, onset)))
offset = np.asarray(list(map(float, offset)))
            # Make a deep copy of spk_ts so that modifications won't affect the original
spk_ts_new = copy.deepcopy(spk_ts)
# Calculate note & interval duration
timestamp = [[onset, offset] for onset, offset in zip(onset, offset)]
timestamp = sum(timestamp, [])
for i in range(0, len(self.median_durations)):
ratio = self.median_durations[i] / durations[i]
diff = timestamp[i] - timestamp[0]
if i == 0:
origin = 0
else:
origin = sum(self.median_durations[:i])
# Add spikes from motif
ind, spk_ts_temp = extract_ind(spk_ts, [timestamp[i], timestamp[i + 1]])
spk_ts_temp = ((ratio * ((spk_ts_temp - timestamp[0]) - diff)) + origin) + timestamp[0]
# spk_ts_new = np.append(spk_ts_new, spk_ts_temp)
np.put(spk_ts_new, ind, spk_ts_temp) # replace original spk timestamps with warped timestamps
spk_ts_warped_list.append(spk_ts_new)
return spk_ts_warped_list
def get_mean_fr(self, add_pre_motor=False):
"""
Calculate mean firing rates during motif
Parameters
----------
add_pre_motor : bool
Set True if you want to include spikes from the pre-motor window for calculating firing rates
(False by default)
"""
from ..analysis.parameters import peth_parm
fr_dict = {}
motif_spk_list = []
list_zip = zip(self.onsets, self.offsets, self.spk_ts)
        # Exclude spikes from the pre-motif buffer unless add_pre_motor is set
for onset, offset, spks in list_zip:
onset = np.asarray(list(map(float, onset)))
offset = np.asarray(list(map(float, offset)))
if add_pre_motor:
motif_spk_list.append(spks[np.where((spks >= (onset[0] - peth_parm['buffer'])) & (spks <= offset[-1]))])
else:
motif_spk_list.append(spks[np.where((spks >= onset[0]) & (spks <= offset[-1]))])
for context1 in set(self.contexts):
nb_spk = sum([len(spk) for spk, context2 in zip(motif_spk_list, self.contexts) if context2 == context1])
if add_pre_motor:
total_duration = sum([duration + peth_parm['buffer'] for duration, context2 in zip(self.durations, self.contexts) if context2 == context1])
else:
total_duration = sum([duration for duration, context2 in zip(self.durations, self.contexts) if context2 == context1])
mean_fr = nb_spk / (total_duration / 1E3)
fr_dict[context1] = round(mean_fr, 3)
# print("mean_fr added")
self.mean_fr = fr_dict
def jitter_spk_ts(self, shuffle_limit: int, **kwargs):
"""
        Add a random temporal jitter to the spikes
        This version limits the jittered timestamps to within the motif window
"""
from ..analysis.parameters import pre_motor_win_size
spk_ts_jittered_list = []
list_zip = zip(self.onsets, self.offsets, self.spk_ts)
for ind, (onset, offset, spk_ts) in enumerate(list_zip):
# Find motif onset & offset
onset = float(onset[0]) - pre_motor_win_size # start from the premotor window
offset = float(offset[-1])
jittered_spk = np.array([], dtype=np.float32)
for spk_ind, spk in enumerate(spk_ts):
while True:
jitter = np.random.uniform(-shuffle_limit, shuffle_limit, 1)
new_spk = spk + jitter
if onset < new_spk < offset:
jittered_spk = np.append(jittered_spk, spk + jitter)
break
spk_ts_jittered_list.append(jittered_spk)
self.spk_ts_jittered = spk_ts_jittered_list
def get_peth(self, time_warp=True, shuffle=False):
"""
Get peri-event time histogram & raster during song motif
Parameters
----------
time_warp : bool
perform piecewise linear transform
shuffle : bool
add jitter to spike timestamps
Returns
-------
PethInfo : class object
"""
peth_dict = {}
if shuffle: # Get peth with shuffled (jittered) spikes
peth, time_bin, peth_parm = get_peth(self.onsets, self.spk_ts_jittered)
else:
if time_warp: # peth calculated from time-warped spikes by default
# peth, time_bin = get_note_peth(self.onsets, self.spk_ts_warp, self.median_durations.sum()) # truncated version to fit the motif duration
peth, time_bin, peth_parm = get_peth(self.onsets, self.spk_ts_warp)
else:
peth, time_bin, peth_parm = get_peth(self.onsets, self.spk_ts)
peth_parm.pop('time_bin'); peth_parm.pop('nb_bins')
peth_dict['peth'] = peth
peth_dict['time_bin'] = time_bin
peth_dict['parameters'] = peth_parm
peth_dict['contexts'] = self.contexts
peth_dict['median_duration'] = self.median_durations.sum()
return PethInfo(peth_dict) # return peth class object for further analysis
def __len__(self):
return len(self.files)
def __repr__(self): # print attributes
return str([key for key in self.__dict__.keys()])
@property
def open_folder(self):
"""Open the data folder"""
from ..utils.functions import open_folder
open_folder(self.path)
def _print_name(self):
print('')
print('Load motif {self.name}'.format(self=self))
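# Example usage (a sketch; the path, channel/unit numbers and motif string are
# hypothetical placeholders):
# >>> mi = MotifInfo(Path('path/to/cluster_dir'), 17, 1, motif='abcd', format='rhd')
# >>> mi.get_mean_fr()                           # sets mi.mean_fr per social context
# >>> pi = mi.get_peth(time_warp=True)           # PethInfo built from piecewise-warped spikes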
class PethInfo():
def __init__(self, peth_dict: dict):
"""
Class object for peri-event time histogram (PETH)
Parameters
----------
peth_dict : dict
"peth" : array (nb of trials (motifs) x time bins), numbers indicate analysis counts in that bin
"contexts" : list of strings, social contexts
"""
# Set the dictionary values to class attributes
for key in peth_dict:
setattr(self, key, peth_dict[key])
# Get conditional peth, fr, spike counts
peth_dict = {}
peth_dict['All'] = self.peth
for context in set(self.contexts):
if type(self.contexts) == str:
self.contexts = list(self.contexts)
ind = np.array(self.contexts) == context
peth_dict[context] = self.peth[ind, :]
self.peth = peth_dict
def get_fr(self, gaussian_std=None, smoothing=True):
"""
Get trials-by-trial firing rates by default
Parameters
----------
gaussian_std : int
gaussian smoothing parameter. If not specified, read from analysis.parameters
smoothing : bool
performs gaussian smoothing on the firing rates
"""
# if duration:
# ind = (((0 - peth_parm['buffer']) <= time_bin) & (time_bin <= duration))
# peth = peth[:, ind]
# time_bin = time_bin[ind]
from ..analysis.parameters import peth_parm, gauss_std, nb_note_crit
from scipy.ndimage import gaussian_filter1d
        if not gaussian_std: # if not specified, get the value from ..analysis.parameters
gaussian_std = gauss_std
# Get trial-by-trial firing rates
fr_dict = {}
for k, v in self.peth.items(): # loop through different conditions in peth dict
if v.shape[0] >= nb_note_crit:
fr = v / (peth_parm['bin_size'] / 1E3) # in Hz
if smoothing: # Gaussian smoothing
fr = gaussian_filter1d(fr, gaussian_std)
# Truncate values outside the range
ind = (((0 - peth_parm['buffer']) <= self.time_bin) & (self.time_bin <= self.median_duration))
fr = fr[:, ind]
fr_dict[k] = fr
self.fr = fr_dict
self.time_bin = self.time_bin[ind]
# Get mean firing rates
mean_fr_dict = {}
for context, fr in self.fr.items():
fr = np.mean(fr, axis=0)
mean_fr_dict[context] = fr
if smoothing:
mean_fr_dict['gauss_std'] = gauss_std
self.mean_fr = mean_fr_dict
def get_pcc(self):
"""Get pairwise cross-correlation"""
from ..analysis.parameters import nb_note_crit
pcc_dict = {}
for k, v in self.fr.items(): # loop through different conditions in peth dict
if k != 'All':
if v.shape[0] >= nb_note_crit:
pcc = get_pcc(v)
pcc_dict[k] = pcc
self.pcc = pcc_dict
def get_fr_cv(self):
"""Get coefficient of variation (CV) of firing rates"""
        if not hasattr(self, 'mean_fr'):  # compute firing rates first if they have not been calculated
self.get_fr()
fr_cv = {}
for context, fr in self.mean_fr.items(): # loop through different conditions in peth dict
if context in ['U', 'D']:
fr_cv[context] = round(fr.std(axis=0) / fr.mean(axis=0), 3)
return fr_cv
def get_sparseness(self, bin_size=None):
"""
Get sparseness index
Parameters
----------
bin_size : int
By default, it uses the same time bin size used in peth calculation (in ms)
Returns
-------
sparseness : dict
"""
from ..analysis.parameters import gauss_std, nb_note_crit
import math
mean_fr = dict()
sparseness = dict()
if bin_size != None and bin_size != self.parameters['bin_size']:
for context, peth in self.peth.items():
if context == 'All': continue
new_peth = np.empty([peth.shape[0], 0])
nb_bins = math.ceil(peth.shape[1] / bin_size)
bin_ind = 0
start_ind = 0
end_ind = 0 + bin_size
while bin_ind < nb_bins:
if end_ind > peth.shape[1]:
end_ind = peth.shape[1]
# print(start_ind, end_ind)
peth_bin = peth[:, start_ind: end_ind].sum(axis=1).reshape(peth.shape[0], 1)
new_peth = np.append(new_peth, peth_bin, axis=1)
start_ind += bin_size
end_ind += bin_size
bin_ind += 1
fr = new_peth / (bin_size / 1E3) # in Hz
mean_fr[context] = np.mean(fr, axis=0)
else:
mean_fr = self.mean_fr
# Calculate sparseness
for context, fr in mean_fr.items():
if context not in ['U', 'D']: continue
norm_fr = fr / np.sum(fr)
sparseness[context] = round(1 + (np.nansum(norm_fr * np.log10(norm_fr)) / np.log10(len(norm_fr))), 3)
return sparseness
def get_spk_count(self):
"""
Calculate the number of spikes within a specified time window
"""
from ..analysis.parameters import peth_parm, spk_count_parm
win_size = spk_count_parm['win_size']
spk_count_dict = {}
fano_factor_dict = {}
spk_count_cv_dict = {}
for k, v in self.peth.items(): # loop through different conditions in peth dict
spk_arr = np.empty((v.shape[0], 0), int) # (renditions x time bins)
if k != 'All': # skip all trials
win_inc = 0
for i in range(v.shape[1] - win_size):
count = v[:, i: win_size + win_inc].sum(axis=1)
# print(f"from {i} to {win_size + win_inc}, count = {count}")
spk_arr = np.append(spk_arr, np.array([count]).transpose(), axis=1)
win_inc += 1
# Truncate values outside the range
ind = (((0 - peth_parm['buffer']) <= self.time_bin) & (self.time_bin <= self.median_duration))
spk_arr = spk_arr[:, :ind.shape[0]]
spk_count = spk_arr.sum(axis=0)
fano_factor = spk_arr.var(axis=0) / spk_arr.mean(
axis=0) # per time window (across renditions) (renditions x time window)
spk_count_cv = spk_count.std(axis=0) / spk_count.mean(axis=0) # cv across time (single value)
# store values in a dictionary
spk_count_dict[k] = spk_count
fano_factor_dict[k] = fano_factor
spk_count_cv_dict[k] = round(spk_count_cv, 3)
self.spk_count = spk_count_dict
self.fano_factor = fano_factor_dict
self.spk_count_cv = spk_count_cv_dict
def __repr__(self): # print attributes
return str([key for key in self.__dict__.keys()])
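# Example usage (a sketch continuing from the MotifInfo example above; values are illustrative):
# >>> pi = mi.get_peth()
# >>> pi.get_fr()                                # trial-by-trial and mean firing rates
# >>> pi.get_pcc()                               # pairwise cross-correlation per context
# >>> pi.get_sparseness(bin_size=3), pi.get_fr_cv()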
class BoutInfo(ClusterInfo):
"""
Get song & spike information for a song bout
Child class of ClusterInfo
"""
def __init__(self, path, channel_nb, unit_nb, song_note, format='rhd', *name, update=False):
super().__init__(path, channel_nb, unit_nb, format, *name, update=False)
self.song_note = song_note
if name:
self.name = name[0]
else:
self.name = str(self.path)
# Load bout info
file_name = self.path / "BoutInfo_{}_Cluster{}.npy".format(self.channel_nb, self.unit_nb)
if update or not file_name.exists(): # if .npy doesn't exist or want to update the file
bout_info = self._load_bouts()
# Save info dict as a numpy object
np.save(file_name, bout_info)
else:
bout_info = np.load(file_name, allow_pickle=True).item()
# Set the dictionary values to class attributes
for key in bout_info:
setattr(self, key, bout_info[key])
def _print_name(self):
print('')
print('Load bout {self.name}'.format(self=self))
def __len__(self):
return len(self.files)
def _load_bouts(self):
# Store values here
from ..utils.functions import find_str
file_list = []
spk_list = []
onset_list = []
offset_list = []
syllable_list = []
duration_list = []
context_list = []
list_zip = zip(self.files, self.spk_ts, self.onsets, self.offsets, self.syllables, self.contexts)
for file, spks, onsets, offsets, syllables, context in list_zip:
bout_ind = find_str(syllables, '*')
for ind in range(len(bout_ind)):
if ind == 0:
start_ind = 0
else:
start_ind = bout_ind[ind - 1] + 1
stop_ind = bout_ind[ind] - 1
# breakpoint()
bout_onset = float(onsets[start_ind])
bout_offset = float(offsets[stop_ind])
bout_spk = spks[np.where((spks >= bout_onset) & (spks <= bout_offset))]
onsets_in_bout = onsets[start_ind:stop_ind + 1] # list of bout onset timestamps
offsets_in_bout = offsets[start_ind:stop_ind + 1] # list of bout offset timestamps
file_list.append(file)
spk_list.append(bout_spk)
duration_list.append(bout_offset - bout_onset)
onset_list.append(onsets_in_bout)
offset_list.append(offsets_in_bout)
syllable_list.append(syllables[start_ind:stop_ind + 1])
context_list.append(context)
# Organize event-related info into a single dictionary object
bout_info = {
'files': file_list,
'spk_ts': spk_list,
'onsets': onset_list,
'offsets': offset_list,
'durations': duration_list, # this is bout durations
'syllables': syllable_list,
'contexts': context_list,
}
return bout_info
def plot(self):
#TODO: this function needs revision
from ..analysis.parameters import bout_buffer, freq_range, bout_color
from ..utils import save
from ..utils.draw import remove_right_top
import matplotlib.colors as colors
import matplotlib.pyplot as plt
import numpy as np
from ..database.load import ProjectLoader, DBInfo
from scipy import stats
import warnings
warnings.filterwarnings('ignore')
# Parameters
save_fig = False
update = False
dir_name = 'RasterBouts'
fig_ext = '.png' # .png or .pdf
font_size = 12 # figure font size
rec_yloc = 0.05
rect_height = 0.2
text_yloc = 1 # text height
nb_row = 13
nb_col = 1
tick_length = 1
tick_width = 1
# Load database
db = ProjectLoader().load_db()
        # SQL statement
# query = "SELECT * FROM cluster"
# query = "SELECT * FROM cluster WHERE ephysOK"
query = "SELECT * FROM cluster WHERE id = 12"
db.execute(query)
# Loop through db
for row in db.cur.fetchall():
# Load cluster info from db
cluster_db = DBInfo(row)
name, path = cluster_db.load_cluster_db()
unit_nb = int(cluster_db.unit[-2:])
channel_nb = int(cluster_db.channel[-2:])
format = cluster_db.format
ci = ClusterInfo(path, channel_nb, unit_nb, format, name, update=update) # cluster object
bi = BoutInfo(path, channel_nb, unit_nb, cluster_db.songNote, format, name, update=update) # bout object
list_zip = zip(bi.files, bi.spk_ts, bi.onsets, bi.offsets, bi.syllables, bi.contexts)
for bout_ind, (file, spks, onsets, offsets, syllables, context) in enumerate(list_zip):
# Convert from string to array of floats
onsets = np.asarray(list(map(float, onsets)))
offsets = np.asarray(list(map(float, offsets)))
spks = spks - onsets[0]
# bout start and end
start = onsets[0] - bout_buffer
end = offsets[-1] + bout_buffer
duration = offsets[-1] - onsets[0]
# Get spectrogram
audio = AudioData(path, update=update).extract([start, end]) # audio object
audio.spectrogram()
audio.spect_time = audio.spect_time - audio.spect_time[0] - bout_buffer
# Plot figure
fig = plt.figure(figsize=(8, 7))
fig.tight_layout()
fig_name = f"{file} - Bout # {bout_ind}"
print("Processing... " + fig_name)
fig.suptitle(fig_name, y=0.95)
# Plot spectrogram
ax_spect = plt.subplot2grid((nb_row, nb_col), (2, 0), rowspan=2, colspan=1)
ax_spect.pcolormesh(audio.spect_time, audio.spect_freq, audio.spect, # data
cmap='hot_r',
norm=colors.SymLogNorm(linthresh=0.05,
linscale=0.03,
vmin=0.5,
vmax=100
))
remove_right_top(ax_spect)
ax_spect.set_ylim(freq_range[0], freq_range[1])
ax_spect.set_ylabel('Frequency (Hz)', fontsize=font_size)
plt.yticks(freq_range, [str(freq_range[0]), str(freq_range[1])])
plt.setp(ax_spect.get_xticklabels(), visible=False)
plt.xlim([audio.spect_time[0] - 100, audio.spect_time[-1] + 100])
# Plot syllable duration
ax_syl = plt.subplot2grid((nb_row, nb_col), (1, 0), rowspan=1, colspan=1, sharex=ax_spect)
note_dur = offsets - onsets # syllable duration
onsets -= onsets[0] # start from 0
offsets = onsets + note_dur
# Mark syllables
for i, syl in enumerate(syllables):
rectangle = plt.Rectangle((onsets[i], rec_yloc), note_dur[i], rect_height,
linewidth=1, alpha=0.5, edgecolor='k', facecolor=bout_color[syl])
ax_syl.add_patch(rectangle)
ax_syl.text((onsets[i] + (offsets[i] - onsets[i]) / 2), text_yloc, syl, size=font_size)
ax_syl.axis('off')
# Plot song amplitude
audio.data = stats.zscore(audio.data)
audio.timestamp = audio.timestamp - audio.timestamp[0] - bout_buffer
ax_amp = plt.subplot2grid((nb_row, nb_col), (4, 0), rowspan=2, colspan=1, sharex=ax_spect)
ax_amp.plot(audio.timestamp, audio.data, 'k', lw=0.1)
ax_amp.axis('off')
# Plot rasters
ax_raster = plt.subplot2grid((nb_row, nb_col), (6, 0), rowspan=2, colspan=1, sharex=ax_spect)
# spks2 = spks - start -peth_parm['buffer'] -peth_parm['buffer']
ax_raster.eventplot(spks, colors='k', lineoffsets=0.5,
linelengths=tick_length, linewidths=tick_width, orientation='horizontal')
ax_raster.axis('off')
# Plot raw neural data
nd = NeuralData(path, channel_nb, format, update=update).extract([start, end]) # raw neural data
nd.timestamp = nd.timestamp - nd.timestamp[0] - bout_buffer
ax_nd = plt.subplot2grid((nb_row, nb_col), (8, 0), rowspan=2, colspan=1, sharex=ax_spect)
ax_nd.plot(nd.timestamp, nd.data, 'k', lw=0.5)
# Add a scale bar
plt.plot([ax_nd.get_xlim()[0] + 50, ax_nd.get_xlim()[0] + 50],
[-250, 250], 'k', lw=3) # for amplitude
plt.text(ax_nd.get_xlim()[0] - (bout_buffer / 2), -200, '500 µV', rotation=90)
plt.subplots_adjust(wspace=0, hspace=0)
remove_right_top(ax_nd)
ax_nd.spines['left'].set_visible(False)
plt.yticks([], [])
ax_nd.set_xlabel('Time (ms)')
# Save results
if save_fig:
save_path = save.make_dir(ProjectLoader().path / 'Analysis', 'RasterBouts')
save.save_fig(fig, save_path, fig_name, fig_ext=fig_ext)
else:
plt.show()
print('Done!')
class BaselineInfo(ClusterInfo):
def __init__(self, path, channel_nb, unit_nb, format='rhd', *name, update=False):
super().__init__(path, channel_nb, unit_nb, format, *name, update=False)
from ..analysis.parameters import baseline
from ..utils.functions import find_str
if name:
self.name = name[0]
else:
self.name = str(self.path)
# Load baseline info
file_name = self.path / "BaselineInfo_{}_Cluster{}.npy".format(self.channel_nb, self.unit_nb)
if update or not file_name.exists(): # if .npy doesn't exist or want to update the file
# Store values in here
file_list = []
spk_list = []
nb_spk_list = []
duration_list = []
context_list = []
baseline_info = {}
list_zip = zip(self.files, self.spk_ts, self.file_start, self.onsets, self.offsets, self.syllables,
self.contexts)
for file, spks, file_start, onsets, offsets, syllables, context in list_zip:
bout_ind_list = find_str(syllables, '*')
bout_ind_list.insert(0, -1) # start from the first index
for bout_ind in bout_ind_list:
# print(bout_ind)
if bout_ind == len(syllables) - 1: # skip if * indicates the end syllable
continue
baseline_onset = float(onsets[bout_ind + 1]) - baseline['time_buffer'] - baseline['time_win']
if bout_ind > 0 and baseline_onset < float(offsets[
bout_ind - 1]): # skip if the baseline starts before the offset of the previous syllable
continue
if baseline_onset < file_start:
baseline_onset = file_start
baseline_offset = float(onsets[bout_ind + 1]) - baseline['time_buffer']
if baseline_offset - baseline_onset < 0: # skip if there's not enough baseline period at the start of a file
continue
if baseline_onset > baseline_offset:
print('start time ={} to end time = {}'.format(baseline_onset, baseline_offset))
baseline_spk = spks[np.where((spks >= baseline_onset) & (spks <= baseline_offset))]
file_list.append(file)
spk_list.append(baseline_spk)
nb_spk_list.append(len(baseline_spk))
duration_list.append(
                        (baseline_offset - baseline_onset)) # duration in ms (converted to seconds later when computing Hz)
context_list.append(context)
baseline_info = {
'files': file_list,
'spk_ts': spk_list,
'nb_spk': nb_spk_list,
'durations': duration_list,
'contexts': context_list,
'parameter': baseline
}
# Save baseline_info as a numpy object
np.save(file_name, baseline_info)
else:
baseline_info = np.load(file_name, allow_pickle=True).item()
# Set the dictionary values to class attributes
for key in baseline_info:
setattr(self, key, baseline_info[key])
def _print_name(self):
print('')
print('Load baseline {self.name}'.format(self=self))
def get_correlogram(self, ref_spk_list, target_spk_list, normalize=False):
"""
Override the parent method
Combine correlogram from undir and dir since no contextual differentiation is needed in baseline
"""
from ..analysis.parameters import spk_corr_parm
correlogram_all = super().get_correlogram(ref_spk_list, target_spk_list, normalize=False)
correlogram = np.zeros(len(spk_corr_parm['time_bin']))
# Combine correlogram from two contexts
for key, value in correlogram_all.items():
if key in ['U', 'D']:
correlogram += value
return correlogram # return class object for further analysis
def get_jittered_corr(self) -> np.ndarray:
"""Get spike correlogram from time-jittered spikes"""
from ..analysis.parameters import corr_shuffle
correlogram_jitter = []
for iter in range(corr_shuffle['shuffle_iter']):
self.jitter_spk_ts(corr_shuffle['shuffle_limit'])
corr_temp = self.get_correlogram(self.spk_ts_jittered, self.spk_ts_jittered)
correlogram_jitter.append(corr_temp)
return np.array(correlogram_jitter)
def get_isi(self):
"""Get inter-spike interval"""
return get_isi(self.spk_ts)
@property
def mean_fr(self):
"""Mean firing rates"""
nb_spk = sum([len(spk_ts) for spk_ts in self.spk_ts])
total_duration = sum(self.durations)
mean_fr = nb_spk / (total_duration / 1E3)
return round(mean_fr, 3)
def __repr__(self): # print attributes
return str([key for key in self.__dict__.keys()])
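# Example usage (a sketch; the folder path and channel/unit numbers are hypothetical):
# >>> bi = BaselineInfo(Path('path/to/cluster_dir'), 17, 1, format='rhd')
# >>> bi.mean_fr                                 # baseline firing rate in Hz
# >>> corr = bi.get_correlogram(bi.spk_ts, bi.spk_ts)          # baseline autocorrelogram
# >>> Correlogram(corr).category(bi.get_jittered_corr())       # 'Bursting' or 'NonBursting' (Correlogram is defined later in this module)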
class AudioData:
"""
Create an object that has concatenated audio signal and its timestamps
Get all data by default; specify time range if needed
"""
def __init__(self, path, format='.wav', update=False):
from ..analysis.load import load_audio
self.path = path
self.format = format
file_name = self.path / "AudioData.npy"
if update or not file_name.exists(): # if .npy doesn't exist or want to update the file
audio_info = load_audio(self.path, self.format)
else:
audio_info = np.load(file_name, allow_pickle=True).item()
# Set the dictionary values to class attributes
for key in audio_info:
setattr(self, key, audio_info[key])
def __repr__(self): # print attributes
return str([key for key in self.__dict__.keys()])
@property
def open_folder(self):
"""Open the data folder"""
from ..utils.functions import open_folder
open_folder(self.path)
def extract(self, time_range: list):
"""
Extracts data from the specified range
Parameters
----------
time_range : list
"""
start = time_range[0]
end = time_range[-1]
ind = np.where((self.timestamp >= start) & (self.timestamp <= end))
return self.timestamp[ind], self.data[ind]
def spectrogram(self, timestamp, data, freq_range=[300, 8000]):
"""Calculate spectrogram"""
from ..utils.spect import spectrogram
spect, spect_freq, _ = spectrogram(data, self.sample_rate, freq_range=freq_range)
spect_time = np.linspace(timestamp[0], timestamp[-1], spect.shape[1]) # timestamp for spectrogram
return spect_time, spect, spect_freq
def get_spectral_entropy(self, spect, normalize=True, mode=None):
"""
Calculate spectral entropy
Parameters
----------
normalize : bool
Get normalized spectral entropy
        mode : {'spectral', 'spectro_temporal'}
Returns
-------
array of spectral entropy
"""
from ..analysis.functions import get_spectral_entropy
return get_spectral_entropy(spect, normalize=normalize, mode=mode)
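# Example usage (a sketch; the folder and time window are hypothetical):
# >>> audio = AudioData(Path('path/to/cluster_dir'))
# >>> timestamp, data = audio.extract([1000, 2000])            # extract a 1 s window (ms)
# >>> spect_time, spect, spect_freq = audio.spectrogram(timestamp, data)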
class NeuralData:
def __init__(self, path, channel_nb, format='rhd', update=False):
self.path = path
self.channel_nb = str(channel_nb).zfill(2)
self.format = format # format of the file (e.g., rhd), this info should be in the database
file_name = self.path / f"NeuralData_Ch{self.channel_nb}.npy"
if update or not file_name.exists(): # if .npy doesn't exist or want to update the file
data_info = self.load_neural_data()
            # (load_neural_data saves the concatenated data to a .npy cache internally)
else:
data_info = np.load(file_name, allow_pickle=True).item()
# Set the dictionary values to class attributes
for key in data_info:
setattr(self, key, data_info[key])
def __repr__(self): # print attributes
return str([key for key in self.__dict__.keys()])
def load_neural_data(self):
"""
Load and concatenate all neural data files (e.g., .rhd) in the input dir (path)
"""
from ..analysis.load import read_rhd
from ..analysis.parameters import sample_rate
print("")
print("Load neural data")
# List .rhd files
files = list(self.path.glob(f'*.{self.format}'))
# Initialize
timestamp_concat = np.array([], dtype=np.float64)
amplifier_data_concat = np.array([], dtype=np.float64)
# Store values in these lists
file_list = []
if self.format == 'cbin':
            # If the neural data is in .cbin format, read from .mat files that contain the concatenated data
            # (reading .cbin files directly in Python is not currently supported)
import scipy.io
mat_file = list(self.path.glob(f'*Ch{self.channel_nb}(merged).mat'))[0]
timestamp_concat = scipy.io.loadmat(mat_file)['t_amplifier'][0].astype(np.float64)
amplifier_data_concat = scipy.io.loadmat(mat_file)['amplifier_data'][0].astype(np.float64)
else:
# Loop through Intan .rhd files
for file in files:
# Load data file
print('Loading... ' + file.stem)
file_list.append(file.name)
intan = read_rhd(file) # note that the timestamp is in second
# Concatenate timestamps
intan['t_amplifier'] -= intan['t_amplifier'][0] # start from t = 0
if timestamp_concat.size == 0:
timestamp_concat = np.append(timestamp_concat, intan['t_amplifier'])
else:
intan['t_amplifier'] += (timestamp_concat[-1] + (1 / sample_rate[self.format]))
timestamp_concat = np.append(timestamp_concat, intan['t_amplifier'])
# Concatenate neural data
for ind, ch in enumerate(intan['amplifier_channels']):
if int(self.channel_nb) == int(ch['native_channel_name'][-2:]):
amplifier_data_concat = np.append(amplifier_data_concat, intan['amplifier_data'][ind, :])
timestamp_concat *= 1E3 # convert from seconds to milliseconds
# Organize data into a dictionary
data_info = {
'files': file_list,
'timestamp': timestamp_concat,
'data': amplifier_data_concat,
'sample_rate': sample_rate[self.format]
}
file_name = self.path / f"NeuralData_Ch{self.channel_nb}.npy"
np.save(file_name, data_info)
return data_info
def extract(self, time_range: list):
"""
Extracts data from the specified range
Parameters
----------
time_range : list
list of time stamps [start, end]
Returns
-------
timestamp : arr
data : arr
"""
start = time_range[0]
end = time_range[-1]
ind = np.where((self.timestamp >= start) & (self.timestamp <= end))
return self.timestamp[ind], self.data[ind]
@property
def open_folder(self):
"""Open the data folder"""
from ..utils.functions import open_folder
open_folder(self.path)
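# --- Illustrative usage sketch (added; not part of the original module) ---
def _example_neural_usage(path):
    """A minimal sketch, assuming `path` is a pathlib.Path to a session folder
    containing .rhd files.  Loads channel 3 and extracts a window starting at the
    first timestamp (timestamps are in milliseconds after loading)."""
    nd = NeuralData(path, channel_nb=3, format='rhd')
    t0 = nd.timestamp[0]
    timestamp, data = nd.extract([t0, t0 + 1000])  # 1-second window
    return timestamp, data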
class Correlogram():
"""
Class for correlogram analysis
"""
def __init__(self, correlogram):
from ..analysis.parameters import spk_corr_parm, burst_hz
corr_center = round(correlogram.shape[0] / 2) + 1 # center of the correlogram
self.data = correlogram
self.time_bin = np.arange(-spk_corr_parm['lag'],
spk_corr_parm['lag'] + spk_corr_parm['bin_size'],
spk_corr_parm['bin_size'])
if self.data.sum():
self.peak_ind = np.min(
np.abs(np.argwhere(correlogram == np.amax(correlogram)) - corr_center)) + corr_center # index of the peak
self.peak_latency = self.time_bin[self.peak_ind] - 1
self.peak_value = self.data[self.peak_ind]
burst_range = np.arange(corr_center - (1000 / burst_hz) - 1, corr_center + (1000 / burst_hz),
dtype='int') # burst range in the correlogram
self.burst_index = round(self.data[burst_range].sum() / self.data.sum(), 3)
else:
self.peak_ind = self.peak_latency = self.peak_value = self.burst_index = np.nan
def __repr__(self): # print attributes
return str([key for key in self.__dict__.keys()])
def category(self, correlogram_jitter: np.ndarray) -> str:
"""
Get bursting category of a neuron based on autocorrelogram
Parameters
----------
correlogram_jitter : np.ndarray
Random time-jittered correlogram for baseline setting
Returns
-------
Category of a neuron ('Bursting' or 'NonBursting')
"""
from ..analysis.parameters import corr_burst_crit
corr_mean = correlogram_jitter.mean(axis=0)
if corr_mean.sum():
corr_std = correlogram_jitter.std(axis=0)
upper_lim = corr_mean + (corr_std * 2)
lower_lim = corr_mean - (corr_std * 2)
self.baseline = upper_lim
# Check peak significance
if self.peak_value > upper_lim[self.peak_ind] and self.peak_latency <= corr_burst_crit:
self.category = 'Bursting'
else:
self.category = 'NonBursting'
else:
self.baseline = self.category = np.array(np.nan)
return self.category
def plot_corr(self, ax, time_bin, correlogram,
title, xlabel=None, ylabel=None,
font_size=10,
peak_line_width=0.8,
normalize=False,
peak_line=True,
baseline=True):
"""
Plot correlogram
Parameters
----------
ax : axis object
axis to plot the figure
time_bin : np.ndarray
correlogram : np.ndarray
title : str
font_size : int
title font size
normalize : bool
normalize the correlogram
"""
import matplotlib.pyplot as plt
from ..utils.draw import remove_right_top
from ..utils.functions import myround
if correlogram.sum():
ax.bar(time_bin, correlogram, color='k', rasterized=True)
ymax = max([self.baseline.max(), correlogram.max()])
ymax = round(ymax / 10) * 10  # round the y-axis limit to the nearest 10
ax.set_ylim(0, ymax)
plt.yticks([0, ax.get_ylim()[1]], [str(0), str(int(ymax))])
ax.set_title(title, size=font_size)
ax.set_xlabel(xlabel)
ax.set_ylabel(ylabel)  # same label whether or not the correlogram is normalized
remove_right_top(ax)
if peak_line and not np.isnan(self.peak_ind):
# peak_time_ind = np.where(self.time_bin == self.peak_latency)
ax.axvline(x=self.time_bin[self.peak_ind], color='r', linewidth=peak_line_width, ls='--')
if baseline and not np.isnan(self.baseline.mean()):
ax.plot(self.time_bin, self.baseline, 'm', lw=0.5, ls='--')
else:
ax.axis('off')
ax.set_title(title, size=font_size)
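# --- Illustrative usage sketch (added; not part of the original module) ---
def _example_correlogram_usage(correlogram, correlogram_jitter):
    """A minimal sketch, assuming `correlogram` is a 1-D autocorrelogram array and
    `correlogram_jitter` is a 2-D array of time-jittered correlograms (trials x bins)."""
    corr = Correlogram(correlogram)
    category = corr.category(correlogram_jitter)  # 'Bursting' or 'NonBursting'
    return corr.peak_latency, corr.burst_index, category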
class BurstingInfo:
def __init__(self, ClassInfo, *input_context):
from ..analysis.parameters import burst_hz
# ClassInfo can be BaselineInfo, MotifInfo etc
if input_context: # select data based on social context
spk_list = [spk_ts for spk_ts, context in zip(ClassInfo.spk_ts, ClassInfo.contexts) if
context == input_context[0]]
duration_list = [duration for duration, context in zip(ClassInfo.durations, ClassInfo.contexts) if
context == input_context[0]]
self.context = input_context
else:
spk_list = ClassInfo.spk_ts
duration_list = ClassInfo.durations
# Bursting analysis
burst_spk_list = []
burst_duration_arr = []
nb_bursts = []
nb_burst_spk_list = []
for ind, spks in enumerate(spk_list):
# spk = bi.spk_ts[8]
isi = np.diff(spks) # inter-spike interval
inst_fr = 1E3 / isi # instantaneous firing rates (Hz)
bursts = np.where(inst_fr >= burst_hz)[0] # burst index
# Skip if no bursting detected
if not bursts.size:
continue
# Get the number of bursts
temp = np.diff(bursts)
# -*- coding: utf-8 -*-
# """Tests for functions in sji.py"""
import datetime
import pytest
import numpy as np
from astropy import units as u
from ndcube.utils.wcs import WCS
from irispy import iris_tools
from irispy.sji import IRISMapCube, IRISMapCubeSequence
# Sample data for IRISMapCube tests
data = np.array([[[1, 2, 3, 4], [2, 4, 5, 3], [0, 1, 2, 3]],
[[2, 4, 5, 1], [10, 5, 2, 2], [10, 3, 3, 0]]])
data_2D = np.array([[1, 2, 3, 4], [2, 4, 5, 3]])
data_1D = np.array([1, 2])
data_4D = np.array([[[[1, 2, 3, 4], [2, 4, 5, 3], [0, 1, 2, 3]],
[[2, 4, 5, 1], [10, 5, 2, 2], [10, 3, 3, 0]]],
[[[1, 2, 3, 4], [2, 4, 5, 3], [0, 1, 2, 3]],
[[2, 4, 5, 1], [10, 5, 2, 2], [10, 3, 3, 0]]]])
header = {'CTYPE1': 'HPLN-TAN', 'CUNIT1': 'arcsec', 'CDELT1': 0.4,
'CRPIX1': 0, 'CRVAL1': 0, 'NAXIS1': 4,
'CTYPE2': 'HPLT-TAN', 'CUNIT2': 'arcsec', 'CDELT2': 0.5,
'CRPIX2': 0, 'CRVAL2': 0, 'NAXIS2': 3,
'CTYPE3': 'Time ', 'CUNIT3': 'seconds', 'CDELT3': 0.3,
'CRPIX3': 0, 'CRVAL3': 0, 'NAXIS3': 2}
wcs = WCS(header=header, naxis=3)
header_2D = {'CTYPE1': 'Time ', 'CUNIT1': 'seconds', 'CDELT1': 0.4,
'CRPIX1': 0, 'CRVAL1': 0, 'NAXIS1': 4,
'CTYPE2': 'HPLT-TAN', 'CUNIT2': 'arcsec', 'CDELT2': 0.5,
'CRPIX2': 0, 'CRVAL2': 0, 'NAXIS2': 2}
wcs_2D = WCS(header=header_2D, naxis=2)
header_4D = {'CTYPE1': 'Time ', 'CUNIT1': 'seconds', 'CDELT1': 0.4,
'CRPIX1': 0, 'CRVAL1': 0, 'NAXIS1': 4,
'CTYPE2': 'HPLT-TAN', 'CUNIT2': 'arcsec', 'CDELT2': 0.5,
'CRPIX2': 0, 'CRVAL2': 0, 'NAXIS2': 3,
'CTYPE3': 'Time ', 'CUNIT3': 'seconds', 'CDELT3': 0.4,
'CRPIX3': 0, 'CRVAL3': 0, 'NAXIS3': 2,
'CTYPE4': 'HPLN-TAN', 'CUNIT4': 'arcsec', 'CDELT4': 0.5,
'CRPIX4': 0, 'CRVAL4': 0, 'NAXIS4': 2}
wcs_4D = WCS(header=header_4D, naxis=4)
header_1D = {'CTYPE1': 'Time ', 'CUNIT1': 'seconds', 'CDELT1': 0.4,
'CRPIX1': 0, 'CRVAL1': 0, 'NAXIS1': 2}
wcs_1D = WCS(header=header_1D, naxis=1)
unit = iris_tools.DN_UNIT["SJI"]
mask_cube = data >= 0
mask_4D = data_4D >= 0
uncertainty = np.sqrt(data)
uncertainty_2D = np.sqrt(data_2D)
uncertainty_1D = np.sqrt(data_1D)
uncertainty_4D = np.sqrt(data_4D)
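# --- Illustrative sketch (added; not part of the original tests) ---
# A rough idea of how the sample arrays above could be wrapped into an IRISMapCube
# for the tests.  The keyword names here are an assumption based on the NDCube-style
# interface and may not match the actual constructor exactly:
#
#     cube = IRISMapCube(data, wcs, uncertainty=uncertainty, unit=unit,
#                        mask=mask_cube)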
import numpy as np
from PIL import Image, ImageDraw, ImageFont, ImageMath
from pyray.shapes.solid.polyhedron import *
from pyray.axes import *
from pyray.rotation import *
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
#############################################################################
## Scene 1 - Platonic solids popping up.
basedir = '..\\images\\RotatingCube\\'
txt = "This is a Tetrahedron"
tt = Tetartoid(0.33,0)
for i in range(0, 31):
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*i/30)
#r = rotation(3,np.pi/15*i)
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
tt.render_solid_planes(draw, r, shift=np.array([1000, 1000, 0]), scale=150*i/10.0)
writeStaggeredText(txt, draw, i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
#############################################################################
## Scene 2 - It has 12 symmetries.
basedir = '..\\images\\RotatingCube\\'
txt = "It can be slowly converted to this solid"
for i in range(0, 31):
tt = Tetartoid(0.4,0.1*i/30)
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*i/30)
#r = rotation(3,np.pi/15*i)
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
tt.render_solid_planes(draw, r, shift=np.array([1000, 1000, 0]), scale=450*(1+i/60))
writeStaggeredText(txt, draw, i, speed=2)
im.save(basedir + "im" + str(i) + ".png")
#############################################################################
## Scene a - Step by step tetartoid face
## Confirms that the tetartoid face given by wikipedia traces out a pentagon.
basedir = '..\\images\\RotatingCube\\'
pts = tetartoid_face(1,2,3)
w=10
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
for i in range(5):
j1 = pts[i]
j = j1*30+1000
draw.ellipse((j[0]-w,j[1]-w,j[0]+w,j[1]+w),fill=(255,255,255))
# render_solid_planes(faces, draw, r, scale=150)  # faces and r are only defined in Scene b below; calling this here raises a NameError
im.save(basedir + "im" + str(i) + ".png")
#############################################################################
## Scene b - Draws out tetartoid faces via tetrahedral rotations.
## Confirms that once we draw a face, we can rotate it by the tetrahedral
## rotation group to form a tetartoid.
basedir = '..\\images\\RotatingCube\\'
pts = tetartoid_face(1,2,3)
rots = tetrahedral_rotations()
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*10/30)
w=10
faces = []
faces.append(pts)
for i in range(len(rots)):
faces.append(np.dot(pts,rots[i]))
#render_solid_planes(faces,draw,r,scale=150)
#im.save(basedir + "im" + str(i) + ".png")
for i in range(31):
im = Image.new("RGB", (2048, 2048), (1,1,1))
draw = ImageDraw.Draw(im,'RGBA')
r = general_rotation(np.array([0.5,0.5,0.5]),2*np.pi*i/30)
##### May 6, 2018 #####
# The goal of this effort is to re-generalize things. In particular, I want only one function, that will generate x disks, based on x scattering phase functions.
# Note on 2021.06.30: Download by <NAME> from https://github.com/maxwellmb/anadisk_model/blob/master/anadisk_model/anadisk_sum_mask.py
# Analytic disk model
# This version is being developed to create a version that is 3D and can be summed along the line of sight instead of intergrated.
import matplotlib.pyplot as plt
import numpy as np
import math as mt
from datetime import datetime
from numba import jit
from numba import vectorize,float64
from scipy.interpolate import interp1d
import scipy.ndimage.filters as snf
import copy
###################################################################################
####################### Some Built-In Scattering Functions ########################
###################################################################################
@jit
def hgg_phase_function(phi,g):
#Inputs:
# g - the Henyey-Greenstein asymmetry parameter (passed as a one-element sequence)
# phi - the scattering angle in radians
g = g[0]
cos_phi = np.cos(phi)
g2p1 = g**2 + 1
gg = 2*g
k = 1./(4*np.pi)*(1-g*g)
return k/(g2p1 - (gg*cos_phi))**1.5
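# Note (added for clarity): the expression above is the Henyey-Greenstein phase
# function p(phi) = (1 - g^2) / (4*pi * (1 + g^2 - 2*g*cos(phi))**1.5), which is
# normalized so that its integral over the full sphere equals 1.  A quick numerical
# sanity check (illustrative only):
#
#     phis = np.linspace(0, np.pi, 10001)
#     total = np.trapz(hgg_phase_function(phis, [0.5]) * 2*np.pi*np.sin(phis), phis)
#     # total should be ~1.0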
# This function will accept a vector of scattering angles, and a vector of scattering efficiencies
# and then compute a cubic spline that fits through them all.
@jit
def phase_function_spline(angles, efficiency):
#Input arguments:
# angles - in radians
# efficiencies - from 0 to 1
return interp1d(angles, efficiency, kind='cubic')
@jit
def rayleigh(phi, args):
#Using Rayleigh scattering (e.g. Eq 9 in Graham2007+)
pmax = args[0]
return pmax*np.sin(phi)**2/(1+np.cos(phi)**2)
@jit
def modified_rayleigh(phi, args):
#Using a Rayleigh scattering function where the peak is shifted by args[1]
pmax = args[0] #The maximum scattering phase function
return pmax*np.sin(phi-np.pi/2+args[1])**2/(1+np.cos(phi-np.pi/2+args[1])**2)
##########################################################################################
############ Gen disk and integrand for a 1 scattering function disk #####################
##########################################################################################
@jit
def calculate_disk(xci,zpsi_dx,yy_dy2,x2,z2,x,zpci,xsi,a_r,R1, Rc, R2, beta_in, beta_out,scattering_function_list):
'''
# compute the brightness in each pixel
# see analytic-disk.nb - originally by <NAME>
'''
#The 'x' distance in the disk plane
xx=(xci + zpsi_dx)
#Distance in the disk plane
# d1 = np.sqrt(yy_dy2 + np.square(xx))
# d1 = np.sqrt(yy_dy2 + xx*xx)
d1_2 = yy_dy2 + xx*xx
d1 = np.sqrt(d1_2)
#Total distance from the center
d2 = x2 + yy_dy2 + z2
#The line of sight scattering angle
cos_phi=x/np.sqrt(d2)
phi = np.arccos(cos_phi)
#The scale height exponent
zz = (zpci - xsi)
# hh = (a_r*d1)
# expo = np.square(zz)/np.square(hh)
# expo = (zz*zz)/(hh*hh)
expo = (zz*zz)/(d1_2)
# expo = zz/hh
#The power law here has been moved from previous versions of anadisk so that we only calculate it once
# int2 = np.exp(0.5*expo) / np.power((R1/d1),beta)
# int1 = np.piecewise(d1,[(d1 < R1),(d1 >=R1),(d1 > R2)],[(R1/d1)**-7,(R1/d1)**beta, 0.])
# int1 = (R1/d1)**beta
# int1 = np.piecewise(d1,[ d1 < R1, d1 >=R1, d1 > R2],[lambda d1:(R1/d1)**(-7.5),lambda d1:(R1/d1)**beta, 0.])
# 3 lines below commented out by <NAME>
int1 = (R1/d1)**beta_in
int1[d1 < R1] = 0.
int1[d1 > R2] = 0.
#Get rid of some problematic pixels
d2_no = d2 == 0.
int1[d2_no] = 0.
int2 = np.exp( (0.5/a_r**2)*expo) / int1
int3 = int2 * d2
#This version is faster than the integration version because of the vectorized nature of the
# #scattering functions.
# if sf1_args is not None:
# sf1 = scattering_function1(phi,sf1_args)
# else:
# sf1 = scattering_function1(phi)
# if sf2_args is not None:
# sf2 = scattering_function2(phi,sf2_args)
# else:
# sf2 = scattering_function2(phi)
out = []
for scattering_function in scattering_function_list:
sf = scattering_function(phi)
out.append(sf/int3)
out = np.array(out)
# print(out.shape)
# out = np.rollaxis(np.array(out),0,5)
return out.T
@jit
def generate_disk(scattering_function_list, scattering_function_args_list=None,
R1=74.42, Rc = 80, R2=82.45, beta_in=-7.5,beta_out=1.0, aspect_ratio=0.1, inc=76.49, pa=30, distance=72.8,
psfcenx=140,psfceny=140, sampling=1, mask=None, dx=0, dy=0., los_factor = 4, dim = 281.,pixscale=0.01414):
'''
Keyword Arguments:
pixscale - The pixelscale to be used in "/pixel. Defaults to GPI's pixel scale (0.01414)
dim - The final image will be dim/sampling x dim/sampling pixels. Defaults to GPI datacube size.
'''
#The number of input scattering phase functions and hence the number of disks to generate
n_sf = len(scattering_function_list)
###########################################
### Setup the initial coordinate system ###
###########################################
npts=int(np.floor(dim/sampling)) #The number of pixels to use in the final image directions
npts_los = int(los_factor*npts) #The number of points along the line of sight
factor = (pixscale*distance)*sampling # A multiplicative factor determined by the sampling.
# In all of the following we only want to do calculations in part of the non-masked part of the array
# So we need to replicate the mask along the line of sight.
if mask is not None:
mask = np.dstack([~mask]*npts_los)
else:
# No mask supplied: build a boolean array that keeps every pixel along the line of sight
mask = np.zeros([npts, npts], dtype=bool)
mask = np.dstack([~mask]*npts_los)
#Set up the coordiname arrays
#The coordinate system here [x,y,z] is defined :
# +ve x is the line of sight
# +ve y is going right from the center
# +ve z is going up from the center
z,y,x = np.indices([npts,npts,npts_los])
#Center the line-of-sight coordinates on the disk center.
## THIS WAY DOESN'T WORK. IT CREATES INCONCISTENT RESULTS.
# x[mask] = x[mask]/(npts_los/(2*R2)) - R2 #We only need to calculate this where things aren't masked.
#THIS WAY IS A BIT SLOWER, BUT IT WORKS.
#Here we'll try just a set pixel scale equal to the y/z pixel scale divided by the los_factor
x = x.astype('float')
x[mask] = x[mask] - npts_los/2. #We only need to calculate this where things aren't masked.
x[mask] *=factor/los_factor
#Setting up the output array
threeD_disk = np.zeros([npts,npts,npts_los,n_sf]) + np.nan
#####################################
### Set up the coordinate system ####
#####################################
#Inclination Calculations
incl = np.radians(90-inc)
ci = mt.cos(incl) #Cosine of inclination
si = mt.sin(incl) #Sine of inclination
# x*cosine i and x*sin i
xci = x[mask] * ci
xsi = x[mask] * si
#Position angle calculations
pa_rad=np.radians(90-pa) #The position angle in radians
cos_pa=mt.cos(pa_rad) #Calculate these ahead of time
sin_pa=mt.sin(pa_rad)
a_r=aspect_ratio
# Rotate the coordinates in the image frame for the position angle
# yy=y[mask]*(cos_pa*factor) - z[mask] * (sin_pa*factor) - ((cos_pa*npts/2*factor)-sin_pa*npts/2*factor) #Rotate the y coordinate by the PA
# zz=y[mask]*(sin_pa*factor) + z[mask] * (cos_pa*factor) - ((cos_pa*npts/2*factor)+sin_pa*npts/2*factor) #Rotate the z coordinate by the PA
yy=y[mask]*(cos_pa*factor) - z[mask] * (sin_pa*factor) - ((cos_pa*psfcenx*factor)-sin_pa*psfceny*factor) #Rotate the y coordinate by the PA
zz=y[mask]*(sin_pa*factor) + z[mask] * (cos_pa*factor) - ((cos_pa*psfceny*factor)+sin_pa*psfcenx*factor) #Rotate the z coordinate by the PA
#The distance from the center in each coordiate squared
y2 = np.square(yy)
z2 = np.square(zz)
x2 = np.square(x[mask])
#This rotates the coordinates in and out of the sky
zpci=zz*ci #Rotate the z coordinate by the inclination.
zpsi=zz*si
#Subtract the stellocentric offset
zpsi_dx = zpsi - dx
yy_dy = yy - dy
#The distance from the stellocentric offset squared
yy_dy2=np.square(yy_dy)
# ########################################################
# ### Now calculate the actual brightness in each bin ####
# ########################################################
threeD_disk[:,:,:][mask] = calculate_disk(xci,zpsi_dx,yy_dy2,x2,z2,x[mask],zpci,xsi,aspect_ratio,R1, Rc, R2,beta_in,beta_out,scattering_function_list)
return np.sum(threeD_disk,axis=2)
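# Note (added for clarity): the summed array returned above has shape
# [npts, npts, n_sf] -- one disk image per input scattering phase function.
# Illustrative call (see the __main__ block below for how f and f_pol are built):
#
#     images = generate_disk([f, f_pol], sampling=1)
#     total_intensity, polarized_intensity = images[..., 0], images[..., 1]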
########################################################################################
########################################################################################
########################################################################################
if __name__ == "__main__":
sampling = 1
#With two HG functions
# sf1 = hgg_phase_function
# sf1_args = [0.8]
# sf2 = hgg_phase_function
# sf2_args = [0.3]
# im = gen_disk_dxdy_2disk(sf1, sf2,sf1_args=sf1_args, sf2_args=sf2_args, sampling=2)
#With splines fit to HG function + rayleigh
n_points = 20
angles = np.linspace(0,np.pi,n_points)
g = 0.8
pmax = 0.3
hg = hgg_phase_function(angles,[g])
f = phase_function_spline(angles,hg)
pol = hg*rayleigh(angles, [pmax])
f_pol = phase_function_spline(angles,pol)
y,x = np.indices([281,281])
rads = np.sqrt((x-140)**2+(y-140)**2)
import numpy as np
import os
import re
import requests
import sys
import time
from netCDF4 import Dataset
import pandas as pd
from bs4 import BeautifulSoup
from tqdm import tqdm
# setup constants used to access the data from the different M2M interfaces
BASE_URL = 'https://ooinet.oceanobservatories.org/api/m2m/' # base M2M URL
SENSOR_URL = '12576/sensor/inv/' # Sensor Information
# setup access credentials
AUTH = ['OOIAPI-853A3LA6QI3L62', '<KEY>']
def M2M_Call(uframe_dataset_name, start_date, end_date):
options = '?beginDT=' + start_date + '&endDT=' + end_date + '&format=application/netcdf'
r = requests.get(BASE_URL + SENSOR_URL + uframe_dataset_name + options, auth=(AUTH[0], AUTH[1]))
if r.status_code == requests.codes.ok:
data = r.json()
else:
return None
# wait until the request is completed
print('Waiting for OOINet to process and prepare data request, this may take up to 20 minutes')
url = [url for url in data['allURLs'] if re.match(r'.*async_results.*', url)][0]
check_complete = url + '/status.txt'
with tqdm(total=400, desc='Waiting') as bar:
for i in range(400):
r = requests.get(check_complete)
bar.update(1)
elapsed = (i * 3) / 60  # minutes of polling so far (3-second interval)
if r.status_code == requests.codes.ok:
    bar.n = 400
    bar.last_print_n = 400
    bar.refresh()
    print('\nrequest completed in %f minutes.' % elapsed)
    break
else:
    time.sleep(3)
return data
def M2M_Files(data, tag=''):
"""
Use a regex tag combined with the results of the M2M data request to find the data files in the THREDDS catalog.
The list of matching file paths is returned for later download and processing.
:param data: JSON object returned from M2M data request with details on where the data is to be found for download
:param tag: regex tag to use in discriminating the data files, so we only collect the correct ones
:return: the collected data as an xarray dataset
"""
# Create a list of the files from the request above using a simple regex as a tag to discriminate the files
url = [url for url in data['allURLs'] if re.match(r'.*thredds.*', url)][0]
files = list_files(url, tag)
return files
def list_files(url, tag=''):
"""
Function to create a list of the NetCDF data files in the THREDDS catalog created by a request to the M2M system.
:param url: URL to user's THREDDS catalog specific to a data request
:param tag: regex pattern used to distinguish files of interest
:return: list of files in the catalog with the URL path set relative to the catalog
"""
page = requests.get(url).text
soup = BeautifulSoup(page, 'html.parser')
pattern = re.compile(tag)
return [node.get('href') for node in soup.find_all('a', text=pattern)]
def M2M_Data(nclist,variables):
thredds = 'https://opendap.oceanobservatories.org/thredds/dodsC/ooi/'
#nclist is going to contain more than one url eventually
for jj in range(len(nclist)):
url=nclist[jj]
url=url[25:]
dap_url = thredds + url + '#fillmismatch'
openFile = Dataset(dap_url,'r')
for ii in range(len(variables)):
dum = openFile.variables[variables[ii].name]
variables[ii].data = np.append(variables[ii].data, dum[:].data)
tmp = variables[0].data/60/60/24
time_converted = pd.to_datetime(tmp, unit='D', origin=pd.Timestamp('1900-01-01'))
return variables, time_converted
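# --- End-to-end usage sketch (added; illustrative only) ---
# Typical flow: M2M_URLs (below) assembles the uframe dataset name and the var_list
# for a platform/node/instrument/method combination, M2M_Call launches the request,
# M2M_Files locates the NetCDF files, and M2M_Data fills the var_list.  The date
# strings and the way M2M_URLs returns its values are assumptions here:
#
#     data = M2M_Call(uframe_dataset_name, '2019-01-01T00:00:00.000Z',
#                     '2019-02-01T00:00:00.000Z')
#     nc_files = M2M_Files(data, tag='.*\\.nc$')
#     var_list, times = M2M_Data(nc_files, var_list)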
class var(object):
def __init__(self):
"""A Class that generically holds data with a variable name
and the units as attributes"""
self.name = ''
self.data = np.array([])
self.units = ''
def __repr__(self):
return_str = "name: " + self.name + '\n'
return_str += "units: " + self.units + '\n'
return_str += "data: size: " + str(self.data.shape)
return return_str
class structtype(object):
def __init__(self):
""" A class that imitates a Matlab structure type
"""
self._data = []
def __getitem__(self, index):
"""implement index behavior in the struct"""
if index == len(self._data):
self._data.append(var())
return self._data[index]
def __len__(self):
return len(self._data)
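# Note (added for clarity): structtype grows on demand -- indexing one element past
# the current end appends a fresh var(), so the blocks in M2M_URLs below can write
# var_list[0], var_list[1], ... without pre-allocating.  Illustrative only:
#
#     var_list = structtype()
#     var_list[0].name = 'time'   # first access creates element 0
#     len(var_list)               # -> 1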
def M2M_URLs(platform_name,node,instrument_class,method):
var_list = structtype()
#MOPAK
if platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#METBK
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
#FLORT
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/06-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/06-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/04-FLORTK000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
#FDCHP
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/08-FDCHPA000/telemetered/fdchp_a_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#DOSTA
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/02-DOFSTK000/telemetered/dofst_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
#ADCP
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/01-ADCPTA000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/01-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/01-ADCPTA000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/01-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/04-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/04-ADCPSJ000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
#ZPLSC
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#WAVSS
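#WAVSS = surface wave spectra statistics: bulk wave parameters (zero crossings, wave heights, periods, mean direction and spread) from the surface buoy.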
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
#VELPT
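#VELPT = single-point acoustic velocity meter: east/north/up velocity plus heading/pitch/roll, temperature and pressure in raw engineering units (deci-degrees, 0.01degC, 0.001dbar).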
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
#PCO2W
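#PCO2W = seawater pCO2 sensor: thermistor temperature (degC) and pCO2 of seawater (uatm).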
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
#PHSEN
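#PHSEN = seawater pH sensor: thermistor temperature (degC) and pH of seawater (unitless).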
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
#SPKIR
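#SPKIR = spectral irradiance: multi-wavelength downwelling irradiance vector (uW cm-2 nm-1).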
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
#PRESF
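#PRESF = seafloor pressure (tide) recorder on the MFN: absolute seafloor pressure (dbar) and seawater temperature (degC).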
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/02-PRESFA000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/02-PRESFA000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/02-PRESFB000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/02-PRESFC000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
#CTDBP
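#CTDBP = moored pumped CTD: temperature, practical salinity, density, pressure and conductivity.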
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/06-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/06-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/03-CTDBPE000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
#VEL3D
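#VEL3D = 3-D single-point turbulent velocity meter on the seafloor (MFN) frames: east/north/up turbulent velocity plus seawater pressure.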
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
#VEL3DK
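#VEL3DK = 3-D point velocity meter on the CE09OSPM wire-following profiler: velocity components, attitude, and the co-located CTD pressure.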
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/01-VEL3DK000/telemetered/vel3d_k_wfp_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/03-CTDPFK000/telemetered/ctdpf_ckl_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
#PCO2A
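#PCO2A = air-sea pCO2 system on the surface buoy: seawater and atmospheric pCO2 (uatm) and the derived CO2 flux.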
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
#PARAD
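#PARAD = photosynthetically available radiation (PAR) on the profiler, reported with the co-located CTD pressure.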
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/05-PARADK000/telemetered/parad_k__stc_imodem_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
#OPTAA
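#OPTAA = optical absorption and attenuation spectrophotometer; only the time record is requested for these streams.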
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/01-OPTAAC000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#NUTNR
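#NUTNR = nitrate sensor (SUNA): raw and salinity-corrected nitrate concentration (umol/L).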
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
##
#MOPAK
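    # Buoy motion package (accelerometer streams); only the time coordinate is pulled from these datasets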
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/SBD17/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSPM/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#METBK
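    # Bulk meteorology package: surface met, radiative/turbulent flux estimates, and near-surface currents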
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
#FLORT
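    # Fluorometer: chlorophyll-a, CDOM, volume scattering, and optical backscatter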
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/06-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/SBD17/06-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
#FDCHP
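    # Direct covariance flux package; only the time coordinate is pulled from this stream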
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/08-FDCHPA000/recovered_host/fdchp_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#DOSTA
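    # Dissolved oxygen optode: NSIF streams include optode temperature and oxygen estimates, MFN streams report oxygen only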
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
#ADCP
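    # Acoustic Doppler current profiler: earth-coordinate velocities with bin depths and heading/pitch/roll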
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/01-ADCPTA000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/01-ADCPTC000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/01-ADCPTA000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/01-ADCPTC000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/04-ADCPTC000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/04-ADCPSJ000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
#WAVSS
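    # Surface wave statistics: wave heights, periods, mean direction, and directional spread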
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
#VELPT
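    # Point velocity meter: single-point velocities plus attitude, temperature, and pressure in raw instrument units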
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
#uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_instrument_recovered'
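        # non-DCL recovered stream is used here; the DCL variant is kept commented out above for reference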
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
#PCO2W
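    # Seawater pCO2 sensor: thermistor temperature and pco2_seawater in uatm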
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
#PHSEN
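    # Seawater pH sensor: thermistor temperature and phsen_abcdef_ph_seawater (dimensionless)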
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
#SPKIR - downwelling spectral irradiance (RecoveredHost)
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
#PRESF - seafloor pressure / tide (RecoveredHost)
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/02-PRESFA000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/02-PRESFA000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/02-PRESFB000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/02-PRESFC000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
#CTDBP - pumped CTD: temperature, salinity, density, pressure, conductivity (RecoveredHost)
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/06-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/SBD17/06-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
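# Note: the CE09OSSM MFN CTD is a CTDBPE-series sensor (03-CTDBPE000), unlike the 03-CTDBPC000 designator used on the other moorings above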
uframe_dataset_name = 'CE09OSSM/MFD37/03-CTDBPE000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
#VEL3D - single-point turbulent velocity (RecoveredHost)
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
#PCO2A - air-sea pCO2 and CO2 flux (RecoveredHost)
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
#OPTAA - optical absorption/attenuation; only the time variable is requested (RecoveredHost)
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
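# Note: the CE09OSSM MFN OPTAA uses the 01-OPTAAC000 designator rather than the 01-OPTAAD000 used elsewhere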
uframe_dataset_name = 'CE09OSSM/MFD37/01-OPTAAC000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#NUTNR - nitrate concentration (SUNA, RecoveredHost)
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
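#CTDBP - pumped CTD (RecoveredInst)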
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD37/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/SBD17/06-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD37/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/SBD17/06-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD37/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD37/03-CTDBPE000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
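#CTDPF - profiling CTD on the CE09OSPM wire-following profiler (RecoveredWFP)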
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/03-CTDPFK000/recovered_wfp/ctdpf_ckl_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
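#ADCP - acoustic Doppler current profiler, earth-coordinate velocities (RecoveredInst)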
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/01-ADCPTA000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/01-ADCPTC000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/01-ADCPTA000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/01-ADCPTC000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/04-ADCPTC000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/04-ADCPSJ000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
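#ZPLSC - echogram data streams; only the time variable is requested (RecoveredInst)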
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
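#VELPT - single-point velocity meter (RecoveredInst)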
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/SBD17/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/SBD17/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
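#VELPT NSIF RecoveredInst Data below (point velocities, heading/pitch/roll, temperature, pressure)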
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
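#VEL3D Data below (three-component velocities; profiler RecoveredWFP and MFN RecoveredInst)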
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/01-VEL3DK000/recovered_wfp/vel3d_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
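#PRESF RecoveredInst Data below (seafloor tide pressure and temperature)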
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/02-PRESFA000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/02-PRESFA000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/02-PRESFB000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/02-PRESFC000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
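#PHSEN NSIF RecoveredInst Data below (thermistor temperature and seawater pH)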
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
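#PHSEN MFN RecoveredInst Data below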
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
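#PCO2W RecoveredInst Data below (thermistor temperature and seawater pCO2)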
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
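#PARAD RecoveredWFP Data below (profiler PAR)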
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/05-PARADK000/recovered_wfp/parad_k__stc_imodem_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
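#NUTNR NSIF RecoveredInst Data below (nitrate concentration and salinity-corrected nitrate)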
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
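#FDCHP RecoveredInst Data below (time stamps only for this stream)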
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/SBD12/08-FDCHPA000/recovered_inst/fdchp_a_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
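#FLORT Data below (chlorophyll, CDOM, and optical backscatter)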
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/SBD17/06-FLORTD000/recovered_inst/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/SBD17/06-FLORTD000/recovered_inst/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/04-FLORTK000/recovered_wfp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
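#DOSTA Data below (dissolved oxygen; profiler RecoveredWFP and mooring RecoveredInst)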
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/02-DOFSTK000/recovered_wfp/dofst_k_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
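#WAVSS_Stats Data below (wave statistics from the MFN ADCPT instruments)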
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/recovered_inst/adcpt_m_instrument_log9_recovered'
var_list[0].name = 'time'
var_list[1].name = 'significant_wave_height'
var_list[2].name = 'peak_wave_period'
var_list[3].name = 'peak_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'seconds'
var_list[3].units = 'degrees'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/recovered_inst/adcpt_m_instrument_log9_recovered'
var_list[0].name = 'time'
var_list[1].name = 'significant_wave_height'
var_list[2].name = 'peak_wave_period'
var_list[3].name = 'peak_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'seconds'
var_list[3].units = 'degrees'
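#Cabled benthic (BEP) streamed Data below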
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/06-CTDBPN106/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_no_seawater_pressure'
var_list[5].name = 'ctdbp_no_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/06-CTDBPO108/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_no_seawater_pressure'
var_list[5].name = 'ctdbp_no_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'DOSTA' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/06-CTDBPN106/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'DOSTA' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/06-CTDBPO108/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/10-PHSEND103/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/10-PHSEND107/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/09-PCO2WB103/streamed/pco2w_b_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/09-PCO2WB104/streamed/pco2w_b_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'ADCP' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/05-ADCPTB104/streamed/adcp_velocity_beam'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'ADCP' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/05-ADCPSI103/streamed/adcp_velocity_beam'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'VEL3D' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/07-VEL3DC108/streamed/vel3d_cd_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'VEL3D' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/07-VEL3DC107/streamed/vel3d_cd_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'OPTAA' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/08-OPTAAD106/streamed/optaa_sample'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'OPTAA' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/08-OPTAAC104/streamed/optaa_sample'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#CSPP (Coastal Surface-Piercing Profiler) Data below
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/08-FLORTJ000/telemetered/flort_dj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/08-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/08-FLORTJ000/telemetered/flort_dj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/08-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
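#CSPP DOSTA Data below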
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/02-DOSTAJ000/telemetered/dosta_abcdjm_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/02-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/02-DOSTAJ000/telemetered/dosta_abcdjm_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/02-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
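#CSPP CTD Data below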
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/09-CTDPFJ000/telemetered/ctdpf_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/09-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/09-CTDPFJ000/telemetered/ctdpf_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/09-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
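#CSPP PARAD Data below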
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/10-PARADJ000/telemetered/parad_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/10-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/10-PARADJ000/telemetered/parad_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/10-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
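#CSPP NUTNR Data below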
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/06-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/06-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
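    # -- CSPP profilers (CE01ISSP, CE06ISSP): SPKIR-ABJ downwelling spectral irradiance --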
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/07-SPKIRJ000/telemetered/spkir_abj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/07-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/07-SPKIRJ000/telemetered/spkir_abj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/07-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
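    # -- CSPP profilers (CE01ISSP, CE06ISSP): VELPT-J point velocities plus heading/roll/pitch,
    #    temperature and interpolated CTD pressure --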
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/05-VELPTJ000/telemetered/velpt_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/05-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/05-VELPTJ000/telemetered/velpt_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/05-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
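    # -- CSPP profilers (CE01ISSP, CE06ISSP): OPTAA-DJ, recovered_cspp only (time and CTD pressure variables) --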
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
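    # -- Shelf surface-piercing profilers (CE02SHSP, CE07SHSP), recovered_cspp streams:
    #    FLORT, DOSTA, CTD, PARAD, NUTNR, SPKIR, VELPT and OPTAA --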
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/07-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/07-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/01-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/01-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/08-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/08-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/09-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/09-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/05-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/05-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/06-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/06-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/02-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/02-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
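    # -- Coastal Endurance gliders (CE05MOAS/GL*): CTDGV glider CTD, telemetered and recovered_host --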
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
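    # -- Coastal Endurance gliders (CE05MOAS/GL*): DOSTA glider dissolved oxygen, telemetered and recovered_host --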
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
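    # FLORT (fluorometer: chlorophyll, CDOM, optical backscatter) streams for the CE05MOAS gliders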
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
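    # PARAD (photosynthetically available radiation) streams for the CE05MOAS gliders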
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
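    # ADCP (acoustic Doppler current profiler) velocity streams for the CE05MOAS gliders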
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
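# --- Surface mooring bulk meteorology (METBK) hourly flux products: metbk_hourly stream, telemetered and recovered_host ---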
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
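# --- Surface mooring wave spectra (WAVSS): mean directional spectra, telemetered and recovered_host ---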
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
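# --- Surface mooring wave spectra (WAVSS): non-directional spectra, telemetered and recovered_host ---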
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
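# --- Surface mooring wave spectra (WAVSS): buoy motion time series, telemetered and recovered_host ---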
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
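# --- Surface mooring wave spectra (WAVSS): directional Fourier coefficients, telemetered and recovered_host ---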
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
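    # Coastal Endurance cabled profiler moorings: CE04OSPS (SF01B shallow profiler) and CE04OSPD (DP01B deep profiler) instrument streams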
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/2A-CTDPFA107/streamed/ctdpf_sbe43_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'seawater_pressure'
var_list[5].name = 'seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/01-CTDPFL105/recovered_inst/dpc_ctd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'dpc_ctd_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/01-CTDPFL105/recovered_wfp/dpc_ctd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'dpc_ctd_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/2A-CTDPFA107/streamed/ctdpf_sbe43_sample'
var_list[0].name = 'time'
var_list[1].name = 'corrected_dissolved_oxygen'
var_list[2].name = 'seawater_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/06-DOSTAD105/recovered_inst/dpc_optode_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/06-DOSTAD105/recovered_wfp/dpc_optode_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/3A-FLORTD104/streamed/flort_d_data_record'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/04-FLNTUA103/recovered_inst/dpc_flnturtd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'flntu_x_mmp_cds_fluorometric_chlorophyll_a'
        var_list[2].name = 'flntu_x_mmp_cds_total_volume_scattering_coefficient'
var_list[3].name = 'flntu_x_mmp_cds_bback_total'
var_list[4].name = 'flcdr_x_mmp_cds_fluorometric_cdom'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'ug/L'
var_list[2].units = 'm-1 sr-1'
var_list[3].units = 'm-1'
var_list[4].units = 'ppb'
var_list[5].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/03-FLCDRA103/recovered_wfp/dpc_flcdrtd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'flntu_x_mmp_cds_fluorometric_chlorophyll_a'
        var_list[2].name = 'flntu_x_mmp_cds_total_volume_scattering_coefficient'
var_list[3].name = 'flntu_x_mmp_cds_bback_total'
var_list[4].name = 'flcdr_x_mmp_cds_fluorometric_cdom'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'ug/L'
var_list[2].units = 'm-1 sr-1'
var_list[3].units = 'm-1'
var_list[4].units = 'ppb'
var_list[5].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/2B-PHSENA108/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/3C-PARADA102/streamed/parad_sa_sample'
var_list[0].name = 'time'
var_list[1].name = 'par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/3D-SPKIRA102/streamed/spkir_data_record'
var_list[0].name = 'time'
var_list[1].name = 'spkir_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/4A-NUTNRA102/streamed/nutnr_a_sample'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/4F-PCO2WA102/streamed/pco2w_a_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/4B-VELPTD106/streamed/velpt_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'velpt_d_eastward_velocity'
var_list[2].name = 'velpt_d_northward_velocity'
var_list[3].name = 'velpt_d_upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[9].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
var_list[9].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/02-VEL3DA105/recovered_inst/dpc_acm_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_a_eastward_velocity'
var_list[2].name = 'vel3d_a_northward_velocity'
var_list[3].name = 'vel3d_a_upward_velocity_ascending'
var_list[4].name = 'vel3d_a_upward_velocity_descending'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'm/s'
var_list[5].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/02-VEL3DA105/recovered_wfp/dpc_acm_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_a_eastward_velocity'
var_list[2].name = 'vel3d_a_northward_velocity'
var_list[3].name = 'vel3d_a_upward_velocity_ascending'
var_list[4].name = 'vel3d_a_upward_velocity_descending'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'm/s'
var_list[5].units = 'dbar'
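    # CE04OSPS 200 m platform (PC01B) instrument streams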
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/PC01B/4A-CTDPFA109/streamed/ctdpf_optode_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'seawater_pressure'
var_list[5].name = 'seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
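    # Note: dissolved oxygen (DOSTA) on the 200 m platform is served through the co-located CTD optode stream (see the commented alternative below)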
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'DOSTA' and method == 'Streamed':
#uframe_dataset_name = 'CE04OSPS/PC01B/4A-DOSTAD109/streamed/ctdpf_optode_sample'
uframe_dataset_name = 'CE04OSPS/PC01B/4A-CTDPFA109/streamed/ctdpf_optode_sample'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'seawater_pressure' #also use this for the '4A-DOSTAD109/streamed/ctdpf_optode_sample' stream
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/PC01B/4B-PHSENA106/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/PC01B/4D-PCO2WA105/streamed/pco2w_a_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
    # Coastal Pioneer surface mooring (CP01CNSM, CP03ISSM, CP04OSSM) data streams
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
        var_list[8].units = 'percent'
        var_list[9].units = 'W/m2'
        var_list[10].units = 'W/m2'
        var_list[11].units = 'mm'
        var_list[12].units = 'W/m2'
        var_list[13].units = 'W/m2'
        var_list[14].units = 'W/m2'
        var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
        var_list[8].units = 'percent'
        var_list[9].units = 'W/m2'
        var_list[10].units = 'W/m2'
        var_list[11].units = 'mm'
        var_list[12].units = 'W/m2'
        var_list[13].units = 'W/m2'
        var_list[14].units = 'W/m2'
        var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
        var_list[8].units = 'percent'
        var_list[9].units = 'W/m2'
        var_list[10].units = 'W/m2'
        var_list[11].units = 'mm'
        var_list[12].units = 'W/m2'
        var_list[13].units = 'W/m2'
        var_list[14].units = 'W/m2'
        var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
        var_list[8].units = 'percent'
        var_list[9].units = 'W/m2'
        var_list[10].units = 'W/m2'
        var_list[11].units = 'mm'
        var_list[12].units = 'W/m2'
        var_list[13].units = 'W/m2'
        var_list[14].units = 'W/m2'
        var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
        var_list[8].units = 'percent'
        var_list[9].units = 'W/m2'
        var_list[10].units = 'W/m2'
        var_list[11].units = 'mm'
        var_list[12].units = 'W/m2'
        var_list[13].units = 'W/m2'
        var_list[14].units = 'W/m2'
        var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
        var_list[8].units = 'percent'
        var_list[9].units = 'W/m2'
        var_list[10].units = 'W/m2'
        var_list[11].units = 'mm'
        var_list[12].units = 'W/m2'
        var_list[13].units = 'W/m2'
        var_list[14].units = 'W/m2'
        var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
        var_list[8].units = 'percent'
        var_list[9].units = 'W/m2'
        var_list[10].units = 'W/m2'
        var_list[11].units = 'mm'
        var_list[12].units = 'W/m2'
        var_list[13].units = 'W/m2'
        var_list[14].units = 'W/m2'
        var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
        var_list[8].units = 'percent'
        var_list[9].units = 'W/m2'
        var_list[10].units = 'W/m2'
        var_list[11].units = 'mm'
        var_list[12].units = 'W/m2'
        var_list[13].units = 'W/m2'
        var_list[14].units = 'W/m2'
        var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
    # WAVSS surface wave streams: statistics, mean directional, non-directional, motion, and Fourier coefficients
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
        var_list[10].units = 'degrees'
        var_list[11].units = 'degrees'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
        var_list[10].units = 'degrees'
        var_list[11].units = 'degrees'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
    # PCO2A air-sea pCO2 streams (telemetered)
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
    # PCO2A air-sea pCO2 streams (recovered host)
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
    # FDCHP direct covariance flux streams
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/SBD12/08-FDCHPA000/recovered_inst/fdchp_a_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/08-FDCHPA000/telemetered/fdchp_a_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/08-FDCHPA000/recovered_host/fdchp_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
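    # METBK hourly bulk flux products (metbk_hourly streams)
    # Illustrative branch selection: platform_name='CP01CNSM', node='BUOY',
    # instrument_class='METBK1-hr', method='Telemetered' matches the branch immediately below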
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD37/03-CTDBPE000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/MFD37/03-CTDBPE000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/MFD37/03-CTDBPE000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/MFD37/03-CTDBPD000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/MFD37/03-CTDBPD000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/MFD37/03-CTDBPD000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/MFD37/03-CTDBPD000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/MFD37/03-CTDBPD000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/MFD37/03-CTDBPD000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP03ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
#!/usr/bin/env python
# coding: utf8
"""MMM-Facial-Recognition - MagicMirror Module
Face Recognition Training Script
The MIT License (MIT)
Copyright (c) 2016 <NAME> (MIT License)
Based on work by <NAME> (Copyright 2013) (MIT License)
Run this script to train the face recognition system with training images from multiple people.
The face recognition model is based on the eigen faces algorithm implemented in OpenCV.
You can find more details on the algorithm and face recognition here:
http://docs.opencv.org/modules/contrib/doc/facerec/facerec_tutorial.html
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import fnmatch
import os
# to install builtins run `pip install future`
from builtins import input
import cv2
import numpy as np
import lib.config as config
import lib.face as face
print("Which algorithm do you want to use?")
print("[1] LBPHF (recommended)")
print("[2] Fisherfaces")
print("[3] Eigenfaces")
algorithm_choice = int(input("--> "))
print('')
def walk_files(directory, match='*'):
"""Generator function to iterate through all files in a directory recursively
which match the given filename match parameter.
"""
for root, dirs, files in os.walk(directory):
for filename in fnmatch.filter(files, match):
yield os.path.join(root, filename)
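# Illustrative usage (not part of the original script; the path below is
# hypothetical): iterate over every .pgm capture found under a training folder.
#   for image_path in walk_files('./training_data', match='*.pgm'):
#       print(image_path)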
def prepare_image(filename):
"""Read an image as grayscale and resize it to the appropriate size for
training the face recognition model.
"""
return face.resize(cv2.imread(filename, cv2.IMREAD_GRAYSCALE))
def normalize(X, low, high, dtype=None):
"""Normalizes a given array in X to a value between low and high.
Adapted from python OpenCV face recognition example at:
https://github.com/Itseez/opencv/blob/2.4/samples/python2/facerec_demo.py
"""
X = np.asarray(X)
minX, maxX = np.min(X), np.max(X)
# normalize to [0...1].
X = X - float(minX)
X = X / float((maxX - minX))
# scale to [low...high].
X = X * (high - low)
X = X + low
if dtype is None:
return np.asarray(X)
return np.asarray(X, dtype=dtype)
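# Illustrative usage sketch (not part of the original script): `normalize`
# rescales an arbitrary float-valued array, e.g. an eigenface component, into
# a displayable 0..255 range before saving it with OpenCV.
#   raw_scores = np.random.randn(100, 100)   # hypothetical eigenface component
#   vis = normalize(raw_scores, 0, 255, dtype=np.uint8)
#   cv2.imwrite('eigenface_vis.png', vis)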
if __name__ == '__main__':
print("Reading training images...")
print('-' * 20)
faces = []
labels = []
IMAGE_DIRS_WITH_LABEL = [[0, "negative"]]
IMAGE_DIRS = os.listdir(config.TRAINING_DIR)
IMAGE_DIRS = [x for x in IMAGE_DIRS if not x.startswith('.') and not x.startswith('negative')]
pos_count = 0
for i in range(len(IMAGE_DIRS)):
print("Assign label " + str(i + 1) + " to " + IMAGE_DIRS[i])
IMAGE_DIRS_WITH_LABEL.append([i + 1, IMAGE_DIRS[i]])
print('-' * 20)
print('')
    # for every label/name pair:
for j in range(0, len(IMAGE_DIRS_WITH_LABEL)):
        # add the label to the labels list / the images to the faces list
for filename in walk_files(config.TRAINING_DIR + str(IMAGE_DIRS_WITH_LABEL[j][1]), '*.pgm'):
faces.append(prepare_image(filename))
labels.append(IMAGE_DIRS_WITH_LABEL[j][0])
if IMAGE_DIRS_WITH_LABEL[j][0] != 0:
pos_count += 1
# Print statistic on how many pictures per person we have collected
print('Read', pos_count, 'positive images and', labels.count(0), 'negative images.')
print('')
for j in range(1, max(labels) + 1):
print(str(labels.count(j)) + " images from subject " + IMAGE_DIRS[j - 1])
# Train model
print('-' * 20)
print('')
print('Training model type {0} with threshold {1}'
.format(config.RECOGNITION_ALGORITHM, config.POSITIVE_THRESHOLD))
model = config.model(config.RECOGNITION_ALGORITHM, config.POSITIVE_THRESHOLD)
    model.train(np.asarray(faces), np.asarray(labels))
import deeplift
import numpy as np
def deeplift_zero_ref(X,score_func,batch_size=200,task_idx=0):
    # use an all-zeros reference
input_references = [np.array([0.0, 0.0, 0.0, 0.0])[None, None, None, :]]
# get deeplift scores
deeplift_scores = score_func(
task_idx=task_idx,
input_data_list=[X],
batch_size=batch_size,
progress_update=None,
input_references_list=input_references)
return deeplift_scores
def deeplift_gc_ref(X,score_func,batch_size=200,task_idx=0):
# use a 40% GC reference
input_references = [np.array([0.3, 0.2, 0.2, 0.3])[None, None, None, :]]
# get deeplift scores
deeplift_scores = score_func(
task_idx=task_idx,
input_data_list=[X],
batch_size=batch_size,
progress_update=None,
input_references_list=input_references)
return deeplift_scores
def deeplift_shuffled_ref(X,score_func,batch_size=200,task_idx=0,num_refs_per_seq=10):
deeplift_scores=score_func(
task_idx=task_idx,
input_data_sequences=X,
num_refs_per_seq=num_refs_per_seq,
batch_size=batch_size)
return deeplift_scores
def get_deeplift_scoring_function(model,target_layer_idx=-2,task_idx=0, num_refs_per_seq=10,reference="shuffled_ref",one_hot_func=None):
"""
Arguments:
model -- a string containing the path to the hdf5 exported model
target_layer_idx -- Layer in the model whose outputs will be interpreted. For classification models we \
interpret the logit (input to the sigmoid), which is the output of layer -2.
For regression models we intepret the model output, which is the output of layer -1.
reference -- one of 'shuffled_ref','gc_ref','zero_ref'
one_hot_func -- one hot function to use for encoding FASTA string inputs; if the inputs are already one-hot-encoded, use the default of None
Returns:
deepLIFT scoring function
"""
assert reference in ["shuffled_ref","gc_ref","zero_ref"]
from deeplift.conversion import kerasapi_conversion as kc
deeplift_model = kc.convert_model_from_saved_files(model,verbose=False)
#get the deeplift score with respect to the logit
score_func = deeplift_model.get_target_contribs_func(
find_scores_layer_idx=0,
target_layer_idx=target_layer_idx)
if reference=="shuffled_ref":
from deeplift.util import get_shuffle_seq_ref_function
from deeplift.dinuc_shuffle import dinuc_shuffle
score_func=get_shuffle_seq_ref_function(
score_computation_function=score_func,
shuffle_func=dinuc_shuffle,
one_hot_func=one_hot_func)
return score_func
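# Illustrative usage sketch (the model path and `X_onehot` array below are
# hypothetical; the real inputs come from the caller):
#   score_func = get_deeplift_scoring_function('model.hdf5',
#                                               target_layer_idx=-2,
#                                               reference='zero_ref')
#   scores = deeplift_zero_ref(X_onehot, score_func, batch_size=200, task_idx=0)
# With reference='shuffled_ref' the scoring function is wrapped with
# dinucleotide-shuffled references above, so use deeplift_shuffled_ref instead.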
def deeplift(score_func, X, batch_size=200,task_idx=0, num_refs_per_seq=10,reference="shuffled_ref",one_hot_func=None):
"""
Arguments:
score_func -- deepLIFT scoring function
X -- numpy array with shape (n_samples, 1, n_bases_in_sample,4) or list of FASTA sequences
batch_size -- number of samples to interpret at once
task_idx -- index indicating which task to perform interpretation on
reference -- one of 'shuffled_ref','gc_ref','zero_ref'
num_refs_per_seq -- integer indicating number of references to use for each input sequence if the reference is set to 'shuffled_ref';if 'zero_ref' or 'gc_ref' is used, this argument is ignored.
one_hot_func -- one hot function to use for encoding FASTA string inputs; if the inputs are already one-hot-encoded, use the default of None
Returns:
(num_task, num_samples, 1, num_bases, sequence_length) deeplift score array.
"""
assert reference in ["shuffled_ref","gc_ref","zero_ref"]
    if one_hot_func is None:
        # check that the dataset has already been one-hot-encoded
        assert len(np.shape(X)) == 4 and np.shape(X)[-1] == 4
import numpy as np
import cmath
from math import sqrt
def pau_x():
    p_x = np.array([[0, 1], [1, 0]])
    return p_x
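# Quick sanity check (illustrative, not in the original fragment): Pauli-X is
# its own inverse, so applying it twice gives the 2x2 identity.
#   assert np.allclose(pau_x() @ pau_x(), np.eye(2))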
# pylint: disable-msg=W0611, W0612, W0511,R0201
"""Tests suite for MaskedArray.
Adapted from the original test_ma by <NAME>
:author: <NAME> & <NAME>
:contact: pierregm_at_uga_dot_edu & mattknox_ca_at_hotmail_dot_com
:version: $Id: test_timeseries.py 3836 2008-01-15 13:09:03Z <EMAIL> $
"""
__author__ = "<NAME> & <NAME> ($Author: <EMAIL> $)"
__revision__ = "$Revision: 3836 $"
__date__ = '$Date: 2008-01-15 08:09:03 -0500 (Tue, 15 Jan 2008) $'
import numpy as np
from numpy import bool_, complex_, float_, int_, object_
from numpy.testing import *
import numpy.ma as ma
from numpy.ma import MaskedArray, masked, nomask
from numpy.ma.testutils import *
import scikits.timeseries as ts
from scikits.timeseries import \
TimeSeries, TimeSeriesError, TimeSeriesCompatibilityError, \
tseries, Date, date_array, now, time_series, \
adjust_endpoints, align_series, align_with, \
concatenate, fill_missing_dates, find_duplicated_dates, \
remove_duplicated_dates, split, stack
get_varshape = tseries.get_varshape
_timeseriescompat_multiple = tseries._timeseriescompat_multiple
#------------------------------------------------------------------------------
class TestCreation(TestCase):
"Base test class for MaskedArrays."
def __init__(self, *args, **kwds):
TestCase.__init__(self, *args, **kwds)
dlist = ['2007-01-%02i' % i for i in range(1, 16)]
dates = date_array(dlist, freq='D')
data = ma.array(np.arange(15), mask=[1, 0, 0, 0, 0] * 3)
self.d = (dlist, dates, data)
def test_fromlist (self):
"Test the creation of a TimeSeries w/ a list of dates as input dates."
(dlist, dates, data) = self.d
series = time_series(data, dlist, freq='D')
self.failUnless(isinstance(series, TimeSeries))
assert_equal(series.mask, [1, 0, 0, 0, 0] * 3)
assert_equal(series.series, data)
assert_equal(series.dates, dates)
assert_equal(series.freqstr, 'D')
def test_fromrange (self):
"Test the creation of a TimeSeries w/ a starting date."
(dlist, dates, data) = self.d
series = time_series(data, start_date=dates[0])
self.failUnless(isinstance(series, TimeSeries))
assert_equal(series.mask, [1, 0, 0, 0, 0] * 3)
assert_equal(series.series, data)
assert_equal(series.dates, dates)
assert_equal(series.freqstr, 'D')
def test_fromseries (self):
"Test the creation of a TimeSeries w/ a time series as input data."
(dlist, dates, data) = self.d
series = time_series(data, dlist, freq='D')
dates = dates + 15
series = time_series(series, dates)
self.failUnless(isinstance(series, TimeSeries))
assert_equal(series.mask, [1, 0, 0, 0, 0] * 3)
assert_equal(series.series, data)
assert_equal(series.dates, dates)
assert_equal(series.freqstr, 'D')
def test_fromdatearray(self):
"Tests the creation of a series with a DateArray as input data."
(_, dates, _) = self.d
data = dates
#
series = time_series(data, dates)
self.failUnless(isinstance(series, TimeSeries))
assert_equal(series.dates, dates)
assert_equal(series.data, data)
assert_equal(series.freqstr, 'D')
#
series[5] = masked
# ensure that series can be represented by a string after masking a value
# (there was a bug before that prevented this from working when using a
# DateArray for the data)
strrep = str(series)
def test_datafromlist(self):
"Test the creation of a series w/ a list as input data."
(_, dates, _) = self.d
data = list(range(15))
series = time_series(data, dates)
assert_equal(series._data.size, 15)
def test_unsorted(self):
"Tests that the data are properly sorted along the dates."
dlist = ['2007-01-%02i' % i for i in (3, 2, 1)]
data = [10, 20, 30]
series = time_series(data, dlist, freq='D')
assert_equal(series.data, [30, 20, 10])
#
dates = date_array(dlist, freq='D')
series = TimeSeries(data, dates)
assert_equal(series.data, [30, 20, 10])
#
series = time_series(data, dlist, freq='D', mask=[1, 0, 0])
assert_equal(series.mask, [0, 0, 1])
#
data = ma.array([10, 20, 30], mask=[1, 0, 0])
series = time_series(data, dlist, freq='D')
assert_equal(series._mask, [0, 0, 1])
def test_unsorted_w_datearray(self):
"Tests that the data are properly sorted along the dates."
dlist = ['2007-01-%02i' % i for i in (3, 2, 1)]
data = [10, 20, 30]
dates = date_array(dlist, freq='D')
self.failUnless(dates._unsorted is not None)
#
series = time_series(data, dates=dates)
assert_equal(series.data, [30, 20, 10])
self.failUnless(dates._unsorted is not None)
self.failUnless(series.dates._unsorted is None)
#
series = time_series(data, dates=dates)
assert_equal(series.data, [30, 20, 10])
self.failUnless(series.dates._unsorted is None)
def test_setdates(self):
"Tests setting the dates of a series."
(dlist, dates, data) = self.d
reference = time_series(data, dates=dates)
# Set with a DateArray: that should work
test_series = data.view(TimeSeries)
test_series.dates = dates
assert_equal(test_series.dates, reference.dates)
def test_setdates_asndarray(self):
"Tests setting the dates as a ndarray."
(dlist, dates, data) = self.d
test_series = data.view(TimeSeries)
# Set with a ndarray: that shouldn't work
test_dates = np.array(dates, copy=False, subok=False)
try:
test_series._dates = test_dates
except TypeError:
pass
else:
err_msg = "Dates shouldn't be set as basic ndarrays."
raise TimeSeriesError(err_msg)
def test_setdates_asdate(self):
"Tests setting the dates as a Date"
(dlist, dates, data) = self.d
series = data.view(TimeSeries)
try:
series.dates = ts.now('D')
except TypeError:
pass
else:
err_msg = "Dates shouldn't be set as a Date objects."
raise TimeSeriesError(err_msg)
def test_setdates_with_incompatible_size(self):
"Tests setting the dates w/ a DateArray of incompatible size"
(dlist, dates, data) = self.d
series = data.view(TimeSeries)
try:
series.dates = dates[:len(dates) // 2]
except ts.TimeSeriesCompatibilityError:
pass
else:
err_msg = "Dates size should match the input."
raise TimeSeriesError(err_msg)
def test_setdates_with_autoreshape(self):
"Tests the automatic reshaping of dates."
(dlist, dates, data) = self.d
reference = time_series(data, dates=dates)
test_series = data.view(TimeSeries)
# Set with a datearray w/ a different size than expected: should fail
test_dates = dates[:-1]
try:
test_series.dates = test_dates
except TimeSeriesCompatibilityError:
pass
else:
err_msg = "Dates should have a size compatible with data"
raise TimeSeriesError(err_msg)
# Set w/ a date of a different shape: should work, but the shape changes
test_dates = dates.reshape(-1, 1)
test_series._dates = test_dates
assert_equal(test_series.dates, reference.dates)
assert_equal(test_series.dates.shape, test_series.shape)
test_dates = np.array(dates, copy=False, subok=True, ndmin=2)
test_series._dates = test_dates
assert_equal(test_series.dates, reference.dates)
assert_equal(test_series.dates.shape, test_series.shape)
def test_setdates_unsorted_basic(self):
"Test automatic sorting when setting dates - 1D case."
dates = date_array([ts.Date('D',
'2001-01-%02i' % _) for _ in (4, 3, 2, 1)])
a = np.array((4, 3, 2, 1), dtype=float)
series = a.view(ts.TimeSeries)
assert_equal(series.dates, [])
assert_equal(series, (4, 3, 2, 1))
#
series._dates = dates
series.sort_chronologically()
assert_equal(series, (1, 2, 3, 4))
def test_setdates_unsorted_reshaped(self):
"Test automatic sorting when setting dates - 1D case reshaped to nD."
dates = date_array([ts.Date('D',
'2001-01-%02i' % _) for _ in (4, 3, 2, 1)])
a = np.array([[4., 3.], [2., 1.]], dtype=float)
series = a.view(TimeSeries)
series._dates = dates
series.sort_chronologically()
assert_equal(series, [[1., 2.], [3., 4.]])
def test_setdates_unsorted_2D(self):
"Test automatic sorting when setting dates - 1D case reshaped to nD."
dates = date_array([ts.Date('D',
'2001-01-%02i' % _) for _ in (4, 3, 2, 1)])
a = np.arange(12).reshape(4, 3)
series = a.view(TimeSeries)
series._dates = dates
series.sort_chronologically()
assert_equal(series, [[ 9., 10., 11.],
[ 6., 7., 8.],
[ 3., 4., 5.],
[ 0., 1., 2.]])
def test_copy(self):
"Tests the creation of a timeseries with copy=True"
dlist = ['2007-01-%02i' % i for i in range(1, 16)]
dates = date_array(dlist, freq='D')
data = ma.array(np.arange(15), mask=[1, 0, 0, 0, 0] * 3)
series = time_series(data, dates)
assert_equal(series.dates.ctypes.data, dates.ctypes.data)
assert_equal(series.data.ctypes.data, data.data.ctypes.data)
assert_equal(series.mask.ctypes.data, data.mask.ctypes.data)
#
series = time_series(data, dates, copy=True)
assert_not_equal(series.dates.ctypes.data, dates.ctypes.data)
assert_not_equal(series.data.ctypes.data, data.data.ctypes.data)
assert_not_equal(series.mask.ctypes.data, data.mask.ctypes.data)
def test_using_length(self):
"Test using the `length` parameter of time_series."
start = ts.Date('M', '1955-01')
data = np.random.uniform(0, 1, 50 * 12).reshape(50, 12)
# Default : the dates should be (50,)
series = ts.time_series(data, start_date=start)
assert_equal(series.shape, (50, 12))
assert_equal(series.dates.shape, (50,))
assert_equal(series.varshape, (12,))
# Forcing dates to be 2D
series = ts.time_series(data, start_date=start, length=600)
assert_equal(series.shape, (50, 12))
assert_equal(series.dates.shape, (50, 12))
assert_equal(series.varshape, ())
# Forcing dates to 1D
series = ts.time_series(data, start_date=start, length=50)
assert_equal(series.shape, (50, 12))
assert_equal(series.dates.shape, (50,))
assert_equal(series.varshape, (12,))
# Make sure we raise an exception if something goes wrong....
try:
series = ts.time_series(data, start_date=start, length=100)
except ts.TimeSeriesCompatibilityError:
pass
else:
errmsg = "The should not be dates/data compatibility in this case."
raise TimeSeriesCompatibilityError(errmsg)
def test_varshape(self):
"Test some corner case of varshape"
test = ts.time_series(np.ones((10, 2)), start_date=ts.now('d'))
assert_equal(test.varshape, (2,))
#
test = ts.time_series(np.ones((10, 1)), start_date=ts.now('d'))
assert_equal(test.varshape, (1,))
#
test = ts.time_series(np.ones((10,)), start_date=ts.now('d'))
assert_equal(test.varshape, ())
#------------------------------------------------------------------------------
class TestArithmetics(TestCase):
"Some basic arithmetic tests"
def __init__(self, *args, **kwds):
TestCase.__init__(self, *args, **kwds)
dlist = ['2007-01-%02i' % i for i in range(1, 16)]
dates = date_array(dlist, freq='D')
data = ma.array(np.arange(15), mask=[1, 0, 0, 0, 0] * 3)
self.d = (time_series(data, dlist, freq='D'), data)
def test_intfloat(self):
"Test arithmetic timeseries/integers"
(series, data) = self.d
#
nseries = series + 1
self.failUnless(isinstance(nseries, TimeSeries))
assert_equal(nseries.mask, [1, 0, 0, 0, 0] * 3)
assert_equal(nseries.series, data + 1)
assert_equal(nseries.dates, series.dates)
#
nseries = series - 1
self.failUnless(isinstance(nseries, TimeSeries))
assert_equal(nseries.mask, [1, 0, 0, 0, 0] * 3)
assert_equal(nseries.series, data - 1)
assert_equal(nseries.dates, series.dates)
#
nseries = series * 1
self.failUnless(isinstance(nseries, TimeSeries))
assert_equal(nseries.mask, [1, 0, 0, 0, 0] * 3)
assert_equal(nseries.series, data * 1)
assert_equal(nseries.dates, series.dates)
#
nseries = series / 1.
self.failUnless(isinstance(nseries, TimeSeries))
assert_equal(nseries.mask, [1, 0, 0, 0, 0] * 3)
assert_equal(nseries.series, data / 1.)
assert_equal(nseries.dates, series.dates)
def test_intfloat_inplace(self):
"Test int/float arithmetics in place."
(series, data) = self.d
nseries = series.astype(float_)
idini = id(nseries)
data = data.astype(float_)
#
nseries += 1.
self.failUnless(isinstance(nseries, TimeSeries))
assert_equal(nseries.mask, [1, 0, 0, 0, 0] * 3)
assert_equal(nseries.series, data + 1.)
assert_equal(nseries.dates, series.dates)
assert_equal(id(nseries), idini)
#
nseries -= 1.
self.failUnless(isinstance(nseries, TimeSeries))
assert_equal(nseries.mask, [1, 0, 0, 0, 0] * 3)
assert_equal(nseries.series, data)
assert_equal(nseries.dates, series.dates)
assert_equal(id(nseries), idini)
#
nseries *= 2.
self.failUnless(isinstance(nseries, TimeSeries))
assert_equal(nseries.mask, [1, 0, 0, 0, 0] * 3)
assert_equal(nseries.series, data * 2.)
assert_equal(nseries.dates, series.dates)
assert_equal(id(nseries), idini)
#
nseries /= 2.
self.failUnless(isinstance(nseries, TimeSeries))
assert_equal(nseries.mask, [1, 0, 0, 0, 0] * 3)
assert_equal(nseries.series, data)
assert_equal(nseries.dates, series.dates)
assert_equal(id(nseries), idini)
def test_updatemask(self):
"Checks modification of mask."
(series, data) = self.d
assert_equal(series.mask, [1, 0, 0, 0, 0] * 3)
series.mask = nomask
self.failUnless(not series.mask.any())
self.failUnless(not series.series.mask.any())
#series._series.mask = [1,0,0]*5
series.mask = [1, 0, 0] * 5
assert_equal(series.mask, [1, 0, 0] * 5)
assert_equal(series.series.mask, [1, 0, 0] * 5)
series[2] = masked
assert_equal(series.mask, [1, 0, 1] + [1, 0, 0] * 4)
assert_equal(series.series.mask, [1, 0, 1] + [1, 0, 0] * 4)
def test_ismasked(self):
"Checks checks on masked"
(series, data) = self.d
self.failUnless(series._series[0] is masked)
#!!!:... and of course, masked doesn't have a _series attribute
# self.failUnless(series[0]._series is masked)
def test_incompatible_dates(self):
"""
Test operations on two series with same dimensions but incompatible dates
"""
(series, data) = self.d
a, b = series[1:], series[:-1]
result = a + b
self.failUnless(not isinstance(result, TimeSeries))
assert_equal(result.ndim, a.ndim)
assert_equal(result.size, a.size)
#------------------------------------------------------------------------------
class TestGetitem(TestCase):
"Some getitem tests"
def setUp(self):
dates = date_array(['2007-01-%02i' % i for i in range(1, 16)], freq='D')
data1D = ma.array(np.arange(15), mask=[1, 0, 0, 0, 0] * 3, dtype=float_)
data3V = ma.array([[10, 11, 12], [20, 21, 22], [30, 31, 32]],
mask=[[1, 0, 0, ], [0, 0, 0], [0, 0, 1]])
data2D = ma.array(np.random.rand(60).reshape(3, 4, 5))
for i in range(3):
data2D[i, i, i] = masked
#.........................
series1D = time_series(data1D, dates, freq='D')
series3V = time_series(data3V, dates[:len(data3V)], freq='D')
series2D = time_series(data2D, dates[:len(data2D)], freq='D')
self.info = locals()
del(self.info['i'])
self.__dict__.update(self.info)
return
def test_with_integers(self):
# 1D series ..............
(series1D, data1D) = (self.series1D, self.data1D)
self.failUnless(series1D[0] is masked)
test = series1D[-1]
assert_equal(test, data1D[-1])
self.failUnless(not isinstance(test, TimeSeries))
# nV series ..............
(series3V, data3V) = (self.series3V, self.data3V)
test = series3V[-1]
assert_equal(test, data3V[-1])
assert_equal(test.mask, [0, 0, 1])
self.failUnless(not isinstance(test, TimeSeries))
# 2D series ..............
(series2D, data2D) = (self.series2D, self.data2D)
test = series2D[-1]
assert_equal(test, data2D[-1].squeeze())
self.failUnless(not isinstance(test, TimeSeries))
def test_with_slices(self):
"Tests __getitem__ w/ slices."
def _wslice(series, data, dates):
test = series[1:2]
self.failUnless(isinstance(test, TimeSeries))
assert_equal(test._varshape, series._varshape)
assert_equal(test.series, data[1:2])
assert_equal(test.dates, dates[1:2])
assert_equal(test.mask, data.mask[1:2])
assert_equal(test.freq, dates.freq)
#
test = series[:3]
self.failUnless(isinstance(test, TimeSeries))
test_series = test.series
assert_equal(test_series.data, data[:3].data)
assert_equal(test_series.mask, data[:3].mask)
assert_equal(test.dates, dates[:3])
#.....
dates = self.dates
(series1D, data1D) = (self.series1D, self.data1D)
_wslice(series1D, data1D, dates)
(series3V, data3V) = (self.series3V, self.data3V)
_wslice(series3V, data3V, dates)
(series2D, data2D) = (self.series2D, self.data2D)
_wslice(series2D, data2D, dates)
def test_with_slices_on_nD(self):
(series3V, data3V) = (self.series3V, self.data3V)
#
test = series3V[0, :]
self.failUnless(not isinstance(test, TimeSeries))
assert_equal(test, data3V[0, :])
assert_equal(test.mask, data3V[0, :].mask)
#
test = series3V[:, 0]
self.failUnless(isinstance(test, TimeSeries))
assert_equal(test, data3V[:, 0])
assert_equal(test.mask, data3V[:, 0].mask)
assert_equal(test._varshape, ())
assert_equal(test.dates, series3V.dates)
#
(series2D, data2D) = (self.series2D, self.data2D)
test = series2D[0]
self.failUnless(not isinstance(test, TimeSeries))
assert_equal(test.shape, (4, 5))
assert_equal(test, data2D[0])
#
test = series2D[:, :, 0]
self.failUnless(isinstance(test, TimeSeries))
assert_equal(test, series2D.data[:, :, 0])
assert_equal(test.dates, series2D.dates)
def test_with_list(self):
"Tests __getitem__ w/ list."
def _wlist(series, data, dates):
test = series[[0, 1, -1]]
control = data[[0, 1, -1]]
self.failUnless(isinstance(test, TimeSeries))
assert_equal(test.series, control)
assert_equal(test.mask, control.mask)
assert_equal(test.dates, dates[[0, 1, -1]])
#.....
dates = self.dates
(series1D, data1D) = (self.series1D, self.data1D)
_wlist(series1D, data1D, dates)
(series3V, data3V) = (self.series3V, self.data3V)
_wlist(series3V, data3V, dates[:3])
(series2D, data2D) = (self.series2D, self.data2D)
_wlist(series2D, data2D, dates[:3])
def test_with_dates(self):
"Tests __getitem__ w/ dates."
def _wdates(series, data, dates):
# Single date
test = series[dates[0]]
assert_equal(test, data[0])
assert_equal(test.mask, data[0].mask)
self.failUnless(not isinstance(test, TimeSeries))
# Multiple dates as a date_array
test = series[dates[[0, -1]]]
assert_equal(test, data[[0, -1]])
self.failUnless(isinstance(test, TimeSeries))
assert_equal(test.dates, dates[[0, -1]])
# Multiple dates as a list
test = series[[dates[0], dates[-1]]]
assert_equal(test, data[[0, -1]])
self.failUnless(isinstance(test, TimeSeries))
# Multiple dates as a slice
dslice = slice(dates[1], None, None)
test = series[dslice]
assert_equal(test, data[1:])
self.failUnless(isinstance(test, TimeSeries))
#.....
dates = self.dates
(series1D, data1D) = (self.series1D, self.data1D)
_wdates(series1D, data1D, dates)
(series3V, data3V) = (self.series3V, self.data3V)
_wdates(series3V, data3V, dates[:3])
(series2D, data2D) = (self.series2D, self.data2D)
_wdates(series2D, data2D, dates[:3])
def test_slicing_with_dates(self):
"Tests __getitem__ w/ date based slices"
def _testslice(series):
sd, ed = series.start_date, series.end_date
# full range of series
assert_equal(series, series[sd:ed + 1])
# exclude first and last point of series
assert_equal(series[1:-1], series[sd + 1:ed])
# slice with dates beyond the start and end dates
assert_equal(series, series[sd - 10:ed + 10])
# slice with dates before the series start date
assert_equal(series[0:0], series[sd - 10:sd - 5])
#.....
series = self.series1D
_testslice(series)
# Now try slicing on a series with missing dates
series = series[::2]
_testslice(series)
def test_with_dates_as_str(self):
"Test using a string corresponding to a date as index."
def _wdates(series, data):
date = self.dates[0].strfmt("%Y-%m-%d")
# Single date
test = series[date]
assert_equal(test, data[0])
assert_equal(test.mask, data[0].mask)
self.failUnless(not isinstance(test, TimeSeries))
#.....
(series1D, data1D) = (self.series1D, self.data1D)
_wdates(series1D, data1D)
(series3V, data3V) = (self.series3V, self.data3V)
_wdates(series3V, data3V)
(series2D, data2D) = (self.series2D, self.data2D)
_wdates(series2D, data2D)
#
test = series1D[['2007-01-01', '2007-01-15']]
control = series1D[[0, -1]]
assert_equal(test, control)
assert_equal(test.mask, control.mask)
assert_equal(test.dates, control.dates)
def test_on1D_reshaped(self):
trick = time_series(self.data1D.reshape(3, 5),
dates=self.dates.reshape(3, 5), freq='D')
test = trick[0, 0]
self.failUnless(not isinstance(test, TimeSeries))
self.failUnless(test is masked)
#
test = trick[-1, -1]
self.failUnless(not isinstance(test, TimeSeries))
assert_equal(test, 14)
#
test = trick[0]
self.failUnless(isinstance(test, TimeSeries))
assert_equal(test._varshape, ())
assert_equal(test, trick.series[0])
assert_equal(test.dates, trick.dates[0])
def test_wtimeseries(self):
"Tests getitem w/ TimeSeries as index"
series1D = self.series1D
# Testing a basic condition on data
cond = (series1D < 8).filled(False)
dseries = series1D[cond]
assert_equal(dseries.data, [1, 2, 3, 4, 6, 7])
assert_equal(dseries.dates, series1D.dates[[1, 2, 3, 4, 6, 7]])
assert_equal(dseries.mask, nomask)
# Testing a basic condition on dates
series1D[series1D.dates < Date('D', string='2007-01-06')] = masked
assert_equal(series1D[:5].series.mask, [1, 1, 1, 1, 1])
def test_on2d(self):
"Tests getitem on a 2D series"
(a, b, d) = ([1, 2, 3], [3, 2, 1], date_array(now('M'), length=3))
ser_x = time_series(np.column_stack((a, b)), dates=d)
assert_equal(ser_x[0, 0], time_series(a[0], d[0]))
assert_equal(ser_x[0, :], (a[0], b[0]))
assert_equal(ser_x[:, 0], time_series(a, d))
assert_equal(ser_x[:, :], ser_x)
def test_slicing_and_keeping_additional_attributes(self):
series1D = self.series1D
series1D.fill_value = -9999
series1D._basedict['info'] = '???'
piece = series1D[:5]
assert_equal(piece._fill_value, -9999)
assert_equal(piece[:5]._basedict['info'], '???')
#------------------------------------------------------------------------------
class TestSetItem(TestCase):
#
def setUp(self):
dlist = ['2007-01-%02i' % i for i in range(1, 6)]
dates = date_array(dlist, freq='D')
data = ma.array(np.arange(5), mask=[1, 0, 0, 0, 0], dtype=float)
self.series = time_series(data, dates)
self.dates = dates
#
def test_with_integers(self):
"Tests setitem with integers"
series = self.series
series[0] = 1
assert_equal(series.data, [1, 1, 2, 3, 4])
assert_equal(series.mask, [0, 0, 0, 0, 0])
series[0] = masked
assert_equal(series.data, [1, 1, 2, 3, 4])
assert_equal(series.mask, [1, 0, 0, 0, 0])
try:
series[10] = -999
except IndexError:
pass
#
def test_with_dates(self):
"Test setitem w/ dates"
(series, dates) = (self.series, self.dates)
#
last_date = dates[-1]
series[last_date] = 5
assert_equal(series.data, [0, 1, 2, 3, 5])
assert_equal(series.mask, [1, 0, 0, 0, 0])
#
last_date += 10
try:
series[last_date] = -999
except IndexError:
pass
# With dates as string
series['2007-01-01'] = 5
assert_equal(series.data, [5, 1, 2, 3, 5])
assert_equal(series.mask, [0, 0, 0, 0, 0])
# test for bug fixed in r1203
x, y = ts.now('b'), ts.now('b') + 1
a = ts.time_series([1], start_date=x)
b = ts.time_series([4, 5], start_date=x)
b[x:y] = a[x:y]
assert_equal(b[0], 1)
def test_with_datearray(self):
"Test setitem w/ a date_array"
(series, dates) = (self.series, self.dates)
# Test with date array
series[dates[[0, -1]]] = 0
assert_equal(series.data, [0, 1, 2, 3, 0])
assert_equal(series.mask, [0, 0, 0, 0, 0])
# Test with dates as a list of strings
series[['2007-01-01', '2007-01-02']] = 10
assert_equal(series.data, [10, 10, 2, 3, 0])
assert_equal(series.mask, [ 0, 0, 0, 0, 0])
#------------------------------------------------------------------------------
class TestTimeSeriesMethods(TestCase):
def setUp(self):
dates = date_array(['2007-01-%02i' % i for i in (1, 2, 3)], freq='D')
data1D = ma.array([1, 2, 3], mask=[1, 0, 0, ])
data3V = ma.array([[10, 11, 12], [20, 21, 22], [30, 31, 32]],
mask=[[1, 0, 0, ], [0, 0, 0], [0, 0, 1]])
data2D = np.random.rand(60).reshape(3, 4, 5)
series1D = time_series(data1D, dates, freq='D')
series3V = time_series(data3V, dates, freq='D')
series2D = time_series(data2D, dates, freq='D')
self.info = locals()
del(self.info['i'])
return
def test_torecords_1D(self):
"Test conversion to records on 1D series"
series = ts.time_series([1, 2, 3],
start_date=ts.Date('M', '2001-01-01'),
mask=[0, 1, 0])
ndtype = [('_dates', int), ('_data', int), ('_mask', bool)]
control = np.array([(24001, 1, False),
(24002, 2, True),
(24003, 3, False)], dtype=ndtype)
test = series.torecords()
assert_equal(test, control)
def test_torecords_2D(self):
"Test torecords on 2D series"
series = ts.time_series([[1, 1], [2, 2], [3, 3]],
start_date=ts.Date('M', '2001-01-01'),
mask=[[0, 1], [0, 0], [1, 0]])
ndtype = [('_dates', int),
('_data', (int, (2,))),
('_mask', (bool, (2,)))]
control = np.array([(24001, [1, 1], [False, True]),
(24002, [2, 2], [False, False]),
(24003, [3, 3], [True, False])], dtype=ndtype)
test = series.torecords()
assert_equal_records(test, control)
def test_torecords_structured(self):
"Test torecords on structured array"
series = ts.time_series([(1, 1), (2, 2), (3, 3)],
start_date=ts.Date('M', '2001-01-01'),
mask=[(0, 1), (0, 0), (1, 0)],
dtype=[('a', int), ('b', float)])
ndtype = [('_dates', int),
('_data', [('a', int), ('b', float)]),
('_mask', [('a', bool), ('b', bool)])]
control = np.array([(24001, (1, 1), (False, True)),
(24002, (2, 2), (False, False)),
(24003, (3, 3), (True, False))], dtype=ndtype)
test = series.torecords()
assert_equal_records(test, control)
def test_reshape_1D(self):
"Test reshape on data w/ 1 variables"
start = ts.Date('M', '2001-01')
series = ts.time_series([1, 2, 3, 4], mask=[0, 0, 1, 0],
start_date=start)
test = series.reshape(2, 2)
control = ts.time_series([[1, 2], [3, 4]], mask=[[0, 0], [1, 0]],
dates=ts.date_array(start_date=start,
length=4).reshape(2, 2))
assert_equal(test, control)
assert_equal(test.mask, control.mask)
assert_equal(test.dates, control.dates)
assert_equal(test.varshape, series.varshape)
#
test = series.copy()
test.shape = (2, 2)
assert_equal(test, control)
assert_equal(test.mask, control.mask)
assert_equal(test.dates, control.dates)
assert_equal(test.varshape, series.varshape)
def test_reshape_1V(self):
"Test reshape on series w/ 2 variables"
series = ts.time_series([[1, 2], [3, 4]],
mask=[[0, 0], [1, 0]],
start_date=ts.Date('M', '2001-01'))
test = series.reshape((-1, 1))
control = ts.time_series([[[1, 2]], [[3, 4]]],
mask=[[[0, 0]], [[1, 0]]],
dates=series.dates.reshape((-1, 1)))
assert_equal(test, control)
assert_equal(test.mask, control.mask)
assert_equal(test.dates, control.dates)
assert_equal(test.varshape, control.varshape)
#
test = series.reshape((1, -1, 1))
control = ts.time_series([[[[1, 2]], [[3, 4]]]],
mask=[[[[0, 0]], [[1, 0]]]],
dates=series.dates.reshape((1, -1, 1)))
assert_equal(test, control)
assert_equal(test.mask, control.mask)
assert_equal(test.dates, control.dates)
def test_reshaping_1D(self):
"Tests the reshaping of a 1D series."
series1D = self.info['series1D']
newshape = (3, 1)
test1D = series1D.reshape(newshape)
assert_equal(test1D.shape, newshape)
assert_equal(test1D.series.shape, newshape)
assert_equal(test1D.dates.shape, newshape)
assert_equal(test1D.varshape, series1D.varshape)
# Make sure we haven't propagated the new shape
self.failUnless(test1D.shape != series1D.shape)
self.failUnless(test1D.dates.shape != series1D.dates.shape)
# Using .shape
test1D = series1D.copy()
test1D.shape = newshape
assert_equal(test1D.shape, newshape)
assert_equal(test1D.series.shape, newshape)
assert_equal(test1D.dates.shape, newshape)
self.failUnless(series1D.dates.shape != newshape)
assert_equal(test1D.varshape, series1D.varshape)
# Using multiple args
test1D = series1D.reshape(*newshape)
assert_equal(test1D.shape, newshape)
assert_equal(test1D.varshape, series1D.varshape)
def test_reshape_batch(self):
"Test a succession of reshape"
a = ts.time_series([1, 2, 3], start_date=ts.now('D'))
test = a.reshape(-1, 1)
assert_equal(test.shape, (3, 1))
assert_equal(test.varshape, ())
test = a.reshape(-1, 1).reshape(-1)
assert_equal(test.shape, (3,))
assert_equal(test.varshape, ())
def test_reshaping_2D(self):
"Tests the reshaping of a nV/nD series."
series3V = self.info['series3V']
newshape = (1, 3, 3)
try:
test3V = series3V.reshape(newshape)
assert_equal(test3V.shape, newshape)
assert_equal(test3V.series.shape, newshape)
assert_equal(test3V.dates.shape, (1, 3))
assert_equal(test3V.varshape, series3V.varshape)
except NotImplementedError:
pass
else:
raise Exception("Reshaping nV/nD series should be implemented!")
# Using .shape
try:
test3V = series3V.copy()
test3V.shape = newshape
assert_equal(test3V.shape, newshape)
assert_equal(test3V.series.shape, newshape)
assert_equal(test3V.dates.shape, (1, 3))
assert_equal(test3V.varshape, series3V.varshape)
except NotImplementedError:
pass
else:
raise Exception("Reshaping nV/nD series should be implemented!")
def test_ravel_1D(self):
"Test .ravel on 1D data"
series = ts.time_series([1, 2, 3, 4],
mask=[0, 0, 1, 0],
start_date=ts.Date('M', '2009-01'))
test = series.ravel()
assert_equal(test, series)
assert_equal(test.mask, series.mask)
assert_equal(test.dates, series.dates)
assert_equal(test.varshape, series.varshape)
def test_ravel_1V(self):
"Test .ravel on nD/1V data"
dates = ts.date_array(start_date=ts.Date('M', '2009-01'),
length=4)
series = ts.time_series([[1, 2], [3, 4]],
mask=[[0, 0], [1, 0]],
dates=dates)
test = series.ravel()
assert_equal(test.data, series.data.ravel())
assert_equal(test.mask, series.mask.ravel())
assert_equal(test.dates, series.dates.ravel())
assert_equal(test.varshape, series.varshape)
assert_equal(test.varshape, ())
def test_ravel_2V(self):
"Test .ravel on 2V data"
series = ts.time_series([[1, 2], [3, 4]],
mask=[[0, 0], [1, 0]],
start_date=ts.Date('M', '2009-01'),)
test = series.ravel()
assert_equal(test.data, series.data)
assert_equal(test.mask, series.mask)
assert_equal(test.dates, series.dates)
assert_equal(test.varshape, series.varshape)
#
dates = ts.date_array(start_date=ts.Date('M', '2009-01'),
length=2)
series = ts.time_series([[[1, 2]], [[3, 4]]],
mask=[[[0, 0]], [[1, 0]]],
dates=dates.reshape(1, 2))
test = series.ravel()
assert_equal(test.data, [[1, 2], [3, 4]])
assert_equal(test.mask, [[0, 0], [1, 0]])
assert_equal(test.dates, series.dates.ravel())
assert_equal(test.varshape, (2,))
#------------------------------------------------------------------------------
class TestFunctions(TestCase):
"Some getitem tests"
def __init__(self, *args, **kwds):
TestCase.__init__(self, *args, **kwds)
dlist = ['2007-01-%02i' % i for i in range(1, 16)]
dates = date_array(dlist, freq='D')
data = ma.array(np.arange(15), mask=[1, 0, 0, 0, 0] * 3)
self.d = (time_series(data, dates), data, dates)
#
def test_adjustendpoints(self):
"Tests adjust_endpoints"
(series, data, dates) = self.d
dseries = adjust_endpoints(series, series.dates[0], series.dates[-1])
assert_equal(dseries, series)
dseries = adjust_endpoints(series, series.dates[3], series.dates[-3])
assert_equal(dseries, series[3:-2])
dseries = adjust_endpoints(series,
end_date=Date('D', string='2007-01-31'))
assert_equal(dseries.size, 31)
assert_equal(dseries._mask, np.r_[series.mask, [1] * 16])
dseries = adjust_endpoints(series,
end_date=Date('D', string='2007-01-06'))
assert_equal(dseries.size, 6)
assert_equal(dseries, series[:6])
dseries = adjust_endpoints(series,
start_date=Date('D', string='2007-01-06'),
end_date=Date('D', string='2007-01-31'))
assert_equal(dseries.size, 26)
assert_equal(dseries._mask, np.r_[series.mask[5:], [1] * 16])
#
def test_adjustendpoints_withdatestrings(self):
"Tests adjust_endpoints w/ string dates"
(series, data, dates) = self.d
dseries = adjust_endpoints(series, end_date='2007-01-31')
assert_equal(dseries.size, 31)
assert_equal(dseries._mask, np.r_[series.mask, [1] * 16])
dseries = adjust_endpoints(series, end_date='2007-01-06')
assert_equal(dseries.size, 6)
assert_equal(dseries, series[:6])
dseries = adjust_endpoints(series,
start_date='2007-01-06',
end_date='2007-01-31')
assert_equal(dseries.size, 26)
assert_equal(dseries._mask, np.r_[series.mask[5:], [1] * 16])
#
def test_alignseries(self):
"Tests align_series & align_with"
(series, data, dates) = self.d
#
empty_series = time_series([], freq='d')
a, b = align_series(series, empty_series)
assert_equal(a.start_date, b.start_date)
assert_equal(a.end_date, b.end_date)
#
aseries = time_series(data, dates + 10)
bseries = time_series(data, dates - 10)
(a, b) = align_with(series, aseries, bseries)
assert_equal(a.dates, series.dates)
assert_equal(b.dates, series.dates)
assert_equal(a[-5:], series[:5])
assert_equal(b[:5], series[-5:])
#
def test_tshift(self):
"Test tshift function"
series = self.d[0]
shift_negative = series.tshift(-1)
result_data = [999] + [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13]
result_mask = [ 1] + [1, 0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0]
shift_negative_result = time_series(result_data,
dates=series.dates,
mask=result_mask)
shift_positive = series.tshift(1)
result_data = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14] + [999]
result_mask = [0, 0, 0, 0, 1, 0, 0, 0, 0, 1, 0, 0, 0, 0] + [ 1]
shift_positive_result = time_series(result_data,
dates=series.dates,
mask=result_mask)
assert_array_equal(shift_negative, shift_negative_result)
assert_array_equal(shift_positive, shift_positive_result)
#
def test_split(self):
"""Test the split function."""
ms = time_series(np.arange(62).reshape(31, 2),
start_date=Date(freq='d', year=2005, month=7, day=1))
d1, d2 = split(ms)
assert_array_equal(d1.data, ms.data[:, 0])
assert_array_equal(d1.dates, ms.dates)
assert_array_equal(d2.data, ms.data[:, 1])
series = self.d[0]
ss = split(series)[0]
assert_array_equal(series, ss)
def test_convert(self):
"""Test convert function
Just check basic functionality. The details of the actual
date conversion algorithms are already tested by asfreq in the
test_dates test suite.
"""
June2005M = Date(freq='M', year=2005, month=6)
lowFreqSeries = time_series(np.arange(10), start_date=June2005M)
# Conversion to same frequency
assert_array_equal(lowFreqSeries, lowFreqSeries.convert("M"))
# Conversion to higher frequency - position=START
lowToHigh_start = lowFreqSeries.convert('B', position='START')
assert_equal(lowToHigh_start.start_date,
June2005M.asfreq("B", relation="START"))
assert_equal(lowToHigh_start.end_date,
(June2005M + 9).asfreq("B", relation="END"))
assert_equal(lowToHigh_start.mask[0], False)
assert_equal(lowToHigh_start.mask[-1], True)
# Conversion to higher frequency - position=END
lowToHigh_end = lowFreqSeries.convert('B', position='END')
assert_equal(lowToHigh_end.start_date,
June2005M.asfreq("B", relation="START"))
assert_equal(lowToHigh_end.end_date,
(June2005M + 9).asfreq("B", relation="END"))
assert_equal(lowToHigh_end.mask[0], True)
assert_equal(lowToHigh_end.mask[-1], False)
# ensure that position argument is not case sensitive
lowToHigh_start_lowercase = lowFreqSeries.convert('B', position='start')
assert_array_equal(lowToHigh_start, lowToHigh_start_lowercase)
#
# Conversion to lower frequency
June2005B = Date(freq='b', year=2005, month=6, day=1)
highFreqSeries = time_series(np.arange(100), start_date=June2005B)
highToLow = highFreqSeries.convert('M', func=None)
assert_equal(highToLow.ndim, 2)
assert_equal(highToLow.shape[1], 23)
assert_equal(highToLow.start_date, June2005B.asfreq('M'))
assert_equal(highToLow.end_date, (June2005B + 99).asfreq('M'))
def test_convert_with_func(self):
"Test convert w/ function on 1D series"
mdata = ts.time_series(np.arange(24),
mask=[1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 1,
0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1],
start_date=ts.Date('M', '2001-01'))
test = mdata.convert('A', func=ts.last_unmasked_val)
control = ts.time_series([7, 22], start_date=ts.Date('A', '2001'))
assert_equal(test, control)
def test_convert_nd_with_func(self):
"Test convert w/ function on nD series"
ndseries = time_series(np.arange(124).reshape(62, 2),
start_date=Date(freq='D', string='2005-07-01'))
assert_equal(ndseries.convert('M', sum), [[930, 961], [2852, 2883]])
def test_fill_missing_dates(self):
"""Test fill_missing_dates function"""
_start = Date(freq='m', year=2005, month=1)
_end = Date(freq='m', year=2005, month=4)
#
dates = date_array([_start, _end], freq='M')
series = time_series([1, 2], dates)
filled_ser = fill_missing_dates(series)
#
assert_equal(filled_ser.start_date, _start)
assert_equal(filled_ser.end_date, _end)
self.failUnless(filled_ser.is_full())
self.failUnless(not filled_ser.has_duplicated_dates())
assert_equal(filled_ser.size, _end - _start + 1)
#
data = np.arange(5 * 24).reshape(5, 24)
datelist = ['2007-07-0%i' % i for i in (1, 2, 3, 5, 6)]
dates = date_array(datelist, freq='D')
dseries = time_series(data, dates)
ndates = date_array(start_date=dates[0], end_date=dates[-2])
#
fseries = fill_missing_dates(dseries)
assert_equal(fseries.shape, (6, 24))
assert_equal(fseries.mask[:, 0], [0, 0, 0, 1, 0, 0])
#
fseries = fill_missing_dates(dseries[:, 0])
assert_equal(fseries.shape, (6,))
assert_equal(fseries.mask, [0, 0, 0, 1, 0, 0])
#
series = time_series(data.ravel()[:4].reshape(2, 2), dates=dates[:-1])
fseries = fill_missing_dates(series)
assert_equal(fseries.shape, (5,))
assert_equal(fseries.mask, [0, 0, 0, 1, 0, ])
def test_fill_missing_dates_structured_arrays(self):
"Test fill_missing_dates on structured arrays"
ndtype = [('a', float), ('b', float)]
series = ts.time_series([(1, 1), (2, 2), (3, 3), ],
dtype=ndtype,
dates=['2001-%02i' % i for i in (1, 2, 6)],
freq='M')
test = series.fill_missing_dates()
control = ts.time_series([(1, 1), (2, 2), (0, 0),
(0, 0), (0, 0), (3, 3), ],
mask=[False, False, True, True, True, False],
dtype=ndtype,
start_date=ts.Date('M', '2001-01'))
assert_equal(test, control)
#
def test_fill_missing_dates_undefined(self):
"Test fill_missing_dates on undefined frequencies."
ndtype = [('a', float), ('b', float)]
series = ts.time_series([(1, 1), (2, 2), (3, 3), ],
dtype=ndtype,
dates=[1, 2, 6],
freq='U')
test = series.fill_missing_dates()
control = ts.time_series([(1, 1), (2, 2), (0, 0),
(0, 0), (0, 0), (3, 3), ],
mask=[False, False, True, True, True, False],
dtype=ndtype,
start_date=ts.Date('U', 1))
assert_equal(test, control)
def test_pickling(self):
"Tests pickling/unpickling"
(series, data, dates) = self.d
import cPickle
series_pickled = cPickle.loads(series.dumps())
assert_equal(series_pickled.dates, series.dates)
assert_equal(series_pickled.data, series.data)
assert_equal(series_pickled.mask, series.mask)
#
data = ma.array(np.matrix(range(10)).T, mask=[1, 0, 0, 0, 0] * 2)
dates = date_array(start_date=now('D'), length=10)
series = time_series(data, dates=dates)
series_pickled = cPickle.loads(series.dumps())
assert_equal(series_pickled.dates, series.dates)
assert_equal(series_pickled.data, series.data)
assert_equal(series_pickled.mask, series.mask)
self.failUnless(isinstance(series_pickled._data, np.matrix))
#
def test_pickling_memo(self):
"Test the conservation of _optinfo"
import cPickle
control = ts.time_series(np.arange(10), start_date=ts.Date('A', 2001))
control._optinfo['memo'] = "Control information"
test = cPickle.loads(cPickle.dumps(control))
assert_equal(test._dates, control._dates)
assert_equal(test, control)
assert_equal(test._optinfo, control._optinfo)
#
# def test_pickling_oddity(self):
# "Test some pickling oddity (bug #97)"
# import cPickle
# control = ts.time_series([{'a':1}], start_date=ts.Date('A', 2001))
# if tuple(map(int, np.version.version.split('.')[:2])) > (1, 4):
# test = cPickle.loads(cPickle.dumps(control))
# assert_equal(test, control)
# assert_equal(test.dates, control.dates)
def test_empty_timeseries(self):
"Tests that empty TimeSeries are handled properly"
empty_ts = time_series([], freq='b')
assert_array_equal(empty_ts, empty_ts + 1)
assert_array_equal(empty_ts, empty_ts + empty_ts)
assert_equal(empty_ts.start_date, None)
assert_equal(empty_ts.end_date, None)
def test__timeseriescompat_multiple(self):
"Tests the compatibility of multiple time series."
newyearsday = Date('D', '2005-01-01')
aprilsfool = Date('D', '2005-04-01')
seriesM_10 = time_series(np.arange(10),
date_array(start_date=newyearsday.asfreq('M'),
length=10))
seriesD_10 = time_series(np.arange(10),
date_array(start_date=newyearsday, length=10))
seriesD_5 = time_series(np.arange(5),
date_array(start_date=newyearsday, length=5))
seriesD_5_apr = time_series(np.arange(5),
date_array(start_date=aprilsfool, length=5))
self.failUnless(tseries._timeseriescompat_multiple(seriesM_10, seriesM_10, seriesM_10))
exception = False
try:
tseries._timeseriescompat_multiple(seriesM_10, seriesD_10)
except ts.TimeSeriesCompatibilityError:
exception = True
self.failUnless(exception)
exception = False
try:
tseries._timeseriescompat_multiple(seriesD_5, seriesD_10)
except ts.TimeSeriesCompatibilityError:
exception = True
self.failUnless(exception)
exception = False
try:
tseries._timeseriescompat_multiple(seriesD_5, seriesD_5_apr)
except ts.TimeSeriesCompatibilityError:
exception = True
self.failUnless(exception)
def test_compressed(self):
"Tests compress"
dlist = ['2007-01-%02i' % i for i in range(1, 16)]
dates = date_array(dlist, freq='D')
data = ma.array(np.arange(15), mask=[1, 0, 0, 0, 0] * 3, dtype=float_)
series = time_series(data, dlist, freq='D')
#
keeper = np.array([0, 1, 1, 1, 1] * 3, dtype=bool_)
c_series = series.compressed()
assert_equal(c_series.data, [1, 2, 3, 4, 6, 7, 8, 9, 11, 12, 13, 14])
assert_equal(c_series.mask, nomask)
assert_equal(c_series.dates, dates[keeper])
#
series_st = time_series(ma.column_stack((data, data[::-1])),
dates=dates)
c_series = series_st.compressed()
d = [1, 2, 3, 6, 7, 8, 11, 12, 13]
assert_equal(c_series.data, np.c_[(d, list(reversed(d)))])
assert_equal(c_series.mask, nomask)
assert_equal(c_series.dates, dates[d])
def test_concatenate(self):
"Tests concatenate"
dlist = ['2007-%02i' % i for i in range(1, 6)]
_dates = date_array(dlist, freq='M')
data = ma.array(np.arange(5), mask=[1, 0, 0, 0, 0], dtype=float_)
#
ser_1 = time_series(data, _dates)
ser_2 = time_series(data, dates=_dates + 10)
newseries = concatenate((ser_1, ser_2), fill_missing=True)
assert_equal(newseries._series, [0, 1, 2, 3, 4,
0, 0, 0, 0, 0,
0, 1, 2, 3, 4])
assert_equal(newseries._mask, [1, 0, 0, 0, 0] + [1] * 5 + [1, 0, 0, 0, 0])
assert not newseries.has_missing_dates()
#
ser_1 = time_series(data, _dates)
ser_2 = time_series(data, dates=_dates + 10)
newseries = concatenate((ser_1, ser_2))
assert_equal(newseries._data, [0, 1, 2, 3, 4, 0, 1, 2, 3, 4])
assert_equal(newseries._mask, [1, 0, 0, 0, 0] + [1, 0, 0, 0, 0])
assert newseries.has_missing_dates()
#
ser_2 = time_series(data, dates=_dates + 3)
newseries = concatenate((ser_1, ser_2))
assert_equal(newseries._data, [0, 1, 2, 3, 4, 2, 3, 4])
assert_equal(newseries._mask, [1, 0, 0, 0, 0, 0, 0, 0])
#
newseries = concatenate((ser_1, ser_1[::-1]))
assert_equal(newseries, ser_1)
#
def test_concatenate_remove_duplicates(self):
"Test concatenate w/ remove_duplicates"
first = Date("D", "2009-01-01")
a = time_series([1, 2, 3, ], start_date=first)
b = time_series([10, 20, 30, 40, 50], start_date=first)
#
test = ts.concatenate((a, b), remove_duplicates=True)
ctrl = time_series([1, 2, 3, 40, 50], start_date=first)
assert_equal(test, ctrl)
assert_equal(test.dates, ctrl.dates)
#
test = ts.concatenate((b, a), remove_duplicates=True)
ctrl = time_series([10, 20, 30, 40, 50], start_date=first)
assert_equal(test, ctrl)
assert_equal(test.dates, ctrl.dates)
#
c = time_series(100 * np.arange(1, 8), start_date=first + 2)
test = ts.concatenate((a, b, c), remove_duplicates=True)
ctrl = time_series([1, 2, 3, 40, 50, 400, 500, 600, 700],
start_date=first)
assert_equal(test, ctrl)
assert_equal(test.dates, ctrl.dates)
test = ts.concatenate((c, a, b), remove_duplicates=True)
ctrl = time_series([1, 2, 100, 200, 300, 400, 500, 600, 700],
start_date=first)
assert_equal(test, ctrl)
assert_equal(test.dates, ctrl.dates)
def test_concatenate_2D(self):
"Test concatenate on 2D"
adata = ma.array([[1, 2], [2, 4], [3, 8]], mask=[[0, 0], [1, 0], [0, 1]])
bdata = ma.array([[10, 20], [30, 40], [50, 60], [70, 80]])
a = time_series(adata, start_date=ts.Date('D', '01-Jan-2009'))
b = time_series(bdata, start_date=ts.Date('D', '05-Jan-2009'))
#
test = ts.concatenate([a, b], axis=0, remove_duplicates=True)
ctrl = ma.array([[1, 2], [2, 4], [3, 8],
[10, 20], [30, 40], [50, 60], [70, 80]],
mask=[[0, 0], [1, 0], [0, 1],
[0, 0], [0, 0], [0, 0], [0, 0]])
assert_equal(test.series, ctrl)
assert_equal(test.dates, np.concatenate((a.dates, b.dates)))
#
test = ts.concatenate([a, b], axis=0, remove_duplicates=False)
assert_equal(test.series, ctrl)
assert_equal(test.dates, np.concatenate((a.dates, b.dates)))
#
b.dates -= 2
test = ts.concatenate([a, b], axis=0, remove_duplicates=False)
ctrl = ts.time_series([[1, 2], [2, 4], [3, 8],
[10, 20], [30, 40], [50, 60], [70, 80]],
mask=[[0, 0], [1, 0], [0, 1],
[0, 0], [0, 0], [0, 0], [0, 0]],
dates=np.concatenate((a.dates, b.dates)),
freq='D')
assert_equal(test.series, ctrl)
assert_equal(test.dates, ctrl.dates)
test = ts.concatenate([a, b], axis=0, remove_duplicates=True)
ctrl = ts.time_series([[1, 2], [2, 4], [3, 8],
[30, 40], [50, 60], [70, 80]],
mask=[[0, 0], [1, 0], [0, 1],
[0, 0], [0, 0], [0, 0]],
start_date=a.dates[0])
assert_equal(test.series, ctrl)
assert_equal(test.dates, ctrl.dates)
#
def test_maxmin(self):
"Test min/max"
series = time_series(np.arange(10), start_date=now('D'))
smax = series.max()
#!!!: Used to be a TimeSeries, now is only a scalar
# self.failUnless(isinstance(smax, TimeSeries))
# assert_equal(smax._dates, date_array(series._dates[-1]))
self.failUnless(not isinstance(smax, TimeSeries))
assert_equal(smax, 9)
#
smin = series.min()
#!!!: Used to be a TimeSeries, now is only a scalar
# self.failUnless(isinstance(smin, TimeSeries))
# assert_equal(smin._dates, date_array(series._dates[0]))
assert_equal(smin, 0)
#
series = time_series([[0, 1, 2, 3, 4], [9, 8, 7, 6, 5]],
start_date=now('D'))
smax = series.max(0)
assert_equal(smax.series, [9, 8, 7, 6, 5])
assert_equal(smax.dates, date_array([series.dates[1]] * 5))
smax = series.max(1)
assert_equal(smax.series, [4, 9])
assert_equal(smax.dates, series.dates)
smax = series.max()
assert_equal(smax.series, [9])
assert_equal(smax.dates, date_array(series.dates[1]))
ser_m = ts.time_series(range(10), freq='M', start_date='2008-01-01')
ser_q = ser_m.convert(freq='Q')
mx = ser_q.max(-1)
assert_equal(mx, ma.array([2, 5, 8, 9]))
self.failUnless(isinstance(mx, TimeSeries))
#
def test_pct(self):
series = time_series(np.arange(1, 10), start_date=now('D'))
_pct = series.pct()
assert_equal(_pct.dtype, np.dtype('d'))
assert_equal(series.start_date, _pct.start_date)
assert_equal(series.end_date, _pct.end_date)
self.failUnless(_pct[0] is masked)
assert_equal(_pct[1], 1.0)
assert_equal(_pct[2], 0.5)
series = ts.time_series([2., 1., 2., 3.],
start_date=ts.Date(freq='A', year=2005))
# standard pct
result = series.pct()
assert_almost_equal(result,
ma.array([999, -0.5, 1.0, 0.5], mask=[1, 0, 0, 0])
)
result = series.pct(2)
assert_almost_equal(
result,
ma.array([999, 999, 0.0, 2.0], mask=[1, 1, 0, 0])
)
# log pct
result = series.pct_log()
assert_almost_equal(
result,
ma.array(
[999, -0.69314718056, 0.69314718056, 0.405465108108],
mask=[1, 0, 0, 0])
)
result = series.pct_log(2)
assert_almost_equal(
result,
ma.array([999, 999, 0.0, 1.09861228867], mask=[1, 1, 0, 0])
)
# symmetric pct
result = series.pct_symmetric()
assert_almost_equal(
result,
ma.array(
[999, -0.666666666667, 0.666666666667, 0.4], mask=[1, 0, 0, 0])
)
result = series.pct_symmetric(2)
assert_almost_equal(
result,
ma.array([999, 999, 0.0, 1.0], mask=[1, 1, 0, 0])
)
def test_find_duplicated_dates(self):
"Test find_duplicated_dates"
years = ['2000', '2001', '2002', '2003', '2003',
'2003', '2004', '2005', '2005', '2006']
series = time_series(np.arange(len(years)), dates=years, freq='A')
test = find_duplicated_dates(series)
control = {Date('A', '2003'): (np.array([3, 4, 5]),),
Date('A', '2005'): (np.array([7, 8]),), }
assert_equal(test, control)
#
def test_find_duplicated_dates_allduplicated(self):
"Test find_duplicated_dates w/all duplicates"
series = time_series([0, 1, 2, 3, 4],
dates=[2000, 2000, 2000, 2000, 2000], freq='A')
test = find_duplicated_dates(series)
control = {Date('A', '2000'): (np.array([0, 1, 2, 3, 4]),), }
assert_equal(test, control)
#
def test_find_duplicated_dates_noduplicates(self):
"Test find_duplicated_dates w/o duplicates"
series = time_series(np.arange(5), start_date=Date('A', '2001'))
test = find_duplicated_dates(series)
assert_equal(test, {})
def test_remove_duplicated_dates(self):
"Test remove_duplicated_dates"
years = ['2000', '2001', '2002', '2003', '2003',
'2003', '2004', '2005', '2005', '2006']
series = time_series(np.arange(len(years)), dates=years, freq='A')
test = remove_duplicated_dates(series)
control = time_series([0, 1, 2, 3, 6, 7, 9],
start_date=Date('A', '2000'))
assert_equal(test, control)
assert_equal(test._dates, control._dates)
#
def test_remove_duplicated_dates_allduplicates(self):
"Test remove_duplicated_dates w/ all duplicates"
years = ['2000', '2000', '2000', '2000', '2000']
series = time_series(np.arange(len(years)), dates=years, freq='A')
test = remove_duplicated_dates(series)
control = time_series([0, ],
start_date=Date('A', '2000'))
assert_equal(test, control)
assert_equal(test._dates, control._dates)
#
def test_remove_duplicated_dates_noduplicates(self):
"Test remove_duplicated_dates w/o duplicates"
series = time_series(np.arange(5), start_date=Date('A', '2001'))
test = remove_duplicated_dates(series)
assert_equal(test, series)
assert_equal(test._dates, series._dates)
#
def test_remove_duplicated_dates_nonchrono(self):
"Test remove_duplicated_dates on non-chronological series"
series = time_series([0, 1, 2, 3, 4, 5, 6],
dates=[2005, 2005, 2004, 2003, 2002, 2002, 2002],
freq='A',
autosort=False)
test = remove_duplicated_dates(series)
control = time_series([0, 2, 3, 4],
dates=[2005, 2004, 2003, 2002], freq='A',
autosort=True)
assert_equal(test, control)
assert_equal(test._dates, control._dates)
#------------------------------------------------------------------------------
class TestMisc(TestCase):
def test_ma_ufuncs(self):
a = time_series([-2, -1, 0, 1, 2], start_date=now('D'))
z = ma.sqrt(a)
self.failUnless(isinstance(z, TimeSeries))
assert_equal(z, [1, 1, 0, 1, np.sqrt(2)])
assert_equal(z.mask, [1, 1, 0, 0, 0])
assert_equal(z.dates, a.dates)
def test_emptylike(self):
x = time_series([1, 2, 3, 4, 5], mask=[1, 0, 0, 0, 0],
start_date=now('D'))
y = ts.empty_like(x)
# Basic checks
assert_equal(x.dtype, y.dtype)
assert_equal(x.shape, y.shape)
#
y.flat = 0
assert_equal(x.mask, [1, 0, 0, 0, 0])
assert_equal(y.mask, nomask)
#
x.mask = nomask
y = ts.empty_like(x)
assert_equal(y.mask, nomask)
def test_compatibility_shape(self):
"Tests shape compatibility."
data = np.arange(2 * 3 * 4 * 5,)
dates = np.empty((2 * 3 * 4 * 5,))
assert_equal(get_varshape(data, dates), ())
#
dates.shape = (2, 3, 4, 5)
assert_equal(get_varshape(data, dates), ())
#
dates = np.empty((2 * 3 * 4,))
try:
assert_equal(get_varshape(data, dates), None)
except TimeSeriesCompatibilityError:
pass
#
dates = np.empty((3 * 3 * 5,))
try:
assert_equal(get_varshape(data, dates), None)
except TimeSeriesCompatibilityError:
pass
#
data.shape = (2 * 3 * 4, 5)
dates = np.empty((2 * 3 * 4,))
assert_equal(get_varshape(data, dates), (5,))
data.shape = (2 * 3, 4 * 5)
dates = np.empty((2 * 3 * 4,))
#!/usr/bin/env python
# encoding: utf-8
#
# Copyright SAS Institute
#
# Licensed under the Apache License, Version 2.0 (the License);
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
''' Write ONNX model '''
from onnx import defs
from onnx import helper, numpy_helper
from onnx import TensorProto
import numpy as np
class OnnxWriteError(ValueError):
'''
Used to indicate an error while writing the ONNX model definition
'''
def sas_to_onnx(layers, model_table, model_weights):
'''
Convert DLPy model to ONNX
Parameters
----------
layers : iter-of-Layers
Specifies the layers defining the model.
model_table : :class:`CASTable`
Specifies the CASTable of the model.
model_weights : :class:`pandas.DataFrame` or :class:`CASTable`
DataFrame or CASTable containing the model weights.
If this is a CASTable, the weights will be fetched from
the CAS server. This may take a long time if
the model has many weights.
Returns
-------
Loaded in-memory ModelProto
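Examples
--------
A hypothetical call (the ``model`` attribute names below are illustrative
only and are not guaranteed by this function's contract):
>>> proto = sas_to_onnx(layers=model.layers,
...                     model_table=model.model_table,
...                     model_weights=model.model_weights)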
'''
nodes = []
inputs = []
outputs = []
initializer = []
import pandas as pd
if isinstance(model_weights, pd.DataFrame):
fetch = False
else:
fetch = True
model_name = model_table.query('_DLKey1_ = "modeltype"') \
.fetch()['Fetch']['_DLKey0_'][0]
for layer in layers:
if layer.type == 'input':
H = int(layer.config['height'])
W = int(layer.config['width'])
C = int(layer.config['n_channels'])
value_info = helper.make_tensor_value_info(name=layer.name,
elem_type=TensorProto.FLOAT,
shape=[1, C, H, W])
inputs.append(value_info)
elif layer.type == 'convo' or layer.type == 'groupconvo':
H = int(layer.config['height'])
W = int(layer.config['width'])
M = int(layer.config['n_filters'])
# get group
group = 1
if 'n_groups' in layer.config:
group = layer.config['n_groups']
# set stride
S_h, S_w = get_strides(layer)
# set padding
padding = get_padding(layer)
bias = layer.config['include_bias']
if bias is None:
bias = True
dropout = layer.config['dropout']
act = layer.config['act']
if act in [None, 'AUTO']:
act = 'RECTIFIER'
# inputs to conv op
conv_input = [l.name for l in layer.src_layers]
conv_input.append(layer.name + '_w')
if bias:
conv_input.append(layer.name + '_b')
# create names of node input/output
if not dropout and act.lower() == 'identity':
conv_output = [layer.name]
elif not dropout:
conv_output = [layer.name + '_conv_out']
act_input = conv_output
act_output = [layer.name]
elif dropout and act.lower() == 'identity':
conv_output = [layer.name + '_conv_out']
dropout_input = conv_output
dropout_output = [layer.name]
else:
conv_output = [layer.name + '_conv_out']
act_input = conv_output
act_output = [layer.name + '_act_out']
dropout_input = act_output
dropout_output = [layer.name]
conv_op = helper.make_node(op_type='Conv',
inputs=conv_input,
outputs=conv_output,
pads=padding,
kernel_shape=[H, W],
strides=[S_h, S_w],
group=group)
nodes.append(conv_op)
# activation op
if act.lower() != 'identity':
act_op = make_onnx_activation(act, act_input, act_output)
nodes.append(act_op)
# dropout op
if dropout:
dropout_op = helper.make_node(op_type='Dropout',
inputs=dropout_input,
outputs=dropout_output,
ratio=dropout)
nodes.append(dropout_op)
# create weight tensors
layer_id = get_layer_id(model_table, layer.name)
if fetch:
weights = fetch_weights(model_weights, layer_id)
else:
weights = get_weights_from_dataframe(model_weights, layer_id)
if bias:
conv_weights = np.array(weights[:-M], dtype=np.float32)
bias_weights = np.array(weights[-M:], dtype=np.float32)
import numpy as np
import matplotlib.pyplot as plt
def plothist(hist, bin_edges, baseline=None, histtype="bar", axis=None, **kwargs):
"""Plot a histogram from the hist and bin edges as returned by numpy.histogram.
"""
if axis is None:
axis = plt.gca()
if not histtype == "bar":
raise NotImplementedError
zero = np.zeros(1, dtype=hist.dtype)
x = np.concatenate([bin_edges, [bin_edges[-1]]])
"""
Tests for :mod:`numpy.core.numeric`.
Does not include tests which fall under ``array_constructors``.
"""
from typing import List
import numpy as np
class SubClass(np.ndarray):
...
i8: np.int64
A: np.ndarray
B: List[int]
C: SubClass
reveal_type(np.count_nonzero(i8)) # E: int
reveal_type(np.count_nonzero(A)) # E: int
reveal_type(np.count_nonzero(B)) # E: int
reveal_type(np.count_nonzero(A, keepdims=True)) # E: Union[numpy.signedinteger[Any], numpy.ndarray[Any, Any]]
reveal_type(np.count_nonzero(A, axis=0)) # E: Union[numpy.signedinteger[Any], numpy.ndarray[Any, Any]]
reveal_type(np.isfortran(i8)) # E: bool
reveal_type(np.isfortran(A)) # E: bool
reveal_type(np.argwhere(i8)) # E: numpy.ndarray[Any, Any]
reveal_type(np.argwhere(A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.flatnonzero(i8)) # E: numpy.ndarray[Any, Any]
reveal_type(np.flatnonzero(A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.correlate(B, A, mode="valid")) # E: numpy.ndarray[Any, Any]
reveal_type(np.correlate(A, A, mode="same")) # E: numpy.ndarray[Any, Any]
reveal_type(np.convolve(B, A, mode="valid")) # E: numpy.ndarray[Any, Any]
reveal_type(np.convolve(A, A, mode="same")) # E: numpy.ndarray[Any, Any]
reveal_type(np.outer(i8, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.outer(B, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.outer(A, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.outer(A, A, out=C)) # E: SubClass
reveal_type(np.tensordot(B, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.tensordot(A, A)) # E: numpy.ndarray[Any, Any]
reveal_type(np.tensordot(A, A, axes=0)) # E: numpy.ndarray[Any, Any]
reveal_type(np.tensordot(A, A, axes=(0, 1))) # E: numpy.ndarray[Any, Any]
reveal_type(np.isscalar(i8)) # E: bool
reveal_type(np.isscalar(A)) # E: bool
reveal_type(np.isscalar(B)) # E: bool
reveal_type(np.roll(A, 1)) # E: numpy.ndarray[Any, Any]
reveal_type(np.roll(A, (1, 2))) # E: numpy.ndarray[Any, Any]
reveal_type(np.roll(B, 1))  # E: numpy.ndarray[Any, Any]
import os, sys
import argparse
import numpy as np
import cv2
from skimage import filters
import torch
import torch.nn.functional as F
import torchvision.transforms as transforms
from linefiller.thinning import thinning
from linefiller.trappedball_fill import trapped_ball_fill_multi, flood_fill_multi, mark_fill, build_fill_map, merge_fill, \
show_fill_map, my_merge_fill
# for super pixelpooling
from torch_scatter import scatter_mean
from torch_scatter import scatter_add
import softsplat
from forward_warp2 import ForwardWarp
from my_models import create_VGGFeatNet
from vis_flow import flow_to_color
def dline_of(x, low_thr=1, high_thr=20, bf_args=[30,40,30]):
xm = cv2.medianBlur(x, 5)
# xga = cv2.GaussianBlur(x,(5, 5),cv2.BORDER_DEFAULT)
xb = cv2.bilateralFilter(x, bf_args[0], bf_args[1], bf_args[2])
# xb = cv2.bilateralFilter(xb, 20, 60, 10 )
xg = cv2.cvtColor(xb, cv2.COLOR_RGB2GRAY)
xl = cv2.Laplacian(xb, ddepth = cv2.CV_32F, ksize=5)
xgg = xl
xgg = xgg.astype(np.float32) * (255. / (xgg.astype(np.float32).max() * 1.0))
xh = filters.apply_hysteresis_threshold(xgg, low_thr, high_thr)
xgg[xh == False] = 0
# xgg[xh == True] = 255
xgg1 = xgg.copy() * 20
xgg1 = np.max(xgg1, axis=2)
return np.clip(255 - xgg1, 0, 255)
def squeeze_label_map(label_map):
ret_label_map = label_map.copy()
labels, counts = np.unique(ret_label_map, return_counts=True)
label_orders = np.argsort(counts)
for ord_id, ord_val in enumerate(label_orders):
mask = (label_map == labels[ord_val])
ret_label_map[mask] = ord_id
return ret_label_map
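# Hedged illustration of squeeze_label_map (toy input values are assumed):
# labels are re-indexed by ascending pixel count, so the most frequent region
# ends up with the largest new id.
#
# lm = np.array([[5, 5, 5], [9, 9, 2]])
# squeeze_label_map(lm)  # -> [[2, 2, 2], [1, 1, 0]]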
def trapped_ball_processed(binary, in_image=None, do_merge=True):
fills = []
result = binary
fill = trapped_ball_fill_multi(result, 3, method='max')
fills += fill
result = mark_fill(result, fill)
print('result num 3: ', len(fills))
fill = trapped_ball_fill_multi(result, 2, method=None)
fills += fill
result = mark_fill(result, fill)
print('result num 2: ', len(fills))
fill = trapped_ball_fill_multi(result, 1, method=None)
fills += fill
result = mark_fill(result, fill)
print('result num 1: ', len(fills))
fill = flood_fill_multi(result)
fills += fill
print('flood_fill_multi num 1: ', len(fills))
fillmap = build_fill_map(result, fills)
# print('fillmap num: ', len(np.unique(fillmap)))
if do_merge:
if in_image is None:
fillmap = merge_fill(fillmap, max_iter=10)
else:
fillmap = my_merge_fill(in_image, fillmap)
fillmap = thinning(fillmap)
return fillmap
def superpixel_count(label_map):
_, pixelCounts = np.unique(label_map, return_counts=True)
return pixelCounts
def mutual_matching(corrMap, descending = True):
sortedCorrMap_1, ranks_1 = corrMap.sort(dim=1, descending=descending)
sortedCorrMap_2, ranks_2 = corrMap.sort(dim=0, descending=descending)
_, idxRanks_1 = ranks_1.sort(dim=1, descending=False)
_, idxRanks_2 = ranks_2.sort(dim=0, descending=False)
# print(idxRanks_1.shape)
# print(idxRanks_2.shape)
mutualRanks = idxRanks_1 + idxRanks_2
rankSum_1to2, matching_1to2 = mutualRanks.min(dim=1)
rankSum_2to1, matching_2to1 = mutualRanks.min(dim=0)
return (rankSum_1to2, matching_1to2, sortedCorrMap_1,
rankSum_2to1, matching_2to1, sortedCorrMap_2)
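# Rough usage sketch for mutual_matching (shapes are assumptions): given an
# [N1, N2] correlation map between superpixels of two frames, matching_1to2[i]
# is the frame-2 superpixel whose mutual rank sum with i is smallest, and a
# rank sum of 0 marks a perfect mutual first choice.
#
# corr = torch.rand(5, 7)  # hypothetical correlation map
# rank12, match12, _, rank21, match21, _ = mutual_matching(corr)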
def superpixel_pooling(feat_map, label_map, use_gpu=False):
fC,fH,fW = feat_map.shape
lH,lW = label_map.shape
if fH != lH or fW != lW:
print('feature map and label map do not match')
return
feat_flat = feat_map.reshape([fC,fH*fW])
label_flat = torch.tensor(label_map.reshape(fH*fW)).long()
# print('max label: ', torch.max(label_flat).item())
# print('superpxiel num: ', len(torch.unique(label_flat)))
if use_gpu:
feat_flat = feat_flat.cuda()
label_flat = label_flat.cuda()
poolMean = scatter_mean(feat_flat, label_flat, dim=1)
return poolMean
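# Minimal sketch of what superpixel_pooling returns (toy tensors, assuming
# torch and torch_scatter are available as imported above): for a C x H x W
# feature map and an H x W label map, column l of the result is the mean
# feature vector over all pixels carrying label l.
#
# feat = torch.arange(2 * 2 * 3, dtype=torch.float).reshape(2, 2, 3)
# labels = np.array([[0, 0, 1], [1, 2, 2]])
# pooled = superpixel_pooling(feat, labels)  # shape [2, 3]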
def get_bounding_rect(points):
"""Get a bounding rect of points.
# Arguments
points: array of points.
# Returns
rect coord
"""
x1, y1, x2, y2 = np.min(points[1]), np.min(points[0]), np.max(points[1]), np.max(points[0])
return x1, y1, x2, y2
def get_deformable_flow(flowObj, img1, mask_1, box_1,
img2, mask_2, box_2,
warp_func=None, use_gpu=False):
mask1_patch = mask_1[box_1[1]:box_1[3]+1, box_1[0]:box_1[2] +1]
mask2_patch = mask_2[box_2[1]:box_2[3]+1, box_2[0]:box_2[2]+1 ]
gray1 = cv2.cvtColor(img1, cv2.COLOR_BGR2GRAY)
img1_patch = gray1[box_1[1]:box_1[3]+1, box_1[0]:box_1[2]+1]
img1_patch[np.invert(mask1_patch)] = 0
if np.mean(img1_patch[mask1_patch]) < 20:
img1_patch[mask1_patch] = 0.8*img1_patch[mask1_patch] + 0.2*200
gray2 = cv2.cvtColor(img2, cv2.COLOR_BGR2GRAY)
img2_patch = gray2[box_2[1]:box_2[3]+1, box_2[0]:box_2[2]+1 ]
img2_patch[np.invert(mask2_patch)] = 0
if np.mean(img2_patch[mask2_patch]) < 20:
img2_patch[mask2_patch] = 0.8*img2_patch[mask2_patch] + 0.2*200
# the black border should be larger than 8 required by DIS
tarH = max(img1_patch.shape[0], img2_patch.shape[0], 16) + 10
tarW = max(img1_patch.shape[1], img2_patch.shape[1], 16) + 10
H_front_pad = int((tarH - img1_patch.shape[0])//2)
H_back_pad = tarH - H_front_pad -img1_patch.shape[0]
W_front_pad = int((tarW - img1_patch.shape[1])//2)
W_back_pad = tarW - W_front_pad - img1_patch.shape[1]
img1_patch_pad = np.pad(img1_patch, ([H_front_pad, H_back_pad], [W_front_pad, W_back_pad]), mode='constant')
mask1_patch_pad = np.pad(mask1_patch.astype(np.uint8), ([H_front_pad, H_back_pad], [W_front_pad, W_back_pad]), mode='constant')
H_front_pad2 = int((tarH - img2_patch.shape[0])//2)
H_back_pad2 = tarH - H_front_pad2 -img2_patch.shape[0]
W_front_pad2 = int((tarW - img2_patch.shape[1])//2)
W_back_pad2 = tarW - W_front_pad2 - img2_patch.shape[1]
img2_patch_pad = np.pad(img2_patch, ([H_front_pad2, H_back_pad2], [W_front_pad2, W_back_pad2]), mode='constant')
mask2_patch_pad = np.pad(mask2_patch.astype(np.uint8), ([H_front_pad2, H_back_pad2], [W_front_pad2, W_back_pad2]), mode='constant')
# compute flow between patches
patch_flow = flowObj.calc(img1_patch_pad, img2_patch_pad, None)
union_rate = 1
if warp_func is not None:
patch_flow_tensor = torch.Tensor(patch_flow.transpose([2,0,1])).unsqueeze(0)
fwarp_mask_tensor = torch.Tensor(mask1_patch_pad).unsqueeze(0).unsqueeze(0)
if use_gpu:
# use softsplat forward warp
fwarp_mask_tensor = warp_func(fwarp_mask_tensor.cuda(), patch_flow_tensor.cuda())
else:
fwarp_mask_tensor, norm = warp_func(fwarp_mask_tensor, patch_flow_tensor)
fwarp_mask_tensor[norm > 0] = fwarp_mask_tensor[norm > 0] / norm[norm > 0]
fwarp_mask = fwarp_mask_tensor[0][0].cpu().numpy()
kernel = np.ones((5,5), np.uint8)
# fwarp_mask_close = cv2.morphologyEx(fwarp_mask, cv2.MORPH_CLOSE, kernel)
fwarp_mask_close = fwarp_mask
fwarp_mask_close[fwarp_mask_close<0.05] = 0
union_region = np.logical_and(fwarp_mask_close.astype(np.bool), mask2_patch_pad.astype(np.bool))
union_rate = np.sum(union_region.astype(np.bool))/np.sum(fwarp_mask_close.astype(np.bool))
###
mask1_patch_pad = np.pad(mask1_patch, ([H_front_pad, H_back_pad], [W_front_pad, W_back_pad]), mode='constant')
mask_tmp = np.repeat(mask1_patch_pad[:,:,np.newaxis], 2, axis=2)
points_in_patch = np.where(mask1_patch_pad)
return patch_flow, points_in_patch, union_rate
def get_guidance_flow(label_map1, label_map2, img1, img2,
rank_sum, matching, sorted_corrMap,
mean_X_A, mean_Y_A, mean_X_B, mean_Y_B,
rank_sum_thr=0, use_gpu=False):
lH, lW = label_map1.shape
labelNum = len(np.unique(label_map1))
pixelCounts = superpixel_count(label_map1)
pixelCounts_2 = superpixel_count(label_map2)
guideflow_X = np.zeros([lH, lW])
guideflow_Y = np.zeros([lH, lW])
color_patch1 = show_fill_map(label_map1)
color_patch2 = show_fill_map(label_map2)
flowObj = cv2.optflow.createOptFlow_DIS(cv2.optflow.DISOpticalFlow_PRESET_MEDIUM)
# flowObj.setUseMeanNormalization(False)
# flowObj.setUseSpatialPropagation(False)
flowObj.setVariationalRefinementIterations(25)
# flowObj.setPatchSize(8)
# flowObj.setPatchStride(8)
flowObj.setFinestScale(0) # max 6
flowObj.setGradientDescentIterations(50)
if use_gpu:
func_fwarp2 = softsplat.ModuleSoftsplat('average')
else:
func_fwarp2 = ForwardWarp()
for l_id_1 in range(labelNum):
# labelMask = (label_map1 == l_id_1)
pixelNum = pixelCounts[l_id_1]
l_id_2 = matching[l_id_1].item()
curFlowX = mean_X_B[l_id_2] - mean_X_A[l_id_1]
curFlowY = mean_Y_B[l_id_2] - mean_Y_A[l_id_1]
flowLen = np.linalg.norm([curFlowX, curFlowY])
labelMask = (label_map1 == l_id_1)
labelMask2 = (label_map2 == l_id_2)
pixelNum_2 = pixelCounts_2[l_id_2]
isAreaValid = (max(pixelNum/pixelNum_2, pixelNum_2/pixelNum) < 3)
isValidPatch = (rank_sum[l_id_1] <= rank_sum_thr and flowLen <= 250 and
pixelNum < maxPixNum*0.12 and pixelNum > 50 and
isAreaValid)
if not isValidPatch:
guideflow_X[labelMask] = 0
guideflow_Y[labelMask] = 0
for cc in range(3):
color_patch1[:, :, cc][labelMask] = 255
else:
points_1 = np.where(labelMask)
points_2 = np.where(labelMask2)
box_1 = get_bounding_rect(points_1)
box_2 = get_bounding_rect(points_2)
patch_flow, points_in_patch, union_rate = get_deformable_flow(flowObj,
img1, labelMask, box_1,
img2, labelMask2, box_2,
warp_func=func_fwarp2,
use_gpu=use_gpu)
if union_rate > 0.8:
patch_flow_X = patch_flow[:,:,0]
patch_flow_Y = patch_flow[:,:,1]
guideflow_X[points_1] = (box_2[0] + box_2[2] - box_1[0] - box_1[2])/2 + patch_flow_X[points_in_patch]
guideflow_Y[points_1] = (box_2[1] + box_2[3] - box_1[1] - box_1[3])/2 + patch_flow_Y[points_in_patch]
for cc in range(3):
color_patch1[:, :, cc][labelMask] = color_patch2[:, :, cc][labelMask2][0]
else:
guideflow_X[labelMask] = 0
guideflow_Y[labelMask] = 0
for cc in range(3):
color_patch1[:, :, cc][labelMask] = 255
guideflow = np.concatenate((guideflow_X[np.newaxis,:,:], guideflow_Y[np.newaxis,:,:]), axis=0)
matching_color_patch = np.hstack((color_patch1, color_patch2)).astype(np.uint8)
return guideflow, matching_color_patch
def get_ctx_feature(label_map, featx1, featx2, featx4, featx8):
labelNum = len(np.unique(label_map))
featx1_pad = F.pad(featx1, [64, 64, 64, 64])
featx2_pad = F.pad(featx2, [32, 32, 32, 32])
featx4_pad = F.pad(featx4, [16, 16, 16, 16])
# featx8_pad = F.pad(featx8, [8, 8, 8, 8])
for l_idx in range(labelNum):
mask = (label_map == l_idx)
points = np.where(mask)
box = get_bounding_rect(points)
# same recepetive field
box_h = box[3] - box[1] + 64
box_w = box[2] - box[0] + 64
featx1_patch = featx1_pad[:,:,box[1]:box[1]+box_h+1, box[0]:box[0]+box_w+1]
featx2_patch = featx2_pad[:,:,box[1]//2:(box[1]+box_h)//2+1, box[0]//2:(box[0]+box_w)//2+1]
featx4_patch = featx4_pad[:,:,box[1]//4:(box[1]+box_h)//4+1, box[0]//4:(box[0]+box_w)//4+1]
# featx8_patch = featx8_pad[:,:,box[1]//8:(box[1]+box_h)//8+1, box[0]//8:(box[0]+box_w)//8+1]
# average whole patch
featx1_patch_flat = featx1_patch.flatten(start_dim=2, end_dim=-1).mean(dim=-1)
featx2_patch_flat = featx2_patch.flatten(start_dim=2, end_dim=-1).mean(dim=-1)
featx4_patch_flat = featx4_patch.flatten(start_dim=2, end_dim=-1).mean(dim=-1)
# featx8_patch7x7 = featx8_patch.flatten(start_dim=2, end_dim=-1).mean(dim=-1)
feat_patch_flat = torch.cat([featx1_patch_flat, featx2_patch_flat, featx4_patch_flat], dim=1)
if l_idx == 0:
ctxFeat = feat_patch_flat
else:
ctxFeat = torch.cat([ctxFeat, feat_patch_flat],dim=0)
return ctxFeat
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument('input_root')
parser.add_argument('output_root')
parser.add_argument('--label_root', default=None, help='root for label maps')
parser.add_argument('--start_idx', default=0,
help='threshold to differ motion regions from static')
parser.add_argument('--end_idx', default=None,
help='threshold to differ motion regions from static')
parser.add_argument('--rank_sum_thr', default=0,
help='threshold for rank sum')
parser.add_argument('--height', default=960,
help='height of the generated flow, default: 960')
parser.add_argument('--width', default=540,
help='width of the generated flow, default: 540')
parser.add_argument('--use_gpu', action='store_true')
args = parser.parse_args()
######
folder_root = args.input_root
save_root = args.output_root
label_root = args.label_root
start_idx = int(args.start_idx)
end_idx = None if args.end_idx is None else int(args.end_idx)
use_gpu = args.use_gpu
# tar_size = (1280, 720)
tar_size = (args.height, args.width)
# tar_size = (640, 360)
rankSumThr = int(args.rank_sum_thr)
######
print('use label maps from %s'%label_root)
print('use gpu: ', use_gpu)
sys.stdout.flush()
if not os.path.exists(save_root):
os.makedirs(save_root)
## make models
vggNet = create_VGGFeatNet()
if use_gpu:
vggNet = vggNet.cuda()
toTensor = transforms.ToTensor()
normalize = transforms.Normalize(mean=[0.485, 0.456, 0.406],
std=[0.229, 0.224, 0.225])
totalMatchCount = 0
folderList = sorted(os.listdir(folder_root))
if end_idx is None:
end_idx = len(folderList)
for f_idx, folder in enumerate(folderList[start_idx:end_idx]):
f_idx += start_idx
# if f_idx > 1 + start_idx:
# break
input_subfolder = os.path.join(folder_root, folder)
imgFileNames = sorted(os.listdir(input_subfolder))
print('-- [%d/%d] %s'%(f_idx, end_idx-1, folder))
print(imgFileNames)
sys.stdout.flush()
img1 = cv2.imread(os.path.join(input_subfolder, imgFileNames[0]))
img3 = cv2.imread(os.path.join(input_subfolder, imgFileNames[-1]))
# segmentation
img1_rs = cv2.resize(img1, tar_size)
img3_rs = cv2.resize(img3, tar_size)
if label_root is None:
if 'Japan' in folder:
boundImg1 = dline_of(img1_rs, 2, 20, [10,10,10]).astype(np.uint8)
boundImg3 = dline_of(img3_rs, 2, 20, [10,10,10]).astype(np.uint8)
else:
boundImg1 = dline_of(img1_rs, 1, 20, [30,40,30]).astype(np.uint8)
boundImg3 = dline_of(img3_rs, 1, 20, [30,40,30]).astype(np.uint8)
ret, binMap1 = cv2.threshold(boundImg1, 220, 255, cv2.THRESH_BINARY)
ret, binMap3 = cv2.threshold(boundImg3, 220, 255, cv2.THRESH_BINARY)
print('- trapped_ball_processed()')
sys.stdout.flush()
fillMap1 = trapped_ball_processed(binMap1, img1_rs)
fillMap3 = trapped_ball_processed(binMap3, img3_rs)
labelMap1 = squeeze_label_map(fillMap1)
labelMap3 = squeeze_label_map(fillMap3)
else:
print('- load labelmap')
sys.stdout.flush()
print(os.path.join(label_root, folder, 'labelmap_1.npy'))
print(os.path.join(label_root, folder, 'labelmap_3.npy'))
labelMap1 = np.load(os.path.join(label_root, folder, 'labelmap_1.npy'))
print(labelMap1.shape)
labelMap3 = np.load(os.path.join(label_root, folder, 'labelmap_3.npy'))
print(labelMap3.shape)
# VGG features
img1_rgb = cv2.cvtColor(img1_rs, cv2.COLOR_BGR2RGB)
img3_rgb = cv2.cvtColor(img3_rs, cv2.COLOR_BGR2RGB)
img1_tensor = normalize(toTensor(img1_rgb/255.).float())
img1_tensor = img1_tensor.unsqueeze(dim=0)
img3_tensor = normalize(toTensor(img3_rgb/255.).float())
img3_tensor = img3_tensor.unsqueeze(dim=0)
if use_gpu:
img1_tensor = img1_tensor.cuda()
img3_tensor = img3_tensor.cuda()
# featx1_1 = vggNet.slice1(img1_tensor)
# featx1_3 = vggNet.slice1(img3_tensor)
featx1_1, featx2_1, featx4_1, featx8_1, featx16_1 = vggNet(img1_tensor)
featx1_3, featx2_3, featx4_3, featx8_3, featx16_3 = vggNet(img3_tensor)
print('- compute correlation map')
sys.stdout.flush()
# superpixel pooling
labelMap1_x2 = labelMap1[1::2,1::2]
labelMap1_x4 = labelMap1_x2[1::2,1::2]
labelMap1_x8 = labelMap1_x4[1::2,1::2]
# labelMap1_x16 = labelMap1_x8[1::2,1::2]
labelMap3_x2 = labelMap3[1::2,1::2]
labelMap3_x4 = labelMap3_x2[1::2,1::2]
labelMap3_x8 = labelMap3_x4[1::2,1::2]
# labelMap3_x16 = labelMap3_x8[1::2,1::2]
featx1_pool_1 = superpixel_pooling(featx1_1[0], labelMap1, use_gpu)
featx2_pool_1 = superpixel_pooling(featx2_1[0], labelMap1_x2, use_gpu)
featx4_pool_1 = superpixel_pooling(featx4_1[0], labelMap1_x4, use_gpu)
featx8_pool_1 = superpixel_pooling(featx8_1[0], labelMap1_x8, use_gpu)
# featx16_pool_1 = superpixel_pooling(featx16_1[0], labelMap1_x16, use_gpu)
featx1_pool_3 = superpixel_pooling(featx1_3[0], labelMap3, use_gpu)
featx2_pool_3 = superpixel_pooling(featx2_3[0], labelMap3_x2, use_gpu)
featx4_pool_3 = superpixel_pooling(featx4_3[0], labelMap3_x4, use_gpu)
featx8_pool_3 = superpixel_pooling(featx8_3[0], labelMap3_x8, use_gpu)
# featx16_pool_3 = superpixel_pooling(featx16_3[0], labelMap3_x16, use_gpu)
feat_pool_1 = torch.cat([featx1_pool_1, featx2_pool_1, featx4_pool_1, featx8_pool_1], dim=0)
feat_pool_3 = torch.cat([featx1_pool_3, featx2_pool_3, featx4_pool_3, featx8_pool_3], dim=0)
# normalization
feat_p1_tmp = feat_pool_1 - feat_pool_1.min(dim=0)[0]
feat_p1_norm = feat_p1_tmp/feat_p1_tmp.sum(dim=0)
feat_p3_tmp = feat_pool_3 - feat_pool_3.min(dim=0)[0]
feat_p3_norm = feat_p3_tmp/feat_p3_tmp.sum(dim=0)
# for pixel distance
lH, lW = labelMap1.shape
gridX, gridY = np.meshgrid(np.arange(lW), np.arange(lH))
from abc import ABC, abstractmethod
from autofit.graphical.utils import numerical_jacobian
from autofit.mapper.operator import MultiVecOuterProduct
from functools import wraps
from typing import Type, Union, Tuple
import numpy as np
from scipy.special import ndtr, ndtri
from scipy.stats._continuous_distns import _norm_pdf
from ...mapper.operator import (
DiagonalMatrix,
LinearOperator,
ShermanMorrison
)
from ..factor_graphs import transform
class AbstractDensityTransform(ABC):
"""
This class allows the transformation of a probability density function, p(x)
whilst preserving the measure of the distribution, i.e.
\int p(x) dx = 1
p'(f) = p(f(x)) * |df/dx|
\inf p'(f) df = 1
Methods
-------
transform
calculates f(x)
inv_transform
calculates f^{-1}(y)
jacobian
calculates df/dx
log_det
calculates log |df/dx|
log_det_grad
calculates log |df/dx|, d log_det/dx
transform_det
calculates f(x), |df/dx|
transform_jac
calculates f(x), df/dx
transform_det_jac
calculates f(x), log_det, d log_det/dx, df/dx
These final 3 functions are defined so that child classes
can define custom methods that avoid recalculation of intermediate
values that are needed to calculate multiple versions of the quantities
"""
@abstractmethod
def transform(self, x):
pass
@abstractmethod
def inv_transform(self, x):
pass
@abstractmethod
def jacobian(self, x: np.ndarray) -> LinearOperator:
pass
@abstractmethod
def log_det(self, x: np.ndarray) -> np.ndarray:
pass
@abstractmethod
def log_det_grad(self, x: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
pass
def log_det_hess(self, x: np.ndarray) -> np.ndarray:
return numerical_jacobian(
x, lambda x: self.log_det_grad(x)[1].sum(0)
)
def transform_det(self, x) -> Tuple[np.ndarray, np.ndarray]:
return self.transform(x), self.log_det(x)
def transform_jac(self, x) -> Tuple[np.ndarray, LinearOperator]:
return self.transform(x), self.jacobian(x)
def transform_det_jac(
self, x
) -> Tuple[np.ndarray, np.ndarray, np.ndarray, LinearOperator]:
return (
self.transform(x),
*self.log_det_grad(x),
self.jacobian(x)
)
def transform_func(self, func):
@wraps(func)
def transformed_func(*args, **kwargs):
x, *args = args
x = self.transform(x)
return func(x, *args, **kwargs)
transformed_func.transform = self
return transformed_func
def transform_func_grad(self, func_grad):
@wraps(func_grad)
def transformed_func_grad(*args, **kwargs):
x, *args = args
x, jac = self.transform_jac(x)
val, grad = func_grad(x, *args, **kwargs)
return x, grad * jac
transformed_func_grad.transform = self
return transformed_func_grad
def transform_func_grad_hess(self, func_grad_hess):
@wraps(func_grad_hess)
def transformed_func_grad_hess(*args, **kwargs):
x, *args = args
x, jac = self.transform_jac(x)
val, grad, hess = func_grad_hess(x, *args, **kwargs)
return val, grad * jac, jac.quad(hess)
transformed_func_grad_hess.transform = self
return transformed_func_grad_hess
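# Rough usage sketch (assumes a concrete transform such as exp_transform,
# defined below): transform_func re-parametrises a density so that it is
# evaluated at f(x); the log-Jacobian corrections are applied by the
# *_grad/_hess wrappers, not by transform_func itself.
#
# def log_p(x):
#     return -0.5 * np.sum(x ** 2)
#
# log_p_y = exp_transform.transform_func(log_p)  # evaluates log_p(np.exp(y))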
class LinearTransform(AbstractDensityTransform):
def __init__(self, linear: LinearOperator):
self.linear = linear
def transform(self, x: np.ndarray) -> np.ndarray:
return self.linear * x
def inv_transform(self, x: np.ndarray) -> np.ndarray:
return self.linear.ldiv(x)
def jacobian(self, x: np.ndarray) -> np.ndarray:
return self.linear
def log_det(self, x: np.ndarray) -> np.ndarray:
return self.linear.log_det
def log_det_grad(self, x: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
return self.log_det(x), 0
class LinearShiftTransform(LinearTransform):
def __init__(self, shift: float = 0, scale: float = 1):
self.shift = shift
self.scale = scale
self.linear = DiagonalMatrix(np.reciprocal(self.scale))
def inv_transform(self, x: np.ndarray) -> np.ndarray:
return x * self.scale + self.shift
def transform(self, x: np.ndarray) -> np.ndarray:
return (x - self.shift) / self.scale
def log_det(self, x: np.ndarray) -> np.ndarray:
return - np.log(self.scale) * np.ones_like(x)
class FunctionTransform(AbstractDensityTransform):
def __init__(self, func, inv_func, grad, hess=None, args=(), func_grad_hess=None):
self.func = func
self.inv_func = inv_func
self.grad = grad
self.hess = hess
self.args = args
self.func_grad_hess = func_grad_hess
def transform(self, x):
return self.func(x, *self.args)
def inv_transform(self, x):
return self.inv_func(x, *self.args)
def jacobian(self, x):
return DiagonalMatrix(self.grad(x, *self.args))
def log_det(self, x: np.ndarray) -> np.ndarray:
gs = self.grad(x, *self.args)
return np.log(gs)
def log_det_grad(self, x: np.ndarray) -> Tuple[np.ndarray, np.ndarray]:
if self.func_grad_hess:
x0, gs, hs = self.func_grad_hess(x, *self.args)
else:
x0 = self.func(x, *self.args)
gs = self.grad(x, *self.args)
hs = self.hess(x, *self.args)
return np.log(gs), hs/gs
def transform_det_jac(
self, x
) -> Tuple[np.ndarray, np.ndarray, np.ndarray, LinearOperator]:
if self.func_grad_hess:
x0, gs, hs = self.func_grad_hess(x, *self.args)
else:
x0 = self.func(x, *self.args)
gs = self.grad(x, *self.args)
hs = self.hess(x, *self.args)
return x0, np.log(gs), hs/gs, DiagonalMatrix(gs)
def exp3(x):
expx = np.exp(x)
return (expx, ) * 3
exp_transform = FunctionTransform(np.exp, np.log, np.exp, func_grad_hess=exp3)
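# Editor's note: a minimal usage sketch (not part of the original module) of the
# FunctionTransform defined above. For exp_transform, f(x) = e^x, so transform and
# inv_transform round-trip and log|df/dx| = x.
def _exp_transform_example():
    x = np.array([-1.0, 0.0, 2.0])
    assert np.allclose(exp_transform.inv_transform(exp_transform.transform(x)), x)
    assert np.allclose(exp_transform.log_det(x), x)  # log(d e^x / dx) = x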
def log3(x):
ix = np.reciprocal(x)
return np.log(x), ix, -np.square(ix)
log_transform = FunctionTransform(
np.log, np.exp, np.reciprocal, func_grad_hess=log3)
def sigmoid(x, scale=1, shift=0):
return scale / (1 + np.exp(-x)) + shift
def logit(x, scale=1, shift=0):
x = (x - shift) / scale
return np.log(x) - np.log1p(-x)
def sigmoid_grad(x, scale=1, shift=0):
expx = np.exp(-x)
return scale * expx / np.square(1 + expx)
def logit_grad(x, scale=1, shift=0):
x = (x - shift) / scale
return (np.reciprocal(x) + np.reciprocal(1 - x)) / scale
def logit_hess(x, scale=1, shift=0):
x = (x - shift) / scale
return np.reciprocal(1-x) - np.reciprocal(x)
def logit_grad_hess(x, scale=1, shift=0):
x = (x - shift) / scale
ix = np.reciprocal(x)
ix1 = np.reciprocal(1 - x)
ix2 = np.square(ix)
ix12 = np.square(ix1)
return (np.log(x) - np.log1p(-x), (ix + ix1)/scale, (ix12 - ix2)/scale**2)
logistic_transform = FunctionTransform(
logit, sigmoid, logit_grad, func_grad_hess=logit_grad_hess)
def shifted_logistic(shift=0, scale=1):
return FunctionTransform(logit, sigmoid, logit_grad, func_grad_hess=logit_grad_hess, args=(scale, shift))
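# Editor's note: a small check (added for illustration, not in the original source) that
# logit and sigmoid above are mutual inverses, which is what logistic_transform and
# shifted_logistic rely on.
def _logit_sigmoid_round_trip():
    p = np.array([0.1, 0.5, 0.9])
    assert np.allclose(sigmoid(logit(p)), p)
    assert np.allclose(logit(sigmoid(np.array([-2.0, 0.0, 3.0]))), [-2.0, 0.0, 3.0])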
def ndtri_grad(x):
return np.reciprocal(_norm_pdf(ndtri(x)))
def ndtri_grad_hess(x):
f = ndtri(x)
phi = _norm_pdf(f)
grad = np.reciprocal(phi)
hess = grad**2 * f
return f, grad, hess
phi_transform = FunctionTransform(
ndtri, ndtr, ndtri_grad, func_grad_hess=ndtri_grad_hess)
class MultinomialLogitTransform(AbstractDensityTransform):
"""
    makes the multinomial logistic transform from p to x, where,
x_i = log(p_i / (1 - sum(p)))
p_i = exp(x_i) / (sum(exp(x_j) for x_j in x) + 1)
When p's n-simplex is defined by,
all(0 <= p_i <= 1 for p_i in p) and sum(p) < 1
"""
def __init__(self, axis=-1):
self.axis = axis
def _validate(self, p):
p = np.asanyarray(p)
keepdims = np.ndim(p) == self.ndim + 1
if not (keepdims or np.ndim(p) == self.ndim):
raise ValueError(
f"dimension of input must be {self.ndim} or {self.ndim + 1}")
return p, keepdims
def transform(self, p):
p = np.asanyarray(p)
lnp1 = np.log(1 - np.sum(p, axis=self.axis, keepdims=True))
lnp = np.log(p)
return lnp - lnp1
def inv_transform(self, x):
expx = np.exp(x)
return expx / (expx.sum(axis=self.axis, keepdims=True) + 1)
def jacobian(self, p):
p = np.asanyarray(p)
pn1 = 1 - np.sum(p, axis=-1, keepdims=True)
ln1p = np.log(pn1)
lnp = np.log(p)
jac = ShermanMorrison(
DiagonalMatrix(1/p),
1/np.sqrt(pn1) * np.ones_like(p)
        )
        return jac
def log_det(self, p):
p = np.asanyarray(p)
p1 = 1 - np.sum(p, axis=self.axis, keepdims=True)
# Hack to make sure summation broadcasting works correctly
log_d = (
- np.log(p).sum(axis=self.axis, keepdims=True) - np.log(p1)
) * np.full_like(p, p1.size/p.size)
return log_d
def log_det_grad(self, p):
p = np.asanyarray(p)
p1 = 1 - np.sum(p, axis=self.axis, keepdims=True)
# Hack to make sure summation broadcasting works correctly
log_d = (
- np.log(p).sum(axis=self.axis, keepdims=True) - np.log(p1)
) * np.full_like(p, p1.size/p.size)
return log_d, 1/p1 - 1/p
def transform_det_jac(
self, p
) -> Tuple[np.ndarray, np.ndarray, np.ndarray, LinearOperator]:
p = np.asanyarray(p)
pn1 = 1 - np.sum(p, axis=self.axis, keepdims=True)
ln1p = np.log(pn1)
        lnp = np.log(p)
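# Editor's note: a brief sketch (not part of the original file) of the mapping documented
# in MultinomialLogitTransform above: x_i = log(p_i / (1 - sum(p))) with inverse
# p_i = exp(x_i) / (1 + sum_j exp(x_j)), checked against the class's own methods.
def _multinomial_logit_example():
    t = MultinomialLogitTransform()
    p = np.array([0.2, 0.3, 0.1])          # a point inside the simplex: sum(p) < 1
    x = t.transform(p)
    assert np.allclose(x, np.log(p / (1 - p.sum())))
    assert np.allclose(t.inv_transform(x), p)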
import numpy as np
import pdb
import h5py
import os
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from recursive_planning.infra.datasets.save_util.record_saver import HDF5SaverBase
from utils import AttrDict
def pad_traj_timesteps(traj, max_num_actions):
"""
pad images and actions with zeros
:param traj:
:param max_num_actions:
:return:
"""
im_shape = traj.images.shape
ac_shape = traj.actions.shape
if ac_shape[0] < max_num_actions:
zeros = np.zeros([max_num_actions - im_shape[0] + 1, im_shape[1], im_shape[2], im_shape[3], im_shape[4]], dtype=np.uint8)
traj.images = np.concatenate([traj.images, zeros])
if len(ac_shape) > 1:
zeros = np.zeros([max_num_actions - ac_shape[0], ac_shape[1]])
else:
zeros = np.zeros([max_num_actions - ac_shape[0]])
        traj.actions = np.concatenate([traj.actions, zeros])
# Authors: <NAME> <<EMAIL>>
# <NAME>
#
# License: BSD (3-clause)
import logging
import warnings
import numpy as np
from scipy import linalg
from numpy.linalg import pinv
from .asr_utils import (geometric_median, fit_eeg_distribution, yulewalk,
yulewalk_filter, ma_filter, block_covariance)
class ASR():
"""Artifact Subspace Reconstruction.
Artifact subspace reconstruction (ASR) is an automated, online,
component-based artifact removal method for removing transient or
large-amplitude artifacts in multi-channel EEG recordings [1]_.
Parameters
----------
sfreq : float
Sampling rate of the data, in Hz.
cutoff: float
Standard deviation cutoff for rejection. X portions whose variance
is larger than this threshold relative to the calibration data are
considered missing data and will be removed. The most aggressive value
that can be used without losing too much EEG is 2.5. Recommended to
use with more conservative values ranging from 20 - 30.
Defaults to 20.
blocksize : int
Block size for calculating the robust data covariance and thresholds,
in samples; allows to reduce the memory and time requirements of the
robust estimators by this factor (down to Channels x Channels x Samples
x 16 / Blocksize bytes) (default=100).
win_len : float
Window length (s) that is used to check the data for artifact content.
This is ideally as long as the expected time scale of the artifacts but
not shorter than half a cycle of the high-pass filter that was used
(default=0.5).
win_overlap : float
Window overlap fraction. The fraction of two successive windows that
overlaps. Higher overlap ensures that fewer artifact portions are going
to be missed, but is slower (default=0.66).
max_dropout_fraction : float
Maximum fraction of windows that can be subject to signal dropouts
(e.g., sensor unplugged), used for threshold estimation (default=0.1).
min_clean_fraction : float
Minimum fraction of windows that need to be clean, used for threshold
estimation (default=0.25).
ab : 2-tuple | None
Coefficients (A, B) of an IIR filter that is used to shape the
spectrum of the signal when calculating artifact statistics. The
output signal does not go through this filter. This is an optional way
to tune the sensitivity of the algorithm to each frequency component
of the signal. The default filter is less sensitive at alpha and beta
frequencies and more sensitive at delta (blinks) and gamma (muscle)
frequencies. Defaults to None.
max_bad_chans : float
The maximum number or fraction of bad channels that a retained window
may still contain (more than this and it is removed). Reasonable range
is 0.05 (very clean output) to 0.3 (very lax cleaning of only coarse
artifacts) (default=0.2).
method : {'riemann', 'euclid'}
Method to use. If riemann, use the riemannian-modified version of
ASR [2]_. Currently, only euclidean ASR is supported. Defaults to
"euclid".
Attributes
----------
sfreq: array, shape=(n_channels, filter_order)
Filter initial conditions.
cutoff: float
Standard deviation cutoff for rejection.
blocksize : int
Block size for calculating the robust data covariance and thresholds.
win_len : float
Window length (s) that is used to check the data for artifact content.
win_overlap : float
Window overlap fraction.
max_dropout_fraction : float
Maximum fraction of windows that can be subject to signal dropouts.
min_clean_fraction : float
Minimum fraction of windows.
max_bad_chans : float
The maximum fraction of bad channels.
method : {'riemann', 'euclid'}
Method to use.
A, B: arrays
Coefficients of an IIR filter that is used to shape the spectrum of the
signal when calculating artifact statistics. The output signal does not
go through this filter. This is an optional way to tune the sensitivity
of the algorithm to each frequency component of the signal. The default
filter is less sensitive at alpha and beta frequencies and more
sensitive at delta (blinks) and gamma (muscle) frequencies.
M : array, shape=(channels, channels)
The mixing matrix to fit ASR data.
T : array, shape=(channels, channels)
The mixing matrix to fit ASR data.
References
----------
.. [1] <NAME>., & <NAME>. (2016). U.S. Patent Application No.
14/895,440. https://patents.google.com/patent/US20160113587A1/en
.. [2] <NAME>., <NAME>., <NAME>., & <NAME>.
(2019). A Riemannian Modification of Artifact Subspace Reconstruction
for EEG Artifact Handling. Frontiers in Human Neuroscience, 13.
https://doi.org/10.3389/fnhum.2019.00141
"""
def __init__(self, sfreq, cutoff=20, blocksize=100, win_len=0.5,
win_overlap=0.66, max_dropout_fraction=0.1,
min_clean_fraction=0.25, ab=None, max_bad_chans=0.1,
method="euclid"):
# set attributes
self.sfreq = sfreq
self.cutoff = cutoff
self.blocksize = blocksize
self.win_len = win_len
self.win_overlap = win_overlap
self.max_dropout_fraction = max_dropout_fraction
self.min_clean_fraction = min_clean_fraction
self.max_bad_chans = max_bad_chans
self.method = "euclid" # NOTE: riemann is not yet available
self._fitted = False
# set default yule-walker filter
if ab is None:
yw_f = np.array([0, 2, 3, 13, 16, 40,
np.minimum(80.0, (self.sfreq / 2.0) - 1.0),
self.sfreq / 2.0]) * 2.0 / self.sfreq
            yw_m = np.array([3, 0.75, 0.33, 0.33, 1, 1, 3, 3])
import torch
import torchvision
import torchvision.transforms as tvt
import torch.nn as nn
import matplotlib.pyplot as plt
import numpy as np
from torch import optim
import torch.nn.functional as F
import math as m
import time
import os
#from google.colab import drive
import random
from PIL import Image
from torch.autograd import Variable, variable
from PIL import Image
import numpy
import tensorflow as tf
from pathlib import Path
import pickle
import numpy as np
import torch
import torchvision
import torch.nn.functional as F
import text_model
import test_retrieval
import torch_functions
#import datasets
from tqdm import tqdm as tqdm
import PIL
import argparse
import datasets
import img_text_composition_models
Path1=r"C:\MMaster\Files"
Path1=r"D:\personal\master\MyCode\files"
#Path1=r"C:\MMaster\Files"
################# Support Functions Section #################
def dataset(batch_size_all):
trainset = Fashion200k(
path=Path1,
split='train',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
trainloader = torch.utils.data.DataLoader(trainset, batch_size=batch_size_all,
shuffle=False, num_workers=2)
return trainset,trainloader
def euclideandistance(signature,signatureimg):
from scipy.spatial import distance
return distance.euclidean(signature, signatureimg)
#.detach().numpy()
def testvaluessame():
train = datasets.Fashion200k(
path=Path1,
split='train',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
])
trig= img_text_composition_models.TIRG([t.encode().decode('utf-8') for t in train.get_all_texts()],512)
trig.load_state_dict(torch.load(Path1+r'\fashion200k.tirg.iter160k.pth' , map_location=torch.device('cpu') )['model_state_dict'])
trig.eval()
query='women/tops/blouses/91422080/91422080_0.jpeg'
qttext='replace sunrise with pleat-neck'
target='women/tops/sleeveless_and_tank_tops/90068628/90068628_0.jpeg'
text=[]
text.append(qttext)
text.append(qttext)
img = Image.open(Path1+'/'+query)
img = img.convert('RGB')
img=transform(img)
img2 = Image.open(Path1+'/'+target)
img2 = img2.convert('RGB')
img2=transform(img2)
img=img.unsqueeze_(0)
img2=img2.unsqueeze_(0)
images=torch.cat([img, img2], dim=0)
trigdataQ=trig.compose_img_text(images,text)
trigdataQ1=trig.compose_img_text(images,text)
print('...........')
print(trigdataQ)
print(trigdataQ1)
def getbetatrainNot():
train = datasets.Fashion200k(
path=Path1,
split='train',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
trig= img_text_composition_models.TIRG([t.encode().decode('utf-8') for t in train.get_all_texts()],512)
trig.load_state_dict(torch.load(Path1+r'\fashion200k.tirg.iter160k.pth' , map_location=torch.device('cpu') )['model_state_dict'])
trig.eval()
imgs = []
mods = []
trigdata=[]
target=[]
imgdata=[]
for Data in tqdm(train):
imgs += [Data['source_img_data']]
mods += [Data['mod']['str']]
target +=[Data['target_img_data']]
imgs = torch.stack(imgs).float()
imgs = torch.autograd.Variable(imgs)
f = trig.compose_img_text(imgs, mods).data.cpu().numpy()
target = torch.stack(target).float()
target = torch.autograd.Variable(target)
f2 = trig.extract_img_feature(target).data.cpu().numpy()
trigdata.append(f[0])
imgdata.append(f2[0])
imgs = []
mods = []
target = []
trigdata=np.array(trigdata)
imgdata=np.array(imgdata)
Ntrigdata=trigdata
Nimgdata=imgdata
Ntrig2=[]
for i in range(Ntrigdata.shape[0]):
Ntrigdata[i, :] /= np.linalg.norm(Ntrigdata[i, :])
for i in range(Nimgdata.shape[0]):
Nimgdata[i, :] /= np.linalg.norm(Nimgdata[i, :])
for i in range(Ntrigdata.shape[0]):
Ntrig2.append(np.insert(Ntrigdata[i],0, 1))
Ntrig2=np.array(Ntrig2)
Ntrigdata1=Ntrig2.transpose()
X1=np.matmul(Ntrigdata1,Ntrig2)
X2=np.linalg.inv(X1)
X3=np.matmul(X2,Ntrigdata1)
Nbeta=np.matmul(X3,Nimgdata)
with open(Path1+r"/"+'BetaNot.txt', 'wb') as fp:
pickle.dump(Nbeta, fp)
def GetValuestrain15time():
with open (Path1+"/trainBetaNormalized.txt", 'rb') as fp:
BetaNormalize = pickle.load(fp)
trainset = datasets.Fashion200k(
path=Path1,
split='train',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
trainloader = trainset.get_loader(
batch_size=2,
shuffle=True,
drop_last=True,
num_workers=0)
testset = TestFashion200k(
path=Path1,
split='test',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
trig= TIRG([t.encode().decode('utf-8') for t in trainset.get_all_texts()],512)
trig.load_state_dict(torch.load(Path1+r'\checkpoint_fashion200k.pth' , map_location=torch.device('cpu') )['model_state_dict'])
opt = argparse.ArgumentParser()
opt.add_argument('--batch_size', type=int, default=2)
opt.add_argument('--dataset', type=str, default='fashion200k')
opt.batch_size =1
opt.dataset='fashion200k'
Results=[]
for i in range(15):
for name, dataset in [ ('train', trainset)]: #,('test', testset)]:
# betaNor="['1 ---> 5.27', '5 ---> 14.39', '10 ---> 21.6', '50 ---> 43.830000000000005', '100 ---> 55.33']"
# Results.append('No.'+str(i)+' DataSet='+name+' Type= BetaNormalized '+' Result=' +betaNor)
try:
betaNor = test_retrieval.testbetanormalizednot(opt, trig, dataset,BetaNormalize)
print(name,' BetaNormalized: ',betaNor)
Results.append('No.'+str(i)+' DataSet='+name+' Type= BetaNormalized '+' Result=' +betaNor)
except:
print('ERROR')
try:
asbook = test_retrieval.test(opt, trig, dataset)
print(name,' As PaPer: ',asbook)
Results.append('No.'+str(i)+' DataSet='+name+' Type= As PaPer '+' Result=' +betaNor)
except:
print('ERROR')
with open(Path1+r"/"+'Results15time.txt', 'wb') as fp:
pickle.dump(Results, fp)
def distanceBetaand():
with open (Path1+"/Beta.txt", 'rb') as fp:
Beta = pickle.load(fp)
trainset = datasets.Fashion200k(
path=Path1,
split='train',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
test = datasets.Fashion200k(
path=Path1,
split='test',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
trig= img_text_composition_models.TIRG([t.encode().decode('utf-8') for t in trainset.get_all_texts()],512)
trig.load_state_dict(torch.load(Path1+r'\fashion200k.tirg.iter160k.pth' , map_location=torch.device('cpu') )['model_state_dict'])
trig.eval()
imgs = []
mods = []
target = []
batchsize=2
Distance=[]
sourceid=[]
targetid=[]
countbeta=0
counttrig=0
for Data in tqdm(trainset):
imgs += [Data['source_img_data']]
mods += [Data['mod']['str']]
target +=[Data['target_img_data']]
sourceid.append(Data['source_img_id'])
targetid.append(Data['target_img_id'])
imgs = torch.stack(imgs).float()
imgs = torch.autograd.Variable(imgs)
f = trig.compose_img_text(imgs, mods).data.cpu().numpy()
target = torch.stack(target).float()
target = torch.autograd.Variable(target)
f2 = trig.extract_img_feature(target).data.cpu().numpy()
trigdata=f[0]
trigbeta = np.insert(trigdata,0, 1)
trigbeta=np.matmul(trigbeta,Beta)
Targetdata = f2[0]
SourceTarget=euclideandistance(trigdata,Targetdata)
betaTarget=euclideandistance(trigbeta,Targetdata)
if(SourceTarget > betaTarget):
countbeta= countbeta+1
else:
counttrig=counttrig+1
# opsig={'source':sourceid[0],'target':targetid[0],'disbeta':betaTarget,'disorig':SourceTarget}
# Distance.append(opsig )
imgs = []
mods = []
target = []
sourceid=[]
targetid=[]
with open(Path1+r"/"+'Distance.txt', 'wb') as fp:
pickle.dump(Distance, fp)
print('Train Data :Count beta less:',countbeta , ' ,countbeta bigger:',counttrig)
imgs = []
mods = []
target = []
batchsize=2
Distance=[]
sourceid=[]
targetid=[]
countbeta=0
counttrig=0
for Data in tqdm(test.get_test_queries()):
imgs += [test.get_img(Data['source_img_id'])]
mods += [Data['mod']['str']]
target +=[test.get_img(Data['target_id'])]
imgs = torch.stack(imgs).float()
imgs = torch.autograd.Variable(imgs)
f = trig.compose_img_text(imgs, mods).data.cpu().numpy()
target = torch.stack(target).float()
target = torch.autograd.Variable(target)
f2 = trig.extract_img_feature(target).data.cpu().numpy()
trigdata=f[0]
trigbeta = np.insert(trigdata,0, 1)
trigbeta=np.matmul(trigbeta,Beta)
Targetdata = f2[0]
SourceTarget=euclideandistance(trigdata,Targetdata)
betaTarget=euclideandistance(trigbeta,Targetdata)
if(SourceTarget > betaTarget):
countbeta= countbeta+1
else:
counttrig=counttrig+1
imgs = []
mods = []
target = []
sourceid=[]
targetid=[]
print('Test Data :Count beta less:',countbeta , ' ,countbeta bigger:',counttrig)
################# Beta From Test Set Section #################
def getbeta():
train = datasets.Fashion200k(
path=Path1,
split='train',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
test = datasets.Fashion200k(
path=Path1,
split='test',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
trig= img_text_composition_models.TIRG([t.encode().decode('utf-8') for t in train.get_all_texts()],512)
trig.load_state_dict(torch.load(Path1+r'\fashion200k.tirg.iter160k.pth' , map_location=torch.device('cpu') )['model_state_dict'])
trig.eval()
imgs = []
mods = []
trigdata=[]
target=[]
imgdata=[]
all_source_captions=[]
all_target_captions=[]
for Data in tqdm(test.get_test_queries()):
imgs += [test.get_img(Data['source_img_id'])]
mods += [Data['mod']['str']]
target +=[test.get_img(Data['target_id'])]
all_source_captions +=Data['source_caption']
all_target_captions +=Data['target_caption']
imgs = torch.stack(imgs).float()
imgs = torch.autograd.Variable(imgs)
f = trig.compose_img_text(imgs, mods).data.cpu().numpy()
target = torch.stack(target).float()
target = torch.autograd.Variable(target)
f2 = trig.extract_img_feature(target).data.cpu().numpy()
trigdata.append(f[0])
imgdata.append(f2[0])
imgs = []
mods = []
target = []
with open(Path1+r"/"+'test_all_source_captionsG.pkl', 'wb') as fp:
pickle.dump(all_source_captions, fp)
with open(Path1+r"/"+'test_all_target_captionsG.pkl', 'wb') as fp:
pickle.dump(all_target_captions, fp)
trigdata=np.array(trigdata)
imgdata=np.array(imgdata)
with open(Path1+r"/"+'test_all_queriesG.pkl', 'wb') as fp:
pickle.dump(trigdata, fp)
with open(Path1+r"/"+'test_all_imgsG.pkl', 'wb') as fp:
pickle.dump(imgdata, fp)
Ntrigdata=trigdata
Nimgdata=imgdata
Ntrig2=[]
trigdata2=[]
for i in range(Ntrigdata.shape[0]):
Ntrigdata[i, :] /= np.linalg.norm(Ntrigdata[i, :])
for i in range(Nimgdata.shape[0]):
Nimgdata[i, :] /= np.linalg.norm(Nimgdata[i, :])
for i in range(Ntrigdata.shape[0]):
Ntrig2.append(np.insert(Ntrigdata[i],0, 1))
Ntrig2=np.array(Ntrig2)
Ntrigdata1=Ntrig2.transpose()
X1=np.matmul(Ntrigdata1,Ntrig2)
X2=np.linalg.inv(X1)
X3=np.matmul(X2,Ntrigdata1)
Nbeta=np.matmul(X3,Nimgdata)
with open(Path1+r"/"+'testBetaNormalizedG.txt', 'wb') as fp:
pickle.dump(Nbeta, fp)
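# Editor's note: the block above solves the least-squares normal equations
# beta = (A^T A)^{-1} A^T Y, with A = Ntrig2 (unit-normalised features plus a leading
# column of ones) and Y = Nimgdata. A sketch of the equivalent, numerically more stable
# np.linalg.lstsq route (hypothetical shapes, not part of the original pipeline):
def _normal_equations_sketch():
    A = np.random.randn(1000, 20)                    # [n_samples, n_features] design
    Y = np.random.randn(1000, 5)                     # [n_samples, n_targets] targets
    beta_normal = np.linalg.inv(A.T @ A) @ A.T @ Y   # what getbeta() computes above
    beta_lstsq, *_ = np.linalg.lstsq(A, Y, rcond=None)
    assert np.allclose(beta_normal, beta_lstsq, atol=1e-6)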
def GetValues():
with open (Path1+"/testBetaNormalized.txt", 'rb') as fp:
Nbeta = pickle.load(fp)
train = datasets.Fashion200k(
path=Path1,
split='train',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
test = datasets.Fashion200k(
path=Path1,
split='test',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
trig= img_text_composition_models.TIRG([t.encode().decode('utf-8') for t in train.get_all_texts()],512)
trig.load_state_dict(torch.load(Path1+r'\fashion200k.tirg.iter160k.pth' , map_location=torch.device('cpu') )['model_state_dict'])
opt = argparse.ArgumentParser()
opt.add_argument('--batch_size', type=int, default=2)
opt.add_argument('--dataset', type=str, default='fashion200k')
opt.batch_size =1
opt.dataset='fashion200k'
for name, dataset in [ ('train', train),('test', test)]: #('train', trainset),
betaNor = test_retrieval.testWbeta(opt, trig, dataset,Nbeta)
print(name,' BetaNormalized: ',betaNor)
asbook = test_retrieval.test(opt, trig, dataset)
print(name,' As PaPer: ',asbook)
################# Beta From Train Set Section #################
def getbetatrain():
train = datasets.Fashion200k(
path=Path1,
split='train',
transform=torchvision.transforms.Compose([
torchvision.transforms.Resize(224),
torchvision.transforms.CenterCrop(224),
torchvision.transforms.ToTensor(),
torchvision.transforms.Normalize([0.485, 0.456, 0.406],
[0.229, 0.224, 0.225])
]))
trig= img_text_composition_models.TIRG([t.encode().decode('utf-8') for t in train.get_all_texts()],512)
trig.load_state_dict(torch.load(Path1+r'\fashion200k.tirg.iter160k.pth' , map_location=torch.device('cpu') )['model_state_dict'])
trig.eval()
imgs = []
mods = []
trigdata=[]
target=[]
imgdata=[]
#m = nn.ReLU()
for i in range(172048): #172048
print('get images=',i,end='\r')
item = train[i]
imgs += [item['source_img_data']]
mods += [item['mod']['str']]
target += [item['target_img_data']]
imgs = torch.stack(imgs).float()
imgs = torch.autograd.Variable(imgs)
f = trig.compose_img_text(imgs, mods).data.cpu().numpy()
target = torch.stack(target).float()
target = torch.autograd.Variable(target)
f2 = trig.extract_img_feature(target).data.cpu().numpy()
trigdata.append(f[0])
imgdata.append(f2[0])
imgs = []
mods = []
target = []
trigdata=np.array(trigdata)
imgdata=np.array(imgdata)
Ntrig2=[]
for i in range(trigdata.shape[0]):
trigdata[i, :] /= np.linalg.norm(trigdata[i, :])
for i in range(imgdata.shape[0]):
imgdata[i, :] /= np.linalg.norm(imgdata[i, :])
for i in range(trigdata.shape[0]):
Ntrig2.append(np.insert(trigdata[i],0, 1))
    print("Ntrig2 length", len(Ntrig2), "first element", Ntrig2[0])
Ntrig2=np.array(Ntrig2)
Ntrigdata1=Ntrig2.transpose()
    X1=np.matmul(Ntrigdata1,Ntrig2)
from tqdm import tqdm
from src import simulation, var_iv
import pandas as pd
import numpy as np
from multiprocessing import Pool, cpu_count
MSE = lambda x: (x**2).sum()
# Set dimensions
dI = 3
dX = 2
dH = 1
dY = 1
d = dI + dX + dH + dY
tol = 0.1
# Get indices for data
id_I, id_X, id_H, id_Y = np.split(np.arange(d), np.cumsum([dI, dX, dH, dY]))[:4]
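# Editor's note (illustrative check, not in the original): with dI=3, dX=2, dH=1, dY=1
# the split above yields the index blocks [0 1 2], [3 4], [5], [6]; the [:4] drops the
# empty remainder that np.split appends after the last cut point.
assert id_I.tolist() == [0, 1, 2]
assert id_X.tolist() == [3, 4] and id_H.tolist() == [5] and id_Y.tolist() == [6]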
# Get skeleton of matrix A
skel = simulation.get_skeleton(id_I, id_X, id_H, id_Y)
def sample_alphas(skel):
    return skel * np.random.uniform(0.1, 0.9, size=skel.shape)
import os
import shutil
import zipfile
import urllib.request
import keras
import skimage
import numpy as np
import skimage.io as ski_io
from pycocotools_m.coco import COCO
from pycocotools_m import mask as maskUtils
from mrcnn import utils
DEFAULT_DATASET_YEAR = "2017"
class CocoDataset(utils.Dataset):
def load_coco(self, dataset_dir, subset, year=DEFAULT_DATASET_YEAR, class_ids=None, cat_nms=None, return_coco=False, auto_download=False):
"""Load a subset of the COCO dataset.
dataset_dir: The root directory of the COCO dataset.
subset: What to load (train, val, minival, valminusminival)
year: What dataset year to load (2014, 2017) as a string, not an integer
class_ids: If provided, only loads images that have the given classes.
class_map: TODO: Not implemented yet. Supports maping classes from
different datasets to the same class ID.
return_coco: If True, returns the COCO object.
auto_download: Automatically download and unzip MS-COCO images and annotations
"""
if auto_download is True:
self.auto_download(dataset_dir, subset, year)
coco = COCO(
"{}/annotations/instances_{}{}.json".format(dataset_dir, subset, year))
if subset == "minival" or subset == "valminusminival":
subset = "val"
image_dir = "{}/{}{}".format(dataset_dir, subset, year)
if cat_nms:
class_ids = coco.getCatIds(catNms=cat_nms)
# Load all classes or a subset?
if not class_ids:
# All classes
class_ids = sorted(coco.getCatIds())
self.class_ids = class_ids
# All images or a subset?
if class_ids:
image_ids = []
for _id in class_ids:
image_ids.extend(list(coco.getImgIds(catIds=[_id])))
# Remove duplicates
image_ids = list(set(image_ids))
else:
# All images
image_ids = list(coco.imgs.keys())
# Add classes
for i in class_ids:
self.add_class("coco", i, coco.loadCats(i)[0]["name"])
# Add images
for i in image_ids:
self.add_image(
"coco", image_id=i,
path=os.path.join(image_dir, coco.imgs[i]['file_name']),
url=coco.imgs[i]['coco_url'],
width=coco.imgs[i]["width"],
height=coco.imgs[i]["height"],
annotations=coco.loadAnns(coco.getAnnIds(
imgIds=[i], catIds=class_ids, iscrowd=None)))
if return_coco:
return coco
def auto_download_annotations(self, dataDir, dataType, dataYear):
"""Download the COCO annotations if requested.
dataDir: The root directory of the COCO dataset.
dataType: What to load (train, val, minival, valminusminival)
dataYear: What dataset year to load (2014, 2017) as a string, not an integer
Note:
For 2014, use "train", "val", "minival", or "valminusminival"
For 2017, only "train" and "val" annotations are available
"""
# Create main folder if it doesn't exist yet
if not os.path.exists(dataDir):
os.makedirs(dataDir)
# Setup annotations data paths
annDir = "{}/annotations".format(dataDir)
if dataType == "minival":
annZipFile = "{}/instances_minival2014.json.zip".format(dataDir)
annFile = "{}/instances_minival2014.json".format(annDir)
annURL = "https://dl.dropboxusercontent.com/s/o43o90bna78omob/instances_minival2014.json.zip?dl=0"
unZipDir = annDir
elif dataType == "valminusminival":
annZipFile = "{}/instances_valminusminival2014.json.zip".format(
dataDir)
annFile = "{}/instances_valminusminival2014.json".format(annDir)
annURL = "https://dl.dropboxusercontent.com/s/s3tw5zcg7395368/instances_valminusminival2014.json.zip?dl=0"
unZipDir = annDir
else:
annZipFile = "{}/annotations_trainval{}.zip".format(
dataDir, dataYear)
annFile = "{}/instances_{}{}.json".format(
annDir, dataType, dataYear)
annURL = "http://images.cocodataset.org/annotations/annotations_trainval{}.zip".format(
dataYear)
unZipDir = dataDir
# print("Annotations paths:"); print(annDir); print(annFile); print(annZipFile); print(annURL)
# Download annotations if not available locally
if not os.path.exists(annDir):
os.makedirs(annDir)
if not os.path.exists(annFile):
if not os.path.exists(annZipFile):
print("Downloading zipped annotations to " + annZipFile + " ...")
with urllib.request.urlopen(annURL) as resp, open(annZipFile, 'wb') as out:
shutil.copyfileobj(resp, out)
print("... done downloading.")
print("Unzipping " + annZipFile)
with zipfile.ZipFile(annZipFile, "r") as zip_ref:
zip_ref.extractall(unZipDir)
print("... done unzipping")
print("Will use annotations in " + annFile)
def auto_download(self, dataDir, dataType, dataYear):
"""Download the COCO dataset/annotations if requested.
dataDir: The root directory of the COCO dataset.
dataType: What to load (train, val, minival, valminusminival)
dataYear: What dataset year to load (2014, 2017) as a string, not an integer
Note:
For 2014, use "train", "val", "minival", or "valminusminival"
For 2017, only "train" and "val" annotations are available
"""
# Setup paths and file names
if dataType == "minival" or dataType == "valminusminival":
imgDir = "{}/{}{}".format(dataDir, "val", dataYear)
imgZipFile = "{}/{}{}.zip".format(dataDir, "val", dataYear)
imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format(
"val", dataYear)
else:
imgDir = "{}/{}{}".format(dataDir, dataType, dataYear)
imgZipFile = "{}/{}{}.zip".format(dataDir, dataType, dataYear)
imgURL = "http://images.cocodataset.org/zips/{}{}.zip".format(
dataType, dataYear)
# print("Image paths:"); print(imgDir); print(imgZipFile); print(imgURL)
# Create main folder if it doesn't exist yet
if not os.path.exists(dataDir):
os.makedirs(dataDir)
# Download images if not available locally
if not os.path.exists(imgDir):
os.makedirs(imgDir)
print("Downloading images to " + imgZipFile + " ...")
with urllib.request.urlopen(imgURL) as resp, open(imgZipFile, 'wb') as out:
shutil.copyfileobj(resp, out)
print("... done downloading.")
print("Unzipping " + imgZipFile)
with zipfile.ZipFile(imgZipFile, "r") as zip_ref:
zip_ref.extractall(dataDir)
print("... done unzipping")
print("Will use images in " + imgDir)
# Setup annotations data paths
annDir = "{}/annotations".format(dataDir)
if dataType == "minival":
annZipFile = "{}/instances_minival2014.json.zip".format(dataDir)
annFile = "{}/instances_minival2014.json".format(annDir)
annURL = "https://dl.dropboxusercontent.com/s/o43o90bna78omob/instances_minival2014.json.zip?dl=0"
unZipDir = annDir
elif dataType == "valminusminival":
annZipFile = "{}/instances_valminusminival2014.json.zip".format(
dataDir)
annFile = "{}/instances_valminusminival2014.json".format(annDir)
annURL = "https://dl.dropboxusercontent.com/s/s3tw5zcg7395368/instances_valminusminival2014.json.zip?dl=0"
unZipDir = annDir
else:
annZipFile = "{}/annotations_trainval{}.zip".format(
dataDir, dataYear)
annFile = "{}/instances_{}{}.json".format(
annDir, dataType, dataYear)
annURL = "http://images.cocodataset.org/annotations/annotations_trainval{}.zip".format(
dataYear)
unZipDir = dataDir
# print("Annotations paths:"); print(annDir); print(annFile); print(annZipFile); print(annURL)
# Download annotations if not available locally
if not os.path.exists(annDir):
os.makedirs(annDir)
if not os.path.exists(annFile):
if not os.path.exists(annZipFile):
print("Downloading zipped annotations to " + annZipFile + " ...")
with urllib.request.urlopen(annURL) as resp, open(annZipFile, 'wb') as out:
shutil.copyfileobj(resp, out)
print("... done downloading.")
print("Unzipping " + annZipFile)
with zipfile.ZipFile(annZipFile, "r") as zip_ref:
zip_ref.extractall(unZipDir)
print("... done unzipping")
print("Will use annotations in " + annFile)
def load_mask_one_hot(self, image_id):
mask, class_ids = self.load_mask(image_id)
output = np.zeros([len(self.class_ids), mask.shape[0],
mask.shape[1]], dtype=np.float32)
mask = np.moveaxis(mask, -1, 0)
for det in range(len(mask)):
detection = np.array(mask[det])
cl = class_ids[det]
output[cl] = output[cl] + detection
output = np.ceil(output / np.max(output))
return np.moveaxis(output, 0, -1)
def load_mask(self, image_id):
"""Load instance masks for the given image.
Different datasets use different ways to store masks. This
function converts the different mask format to one format
in the form of a bitmap [height, width, instances].
Returns:
masks: A bool array of shape [height, width, instance count] with
one mask per instance.
class_ids: a 1D array of class IDs of the instance masks.
"""
# If not a COCO image, delegate to parent class.
image_info = self.image_info[image_id]
if image_info["source"] != "coco":
return super(CocoDataset, self).load_mask(image_id)
instance_masks = []
class_ids = []
annotations = self.image_info[image_id]["annotations"]
# Build mask of shape [height, width, instance_count] and list
# of class IDs that correspond to each channel of the mask.
for annotation in annotations:
class_id = self.map_source_class_id(
"coco.{}".format(annotation['category_id']))
if class_id:
m = self.annToMask(annotation, image_info["height"],
image_info["width"])
# Some objects are so small that they're less than 1 pixel area
# and end up rounded out. Skip those objects.
if m.max() < 1:
continue
# Is it a crowd? If so, use a negative class ID.
if annotation['iscrowd']:
# Use negative class ID for crowds
class_id *= -1
# For crowd masks, annToMask() sometimes returns a mask
# smaller than the given dimensions. If so, resize it.
if m.shape[0] != image_info["height"] or m.shape[1] != image_info["width"]:
m = np.ones(
[image_info["height"], image_info["width"]], dtype=bool)
instance_masks.append(m)
class_ids.append(class_id)
# Pack instance masks into an array
if class_ids:
mask = np.stack(instance_masks, axis=2).astype(np.bool)
class_ids = np.array(class_ids, dtype=np.int32)
return mask, class_ids
else:
# Call super class to return an empty mask
return super(CocoDataset, self).load_mask(image_id)
def load_image_url(self, image_id):
return ski_io.imread(self.image_info[image_id]['url'])
def image_reference(self, image_id):
"""Return a link to the image in the COCO Website."""
info = self.image_info[image_id]
if info["source"] == "coco":
return "http://cocodataset.org/#explore?id={}".format(info["id"])
else:
super(CocoDataset, self).image_reference(image_id)
# The following two functions are from pycocotools with a few changes.
def annToRLE(self, ann, height, width):
"""
Convert annotation which can be polygons, uncompressed RLE to RLE.
:return: binary mask (numpy 2D array)
"""
segm = ann['segmentation']
if isinstance(segm, list):
# polygon -- a single object might consist of multiple parts
# we merge all parts into one mask rle code
rles = maskUtils.frPyObjects(segm, height, width)
rle = maskUtils.merge(rles)
elif isinstance(segm['counts'], list):
# uncompressed RLE
rle = maskUtils.frPyObjects(segm, height, width)
else:
# rle
rle = ann['segmentation']
return rle
def annToMask(self, ann, height, width):
"""
Convert annotation which can be polygons, uncompressed RLE, or RLE to binary mask.
:return: binary mask (numpy 2D array)
"""
rle = self.annToRLE(ann, height, width)
m = maskUtils.decode(rle)
return m
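# Editor's note: a standalone sketch (hypothetical polygon, not from the dataset) of the
# polygon -> RLE -> binary-mask conversion that annToRLE/annToMask wrap above.
def _ann_to_mask_sketch():
    h, w = 4, 6
    polygon = [[1, 1, 4, 1, 4, 3, 1, 3]]   # one polygon part: x0, y0, x1, y1, ...
    rles = maskUtils.frPyObjects(polygon, h, w)
    rle = maskUtils.merge(rles)
    m = maskUtils.decode(rle)               # binary mask of shape (h, w)
    assert m.shape == (h, w)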
class DataGenerator(keras.utils.Sequence):
'Generates data for Keras'
def __init__(self, cat_nms, path='./data/coco', subset='train', batch_size=32, dim=(224, 224), shuffle=True, n_channels=3):
'Initialization'
self.dim = dim
self.batch_size = batch_size
self.cat_nms = cat_nms
self.subset = subset
self.n_classes = len(cat_nms) + 1
self.shuffle = shuffle
self.n_channels = n_channels
coco_dataset = CocoDataset()
coco_dataset.load_coco(path, subset, year='2017',
auto_download=True, cat_nms=cat_nms)
coco_dataset.prepare()
self.coco_dataset = coco_dataset
self.image_ids = coco_dataset.image_ids
self.on_epoch_end()
def __len__(self):
'Denotes the number of batches per epoch'
return int(np.floor(len(self.image_ids) / self.batch_size))
def __getitem__(self, index):
'Generate one batch of data'
# Generate indexes of the batch
indexes = self.indexes[index *
self.batch_size:(index + 1) * self.batch_size]
# Find list of IDs
image_ids_temp = [self.image_ids[k] for k in indexes]
# Generate data
X, y = self.__data_generation(image_ids_temp)
return X, y
def on_epoch_end(self):
'Updates indexes after each epoch'
self.indexes = np.arange(len(self.image_ids))
if self.shuffle == True:
            np.random.shuffle(self.indexes)
import matplotlib as mpl
import matplotlib.pyplot as plt
import numpy as np
import scipy.stats as stats
from scipy.optimize import root, brentq
from scipy.interpolate import interp1d
__all__ = ['estimate_quantile']
def estimate_quantile(data, q, alpha=0.05, method='exact', weights=None, nsamples=10000):
"""Calculate lower and upper CI of a given quantile using exact method,
based on beta distribution
<NAME> (1999) Calculating nonparametric confidence intervals
for quantiles using fractional order statistics, Journal of
Applied Statistics, 26:3, 343-353, DOI: 10.1080/02664769922458
<NAME>, <NAME>, <NAME>. An Investigation of Quantile Function
Estimators Relative to Quantile Confidence Interval Coverage.
Commun Stat Theory Methods. 2015;44(10):2107-2135.
doi: 10.1080/03610926.2013.775304. PMID: 26924881;
PMCID: PMC4768491.
Parameters
----------
data : np.array
Data
q : float, [0, 1]
Quantile
alpha : float
Desired significance level
method :str
"exact" or "approximate"
Returns
-------
Lower and upper bound of the quantile
"""
def _est_bound(n, q, b):
"""Function to estimate the upper and lower bound
b is targeted lower or upper CI bound"""
return brentq(lambda x: stats.beta.cdf(q, (n+1)*x, (n+1)*(1-x)) - b, 1e-8, 1-1e-8)
if not weights is None:
if method != 'bootstrap':
print('Using bootstrap method to accomodate weights!')
method = 'bootstrap'
n = len(data)
if q > (1 - 1e-7):
q = 1 - 1e-7
if q < 1e-7:
q = 1e-7
if method == 'exact':
lb = _est_bound(n, q, 1 - (alpha/2))
ub = _est_bound(n, q, alpha/2)
estx, lx, ux = np.quantile(data, [q, lb, ub], interpolation='linear')
elif method == 'approximate':
pn = (n+1) * q
qn = (n+1) * (1-q)
lb = stats.beta.ppf(alpha/2, pn, qn)
ub = stats.beta.ppf(1 - alpha/2, pn, qn)
estx, lx, ux = np.quantile(data, [q, lb, ub], interpolation='linear')
elif method == 'bootstrap':
bsamp = np.zeros(nsamples)
ndata = len(data)
if weights is None:
for i in range(nsamples):
bsamp[i] = np.quantile(np.random.choice(data, size=ndata, replace=True), q)
estx = np.quantile(data, q, interpolation='linear')
else:
w = weights / weights.sum()
for i in range(nsamples):
rind = np.random.choice(np.arange(ndata), size=ndata, replace=True)
bsamp[i] = weighted_quantile(data[rind], q, weights=w[rind])
estx = weighted_quantile(data, q, weights=w)
lx, ux = np.quantile(bsamp, [alpha/2, 1 - alpha/2])
lb, ub = np.nan, np.nan
return estx, lx, ux, q, lb, ub
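# Editor's note: a minimal usage sketch (synthetic data, added for illustration) of
# estimate_quantile defined above -- the median of a normal sample with a 95% CI.
def _estimate_quantile_example():
    data = np.random.normal(loc=0.0, scale=1.0, size=500)
    estx, lx, ux, q, lb, ub = estimate_quantile(data, q=0.5, alpha=0.05, method='exact')
    assert lx <= estx <= ux   # the point estimate lies inside its confidence bounds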
def ecdf(x, weights=None, reverse=True, make_step=False):
"""
For reverse = True:
Y is proportion of samples >= X or Pr(X>=x)
For reverse = False:
Y is proportion of samples <= X or Pr(X<=x)
"""
if weights is None:
weights = np.zeros(len(x))
x = np.array(x, copy=True)
x.sort()
if reverse:
x = x[::-1]
nobs = len(x)
y = np.linspace(1./nobs, 1, nobs)
if make_step:
x = np.concatenate(([x[0]], np.repeat(x[1:].ravel(), 2)))
y = np.repeat(y.ravel(), 2)[:-1]
return x, y
def weighted_quantile(data, q, inverse=False, weights=None, reverse=False):
"""
q : quantile in [0-1]!
weights
inverse : bool
If True then q is treated as a new data point and its corresponding quantile will be returned.
https://stackoverflow.com/questions/21844024/weighted-percentile-using-numpy
"""
if weights is None:
weights = np.ones(len(data))
ix = np.argsort(data)
if reverse:
ix = ix[::-1]
data = data[ix] # sort data
weights = weights[ix] # sort weights
cdf = (np.cumsum(weights) - 0.5 * weights) / np.sum(weights) # 'like' a CDF function
if not inverse:
out = np.interp(q, cdf, data)
else:
out = np.interp(q, data, cdf, left=np.min(cdf)/2, right=1 - np.min(cdf)/2)
return out
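# Editor's note: a small sketch (made-up numbers) of weighted_quantile above: with equal
# weights it reproduces the ordinary median, and up-weighting large values pulls it up.
def _weighted_quantile_example():
    data = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
    assert weighted_quantile(data, 0.5, weights=np.ones(5)) == 3.0
    assert weighted_quantile(data, 0.5, weights=np.array([1.0, 1.0, 1.0, 1.0, 10.0])) > 3.0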
def plot_recdfs(data, quantiles=None, keys=None, logscale=True, make_step=False, alpha=0.05, method='exact', palette=None):
"""
SLOW for large datasets because it computes the CI at every data point.
Could easily speed this up if needed.
"""
if keys is None:
keys = data.keys()
if palette is None:
palette = mpl.cm.Set3.colors
figh = plt.figure(figsize=(9, 7))
axh = figh.add_axes([0.1, 0.1, 0.7, 0.8], xscale='log' if logscale else 'linear')
for k, color in zip(keys, palette):
dat = data[k]
dat = dat[~np.isnan(dat)]
x, y = ecdf(dat)
if quantiles is None:
qvec = y
else:
qvec = quantiles
n = len(qvec)
estx = np.zeros(n)
lq = np.zeros(n)
uq = np.zeros(n)
for yi, yy in enumerate(qvec):
estx[yi], lx, ux, estq, lq[yi], uq[yi] = estimate_quantile(dat, 1 - yy, alpha=alpha, method=method)
plt.fill_between(estx, y1=1 - lq, y2=1 - uq, color=color, alpha=0.3)
plt.plot(x, y, '-', color=color, label=k)
plt.ylabel('Pr(X\u2265x)')
plt.ylim((0, 1))
plt.yticks(np.arange(11)/10)
plt.legend(loc='upper left', bbox_to_anchor=[1, 1])
return figh
def test_plot(n1=20, n2=10000):
data = {'A1':np.random.normal(40, 5, size=n1),
'A2':np.random.normal(40, 5, size=n2),
'B1':np.random.lognormal(0.5, 0, size=n1),
'B2':np.random.lognormal(0.5, 0, size=n2)}
"""Plot AVG of 10 ECDFs based on n1 and see if it looks like n2 ECDF to check for bias"""
xmat = []
for i in range(5000):
x, y = ecdf(np.random.normal(40, 5, size=n1))
xmat.append(x[:,None])
x1 = np.mean(np.concatenate(xmat, axis=1), axis=1)
plt.figure(figsize=(10,10))
plt.plot(x1, y)
x2, y2 = ecdf(np.random.normal(40, 5, size=n2))
plt.plot(x2, y2, '-r')
plt.grid('both')
    plt.yticks(np.arange(21))
from __future__ import print_function
import argparse
from collections import OrderedDict
import json
import os
import logging
from keras.callbacks import EarlyStopping
from sklearn.preprocessing import normalize
from sklearn.metrics import roc_curve, auc, roc_auc_score, precision_score, recall_score, f1_score, accuracy_score, average_precision_score
from scipy.sparse import csr_matrix
from keras.utils.io_utils import HDF5Matrix
#from keras.utils.visualize_util import plot
from keras.optimizers import SGD, Adam
from sklearn.metrics import r2_score
import numpy as np
import theano.tensor as tt
import pandas as pd
import random
import common
import models
from predict import obtain_predictions
from eval import do_eval
import h5py
class Config(object):
"""Configuration for the training process."""
def __init__(self, params, normalize=False, whiten=True):
self.model_id = common.get_next_model_id()
self.norm = normalize
self.whiten = whiten
self.x_path = '%s_%sx%s' % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
self.y_path = '%s_%s_%s' % (params['dataset']['fact'],params['dataset']['dim'],params['dataset']['dataset'])
self.dataset_settings = params['dataset']
self.training_params = params['training']
self.model_arch = params['cnn']
self.predicting_params = params['predicting']
def get_dict(self):
object_dict = self.__dict__
first_key = "model_id"
conf_dict = OrderedDict({first_key: object_dict[first_key]})
conf_dict.update(object_dict)
return conf_dict
def _squared_magnitude(x):
return tt.sqr(x).sum(axis=-1)
def _magnitude(x):
return tt.sqrt(tt.maximum(_squared_magnitude(x), np.finfo(x.dtype).tiny))
def cosine(x, y):
return tt.clip((1 - (x * y).sum(axis=-1) /
(_magnitude(x) * _magnitude(y))) / 2, 0, 1)
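# Editor's note: a plain-numpy sketch of what the theano expression above computes --
# half of (1 - cosine similarity), clipped to [0, 1]: 0 for parallel vectors, 1 for
# anti-parallel ones.
def _cosine_loss_example():
    x = np.array([1.0, 0.0])
    y = np.array([-1.0, 0.0])
    cos_sim = (x * y).sum() / (np.linalg.norm(x) * np.linalg.norm(y))
    assert np.clip((1 - cos_sim) / 2, 0, 1) == 1.0   # anti-parallel -> maximal loss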
def load_sparse_csr(filename):
loader = np.load(filename)
return csr_matrix(( loader['data'], loader['indices'], loader['indptr']),
shape = loader['shape'])
def build_model(config):
"""Builds the cnn."""
params = config.model_arch
get_model = getattr(models, 'get_model_'+str(params['architecture']))
model = get_model(params)
#model = model_kenun.build_convnet_model(params)
# Learning setup
t_params = config.training_params
sgd = SGD(lr=t_params["learning_rate"], decay=t_params["decay"],
momentum=t_params["momentum"], nesterov=t_params["nesterov"])
adam = Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08)
optimizer = eval(t_params['optimizer'])
metrics = ['mean_squared_error']
if config.model_arch["final_activation"] == 'softmax':
metrics.append('categorical_accuracy')
if t_params['loss_func'] == 'cosine':
loss_func = eval(t_params['loss_func'])
else:
loss_func = t_params['loss_func']
model.compile(loss=loss_func, optimizer=optimizer,metrics=metrics)
return model
def load_data_preprocesed(params, X_path, Y_path, dataset, val_percent, test_percent, n_samples, with_metadata=False, only_metadata=False, metadata_source='rovi'):
factors = np.load(common.DATASETS_DIR+'/y_train_'+Y_path+'.npy') # OJO remove S
index_factors = open(common.DATASETS_DIR+'/items_index_train_'+dataset+'.tsv').read().splitlines()
if not only_metadata:
all_X = np.load(common.TRAINDATA_DIR+'/X_train_'+X_path+'.npy')
index_train = open(common.TRAINDATA_DIR+'/index_train_%s.tsv' % (X_path)).read().splitlines()
all_Y = np.zeros((len(index_train),factors.shape[1]))
index_factors_inv = dict()
for i,item in enumerate(index_factors):
index_factors_inv[item] = i
for i,item in enumerate(index_train):
all_Y[i,:] = factors[index_factors_inv[item]]
else:
all_Y = factors
if with_metadata:
if 'w2v' in metadata_source:
all_X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,dataset))[:,:int(params['cnn']['sequence_length'])]
elif 'model' in metadata_source or not params['dataset']['sparse']:
all_X_meta = np.load(common.TRAINDATA_DIR+'/X_train_%s_%s.npy' % (metadata_source,dataset))
else:
all_X_meta = load_sparse_csr(common.TRAINDATA_DIR+'/X_train_%s_%s.npz' % (metadata_source,dataset)).todense()
all_X_in_meta = all_X = all_X_meta
print(all_X.shape)
print(all_Y.shape)
if n_samples != 'all':
n_samples = int(n_samples)
all_X = all_X[:n_samples]
all_Y = all_Y[:n_samples]
if with_metadata:
all_X_in_meta = all_X_in_meta[:n_samples]
if params['training']['normalize_y'] == True:
normalize(all_Y,copy=False)
if params['training']["val_from_file"]:
Y_val = np.load(common.DATASETS_DIR+'/y_val_'+Y_path+'.npy')
Y_test = np.load(common.DATASETS_DIR+'/y_test_'+Y_path+'.npy') #!!! OJO remove S from trainS
if params['dataset']['sparse']:
X_val = load_sparse_csr(common.TRAINDATA_DIR+'/X_val_%s_%s.npz' % (metadata_source,dataset)).todense()
X_test = load_sparse_csr(common.TRAINDATA_DIR+'/X_test_%s_%s.npz' % (metadata_source,dataset)).todense()
else:
X_val = np.load(common.TRAINDATA_DIR+'/X_val_%s_%s.npy' % (metadata_source,dataset))
X_test = np.load(common.TRAINDATA_DIR+'/X_test_%s_%s.npy' % (metadata_source,dataset))
X_train = all_X
Y_train = all_Y
else:
N = all_Y.shape[0]
train_percent = 1 - val_percent - test_percent
N_train = int(train_percent * N)
N_val = int(val_percent * N)
logging.debug("Training data points: %d" % N_train)
logging.debug("Validation data points: %d" % N_val)
logging.debug("Test data points: %d" % (N - N_train - N_val))
if not only_metadata:
# Slice data
X_train = all_X[:N_train]
X_val = all_X[N_train:N_train + N_val]
X_test = all_X[N_train + N_val:]
Y_train = all_Y[:N_train]
Y_val = all_Y[N_train:N_train + N_val]
Y_test = all_Y[N_train + N_val:]
if with_metadata:
if only_metadata:
X_train = all_X_in_meta[:N_train]
X_val = all_X_in_meta[N_train:N_train + N_val]
X_test = all_X_in_meta[N_train + N_val:]
else:
X_train = [X_train,all_X_in_meta[:N_train]]
X_val = [X_val,all_X_in_meta[N_train:N_train + N_val]]
X_test = [X_test,all_X_in_meta[N_train + N_val:]]
return X_train, Y_train, X_val, Y_val, X_test, Y_test
def load_data_hf5(params,val_percent, test_percent):
hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%s.hdf5" % (params['dataset']['dataset'],params['dataset']['window'])
f = h5py.File(hdf5_file,"r")
N = f["targets"].shape[0]
f.close()
train_percent = 1 - val_percent - test_percent
N_train = int(train_percent * N)
N_val = int(val_percent * N)
X_train = HDF5Matrix(hdf5_file, 'features', start=0, end=N_train)
Y_train = HDF5Matrix(hdf5_file, 'targets', start=0, end=N_train)
X_val = HDF5Matrix(hdf5_file, 'features', start=N_train, end=N_train+N_val)
Y_val = HDF5Matrix(hdf5_file, 'targets', start=N_train, end=N_train+N_val)
X_test = HDF5Matrix(hdf5_file, 'features', start=N_train+N_val, end=N)
Y_test = HDF5Matrix(hdf5_file, 'targets', start=N_train+N_val, end=N)
return X_train, Y_train, X_val, Y_val, X_test, Y_test, N_train
def load_data_hf5_memory(params,val_percent, test_percent, y_path, id2gt, X_meta = None, val_from_file = False):
if val_from_file:
hdf5_file = common.PATCHES_DIR+"/patches_train_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
f = h5py.File(hdf5_file,"r")
index_train = f["index"][:]
index_train = np.delete(index_train, np.where(index_train == ""))
N_train = index_train.shape[0]
val_hdf5_file = common.PATCHES_DIR+"/patches_val_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
f_val = h5py.File(val_hdf5_file,"r")
X_val = f_val['features'][:]
#Y_val = f_val['targets'][:]
factors_val = np.load(common.DATASETS_DIR+'/y_val_'+y_path+'.npy')
index_factors_val = open(common.DATASETS_DIR+'/items_index_val_'+params['dataset']['dataset']+'.tsv').read().splitlines()
id2gt_val = dict((index,factor) for (index,factor) in zip(index_factors_val,factors_val))
index_val = [i for i in f_val['index'][:] if i in id2gt_val]
X_val = np.delete(X_val, np.where(index_val == ""), axis=0)
index_val = np.delete(index_val, np.where(index_val == ""))
Y_val = np.asarray([id2gt_val[id] for id in index_val])
test_hdf5_file = common.PATCHES_DIR+"/patches_test_%s_%sx%s.hdf5" % (params['dataset']['dataset'],params['dataset']['npatches'],params['dataset']['window'])
f_test = h5py.File(test_hdf5_file,"r")
X_test = f_test['features'][:]
#Y_test = f_test['targets'][:]
factors_test = np.load(common.DATASETS_DIR+'/y_test_'+y_path+'.npy')
index_factors_test = open(common.DATASETS_DIR+'/items_index_test_'+params['dataset']['dataset']+'.tsv').read().splitlines()
id2gt_test = dict((index,factor) for (index,factor) in zip(index_factors_test,factors_test))
index_test = [i for i in f_test['index'][:] if i in id2gt_test]
X_test = np.delete(X_test, np.where(index_test == ""), axis=0)
        index_test = np.delete(index_test, np.where(index_test == ""))
import json
import numpy as np
from keras.models import Sequential
from keras.layers.core import Dense
from keras.optimizers import sgd
from catch import *
class ExperienceReplay(object):
def __init__(self, max_memory=500):
self.max_memory = max_memory
self.memory = list()
def add(self, sars_d):
# memory[i] = [state_t, action_t, reward_t, state_t+1, game_over?]
self.memory.append(sars_d)
if len(self.memory) > self.max_memory:
del self.memory[0]
def sample_batch(self, batch_size=50):
len_memory = len(self.memory)
return_batch_size = min(len_memory, batch_size)
batch = []
        for i, idx in enumerate(np.random.randint(0, len_memory, size=return_batch_size)):
'''
This code was written primarily by <NAME> with inspiration from previous code
by <NAME> and <NAME>.
Additions were made by <NAME>
'''
import math
import numpy as np
#from matplotlib import pyplot as plt
from ctypes import *
from numpy import random as nrm
import random as rnd
from scipy.integrate import odeint
from scipy.interpolate import interp1d
from scipy.optimize import minimize
import networkx # for regular graphs
import os
import subprocess
Jij = False
############################################
# Functions to generate graph connectivity
############################################
def generate_Jij_LR(n, Zcoeff, alpha):
'''Generates a Jij matrix for the long range Ising model'''
global Jij
Jij = np.zeros((n,n))
for i in range(n):
for j in range(i+1,n):
if (i!=j):
dist2 = (i-j)*(i-j);
Jij[i,j] = Zcoeff/(dist2**(0.5*alpha));
Jij[j,i] = Jij[i,j]
def generate_Jij_MC(n, d, seed=None):
'''Generates a Jij matrix for n bits of MaxCut on a d-regular graph'''
global Jij
Jij = np.zeros((n,n))
graph = networkx.random_regular_graph(d,n,seed=seed)
edges=graph.edges()
#edges = [(0, 2), (0, 3), (0, 5), (1, 4), (1, 6), (1, 7), (2, 5), (2, 7), (3, 5), (3, 6), (4, 6), (4, 7)]
print(edges)
for edge in edges:
(i,j)=edge
Jij[i,j] = 1
Jij[j,i] = 1
return edges
def generate_Jij(n):
'''Generates a randomized Jij matrix and stores it in a global variable'''
global Jij
Jij = np.zeros((n,n))
for i in range(n):
for j in range(i+1,n):
nrm.seed(i*n*10+j*10)
Jij[i,j] = 2*nrm.rand() - 1
Jij[j,i] = Jij[i,j]
####################################
# Functions to generate Hamiltonian
####################################
def get_energy(x):
global Jij
n = len(Jij)
val = 0
for i in range(n):
biti = int(x/(2**i))&1
for j in range(i+1, n):
bitj = int(x/(2**j))&1
val = val + (Jij[i][j]*(1-(biti^bitj)*(2**1)))
return val
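# Editor's note (illustration, not in the original): for the MaxCut couplings each edge
# term Jij[i][j]*(1 - 2*(bi ^ bj)) is +1 when the two bits agree and -1 when they
# disagree, so get_energy equals (#edges) - 2*(cut size) and its minimiser is a max cut.
def _maxcut_edge_term_example():
    for bi, bj, expected in [(0, 0, 1), (0, 1, -1), (1, 1, 1)]:
        assert 1 * (1 - (bi ^ bj) * 2) == expected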
def get_diag():
'''Gets the diagonal of the cost function Hamiltonian. This assumes
you have already initialzed Jij'''
global Jij
#H = ham()
n = len(Jij)
#qc.allocateH(byref(H),n)
diag = []
for i in range(2**n):
diag += [get_energy(i)]
return diag
def flip_index (i, j, n):
'''If i is the decimal version of a bit string of length n, this outputs
the decimal version of the bit string that is that one but with
bit j flipped'''
rem = i
one = +1
for k in range(j+1):
temp = rem - 2**(n-k-1)
if (temp>=0):
rem =temp
one = -1
else:
one = +1
return i + one*2**(n-j-1)
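# Editor's note: a quick worked example (added for illustration) of flip_index above.
# Bit j is counted from the most-significant end of the n-bit string.
def _flip_index_example():
    assert flip_index(0b101, 1, 3) == 0b111   # flipping bit 1 of 101 gives 111
    assert flip_index(0b111, 1, 3) == 0b101   # flipping it again restores 101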
def display_ham(n,want):
'''Prints out the Hamiltonian.
n=number of qubits
want=boolean with True = C and False = B
'''
mat = get_ham(n,want)
output=""
for c in mat:
for el in c:
if np.abs(np.imag(el))<0.000001:
output+=str(np.real(el))+"\t"
else:
output+=str(el)+"\t"
output+="\n"
print(output)
def get_ham(n,want):
'''Gets the Hamiltonian in a numpy format
n=number of qubits
want=boolean with True = C and False = B
'''
N = 2**n
diag = get_diag()
mat = []
for i in range(N):
unit = [0 for k in range(2*N)]
unit[i] = 1
if want:
col=applyC_sing(unit,diag)
else:
col=applyB_sing(n, unit)
mat += [[col[j]+1j*col[j+N] for j in range(N)]]
return np.array(mat)
# works
def applyC_sing(y,diag):
'''Applies the diagonal part of the Hamiltonian (i.e. C) to the vector y'''
N = int(len(y)/2)
output=[0 for i in range(2*N)]
for i in range(N):
output[i] = diag[i]*y[i]
output[i+N] = diag[i]*y[i+N]
return output
# works
def applyB_sing(n, y):
'''Applies the transverse field (i.e. B) to the vector y'''
N = int(len(y)/2)
output=[0 for i in range(2*N)]
for i in range(N):
for j in range(n):
index = flip_index(i,j,n)
output[i] += -y[index] # real
output[i+N] += -y[index+N] # imaginary
return output
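# Editor's note: a small consistency check (not in the original) for applyB_sing. The
# uniform superposition is a +1 eigenstate of every sigma_x, so B = -sum_j sigma_x_j
# maps it to -n times itself (uniform() is defined a few lines below).
def _applyB_uniform_check():
    n = 2
    y = uniform(n)
    By = applyB_sing(n, y)
    assert all(abs(By[k] + n * y[k]) < 1e-12 for k in range(len(y)))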
######################################################
# Generate Initial State
# The format is a 2*2**n list all with real elements
# followed by all imaginary elements
######################################################
def uniform(n):
'''returns a list of length 2*2^n where the first 2^n entries are all
sqrt(1/2^n) and the last ones are all 0
This is usually the initial state'''
N=2**n
y = [1/math.sqrt(N) for i in range(N)]
y += [0 for i in range(N)]
return y
######################################
# Utility Functions
######################################
def get_u (t,uN,tf,ulist):
'''interpolates the values of u stored in ulist to get the current value
of u at t, there are uN values in ulist, in the order
[u(0), u(tf/(uN-1)), ..., u(tf)]
    the active code returns the piecewise-constant (lower-sample) value;
    the commented-out lines below would do a linear interpolation instead'''
if t>tf: t=tf
lower = min(int(math.floor((uN)*(t/tf))), uN - 1);
# upper = int(math.ceil((uN-1)*(t/tf)));
# amount = (uN-1)*(t-tf*lower/(uN-1))/tf;
#
#
# return (ulist[upper])*amount+(ulist[lower])*(1-amount);
return ulist[lower]
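# Illustrative check (added as an example, not part of the original code):
# with three control points over tf = 1.0, get_u returns the sample at index
# floor(uN*t/tf), clipped to the last sample for t >= tf.
def _example_get_u():
    ulist = [0.0, 0.5, 1.0]
    assert get_u(0.0, 3, 1.0, ulist) == 0.0
    assert get_u(0.49, 3, 1.0, ulist) == 0.5   # floor(3*0.49) = 1
    assert get_u(2.0, 3, 1.0, ulist) == 1.0    # t past tf is clamped to tf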
def norm(y):
    '''returns the squared norm of the vector y, where y has all the real
    components in the first half and all the imaginary components
    in the second half'''
    output=0;
    N = int(len(y)/2)
for i in range(N):
output+=y[i]**2+y[i+N]**2
return output
def cdot(y1,y2):
'''returns the complex inner product <y1|y2>, assumes the vectors
have real components in the first half and imaginary components in the
second half'''
output=0;
N = int(len(y1)/2)
for i in range(N):
output+=(y1[i]-1j*y1[i+N])*(y2[i]+1j*y2[i+N])
return output
#################################################
# ODE Solving of the Schrodinger equation
#################################################
def func_schro (y, t, n, uN, tf, ulist,diag) :
'''This is the function f such that dy/dt = f(y,t), so this is essentially
our differential equation or Schrodinger equation put into standard form.
t is the time variable
y[] are the vector elements, all real parts first then all imaginaries
f[] is the function f, and this will be the output
    *params is a pointer to an array of any number of parameters we want
This function assumes the form of B = -\sum \sigma_x
and C is the Ising model with the defined Jij matrix'''
N = 2**n
u = get_u(t, uN, tf, ulist)
dydt = [0 for i in range(2*N)]
dydtC = applyC_sing(y,diag)
dydtB = applyB_sing(n,y)
for i in range(N):
dydt[i] += u*dydtB[i+N] + (1-u)*dydtC[i+N]
dydt[i+N] += -u*dydtB[i] - (1-u)*dydtC[i]
"""
for i in range(N):
# APPLY C
dydt[i] = y[i+N]*diag[i]*(1-u); # real
dydt[i+N] = -y[i]*diag[i]*(1-u);# imaginary
# iterate over all "adjacent" states, i.e. one bit flip away
for j in range(n): # off-diagonal
# APPLY B
index = flip_index(i,j,n)
dydt[i] += -u*y[index+N] # real
dydt[i+N] += u*y[index] # imaginary
"""
return dydt;
def func_schroN (y, t, n, uN, tf, ulist,diag) :
'''This is the function f such that dy/dt = f(y,t), so this is essentially
    our differential equation put into standard form, running time in reverse.
t is the time variable
y[] are the vector elements, all real parts first then all imaginaries
f[] is the function f, and this will be the output
    *params is a pointer to an array of any number of parameters we want
This function assumes the form of B = -\sum \sigma_x
and C is the Ising model with the defined Jij matrix
This version is the negative and is used for reverse time evolution
Note that we assume in this function that ulist has already been reversed
for the purposes of reverse evolution.'''
N = 2**n
u = get_u(t, uN, tf, ulist)
dydt = [0 for i in range(2*N)]
dydtC = applyC_sing(y,diag)
dydtB = applyB_sing(n, y)
for i in range(N):
dydt[i] += -u*dydtB[i+N] - (1-u)*dydtC[i+N]
dydt[i+N] += u*dydtB[i] + (1-u)*dydtC[i]
"""
for i in range(N):
dydt[i] = -y[i+N]*diag[i]*(1-u); # real
dydt[i+N] = y[i]*diag[i]*(1-u);# imaginary
# iterate over all "adjacent" states, i.e. one bit flip away
for j in range(n): # off-diagonal
index = flip_index(i,j,n)
dydt[i] += u*y[index+N] # real
dydt[i+N] += -u*y[index] # imaginary
"""
return dydt;
#####################################################
# Functions to generate the analytic gradient
#####################################################
def avg_energy(y,diag):
'''Tells us the energy expectation value of the state y
At the moment, this just calculates the diagonal portion of the energy'''
k = applyC_sing(y,diag)
return cdot(y,k)
def get_k(yf, tlist, n, uN, tf, ulist, diag):
'''Takes in the final value of the state yf and outputs the state k at all
the time intervals given in tlist. This uses our custom real first then
imaginary in the second half vector form'''
kf = applyC_sing(yf,diag)
nulist = ulist[-1::-1]
ntlist = tlist
sol = odeint(func_schroN, kf, ntlist , args=(n,uN,tf,nulist,diag))
return sol[-1::-1]
def get_Philist (tlist,n,tf,ulist,diag):
'''Takes in a specific procedure, notably including the annealing
path ulist and returns what the values of Phi are for that path
at the times given by tlist
Also returns the final energy of the procedure'''
uN = len(ulist)
y0 = uniform(n)
all_y = odeint(func_schro, y0, tlist , args=(n,uN,tf,ulist,diag))
#print "Figure of Merit: "+str(avg_energy(all_y[-1],diag))
all_k = get_k(all_y[-1],tlist,n,uN,tf,ulist,diag)
Philist=[]
for i in range(uN):
Philist += [calc_Phi(all_y[i],all_k[i],n,diag)]
#print(cdot(all_y[-1],all_y[-1]))
return [Philist,np.real(avg_energy(all_y[-1],diag)),all_y]
def get_Philist_admm(tlist, n, tf, ulist, vlist, lambdalist, rho, diag):
uN = len(ulist)
y0 = uniform(n)
all_y = odeint(func_schro, y0, tlist, args=(n, uN, tf, ulist, diag))
# print "Figure of Merit: "+str(avg_energy(all_y[-1],diag))
all_k = get_k(all_y[-1], tlist, n, uN, tf, ulist, diag)
Philist = []
norm_grad = np.zeros(uN)
norm_grad[0] = rho * (ulist[1] - ulist[0] - vlist[0] + lambdalist[0])
norm_grad[uN - 1] = rho * (ulist[uN - 1] - ulist[uN - 2] - vlist[uN - 2] + lambdalist[uN - 2])
for t in range(1, uN - 1):
norm_grad[t] = rho * (ulist[t] - ulist[t - 1] - vlist[t - 1] + lambdalist[t - 1])
for i in range(uN):
Philist += [calc_Phi(all_y[i], all_k[i], n, diag) + norm_grad[i]]
# print(cdot(all_y[-1],all_y[-1]))
return [Philist, np.real(avg_energy(all_y[-1], diag)), all_y]
def calc_Phi(y,k,n,diag):
'''Calculates the value of Phi for the given y and k vectors
This function assumes those vectors are for the same time and does not
need any information about the time'''
output = 0
output += cdot(y,applyB_sing(n,k))
output += -cdot(y,applyC_sing(k,diag))
output = 2*np.imag(output)
return output
def compute_energy_u(tlist, tf, ulist):
global Jij
n = len(Jij)
diag = get_diag()
return get_Energy_u(tlist, n, tf, ulist, diag)
def get_Energy_u (tlist,n,tf,ulist,diag):
'''Takes in a specific procedure, notably including the annealing
path ulist and returns what the value of the energy is for that path
at the final time'''
uN = len(ulist)
y0 = uniform(n)
all_y = odeint(func_schro, y0, tlist , args=(n,uN,tf,ulist,diag))
return np.real(avg_energy(all_y[-1],diag))
#######################################################
# Carries out the gradient descent on the u(t) function
#######################################################
def compute_gradient(tlist, tf, ulist):
global Jij
n = len(Jij)
diag = get_diag()
[Philist, Energy, state] = get_Philist(tlist, n, tf, ulist, diag)
return Philist
def gradient_descent_opt(n, uN, tf, iterations, min_grad, ulist_in=[], type="normal", v=None, _lambda=None, rho=None):
'''Carries out the gradient descent and outputs the ulist from the end
of the procedure.
n = number of qubits
uN = number of points that u(t) is discretized into
tf = the total time of the procedure
iterations = how many times to do the gradient descent step
    ulist_in = initial guess for function, working on making a default, delete
and use internal code if you want something different
Outputs:
The final ulist
Philist
Final Energy'''
diag = get_diag() # Diagonal part of the Hamiltonian
#diag = map(lambda x: diag[x],range(2**n))
Etrue = min(diag)
beta=250. # might need to up this number for more complicated procedures
# could lower it for smaller systems to speed up convergence at the cost
# of accuracy
lambdas= 0
if len(ulist_in)==0:
        # Use these as alternatives if you don't have an initial guess for ulist
#ulist = map(lambda x: 1-x/(uN-1.), range(0,uN))
ulist = list(map(lambda x: 0.5, range(0,uN))) # this one works just fine
#ulist = [nrm.rand() for i in range(uN)]
else:
ulist=ulist_in
tlist = list(map(lambda x: tf*x, map(lambda x: x/(uN-1.), range(0, uN))))
ylist = ulist
for i in range(iterations):
lambdap = (1.+math.sqrt(1.+4.*lambdas**2))/2.
gamma = (1-lambdas)/lambdap
lambdas = lambdap
if type == "admm":
[Philist, Energy, state] = get_Philist_admm(tlist, n, tf, ulist, v, _lambda, rho, diag)
if type == "normal":
[Philist, Energy, state] = get_Philist(tlist, n, tf, ulist, diag)
ylistnew = [max([0, min([1, ulist[j] + Philist[j]/(beta)])]) for j in range(uN)]
ulist = [max([0, min([1, (1-gamma)*ylistnew[j]+gamma*ylist[j]])]) for j in range(uN)]
ylist = ylistnew
# print(str(tf)+" "+str(i)+"/"+str(iterations)+": "+str([0+Energy,Etrue]))
# print(np.linalg.norm(np.array(Philist), 2))
# print(Philist)
if np.linalg.norm(np.array(Philist), 2) < min_grad:
break
num_it = i
return [ulist, Philist, Energy, state, num_it]
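# Hedged usage sketch (added for illustration; every parameter value below is a
# placeholder, not taken from the original experiments): optimize the annealing
# schedule u(t) for a small random Ising instance. The function is not called
# automatically.
def _example_gradient_descent_opt():
    n = 3
    generate_Jij(n)                 # sets the global Jij coupling matrix
    uN, tf = 11, 2.0                # 11 control points over a total time of 2.0
    ulist, Philist, Energy, state, num_it = gradient_descent_opt(
        n, uN, tf, iterations=50, min_grad=1e-3)
    print("final energy:", Energy, "after", num_it, "gradient steps")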
##############################################
# IO Utility functions
##############################################
def import_u():
'''Imports a previously found u(t). I am mostly using this to improve previously found
results and improve their quality'''
infile=open("maxcut_ver2.tsv",'r') # change to your favorite file
full = infile.read()
infile.close()
lines = full.split("\n")
    splitlines = [line.split("\t") for line in lines[:-1]]
    numbers = [list(map(float, line)) for line in splitlines]
    ulist = [row[2] for row in numbers]
    qaoalist = [row[3] for row in numbers]
return [ulist,qaoalist]
def print_to_file(n,tf,tlist,ulist,Philist,Energy,edges):
outstring = "B and C, n="+str(n)+", tf = "+str(tf)+"\n"
outstring+= "Energy = "+str(Energy)+"\n"
outstring+= str(edges)
for i in range(len(ulist)):
outstring+="\n"+str(tlist[i])+"\t"+str(ulist[i])+"\t"+str(Philist[i])
outfile = open("B_and_C_tf="+str(tf)+"_n="+str(n)+".tsv",'w')
outfile.write(outstring)
    print(ulist)
outfile.close()
########################################
# What the program actually does
########################################
import sys
if __name__=="__main__":
n = 4 # number of qubits
edges = generate_Jij_MC(n,3) # Generates the connectivity graph # sets a global variable
uN = 41 # number of discrete steps in u(t)
#generate_Jij_MC(n, 3) # generate the problem of MaxCut on a 3-regular graph
#generate_Jij_LR(n,1.0,0.5) # generate the long range Ising problem
tstep = 2.0 # The step in the tf, total time for the procedure
tsteps = 1 # how many tf steps to take
iterations = 200 # number of iterations of gradient descent
display_ham(n,False)
display_ham(n,True)
C_mat = get_ham(n,True)
np.savetxt('C_mat_' + str(n) + '.csv', C_mat)
B_mat = get_ham(n,False)
np.savetxt('B_mat_' + str(n) + '.csv', B_mat)
######################################################
# ... Sven's additions
RealB = B_mat.real;
ImagB = B_mat.imag;
Brows, Bcols = np.nonzero(RealB)
print("#nonzero REAL elements of B")
for ii in range(len(Brows)):
print("let RealB[",Brows[ii]+1,",",Bcols[ii]+1,"] := ",RealB[Brows[ii],Bcols[ii]],";")
Brows, Bcols = np.nonzero(ImagB)
print("#nonzero IMAGINARY elements of B")
for ii in range(len(Brows)):
print("let ImagB[",Brows[ii]+1,",",Bcols[ii]+1,"] := ",ImagB[Brows[ii],Bcols[ii]],";")
RealC = C_mat.real;
ImagC = C_mat.imag;
    Crows, Ccols = np.nonzero(RealC)
#coding=utf-8
import numpy as np
import codecs
import os
import sys
import time
from functools import reduce
'''
load dict data which was generated by trans_fastText
'''
def load_fastText_dict(dict_path):
dict = np.load(dict_path)
return dict
'''
load word embedding data which was generated by trans_fastText
'''
def load_fastText_word_embeadding(path,index=None):
we = np.load(path)
if index is not None:
d = we[index]
we = np.concatenate((we,d,d,d,np.zeros(shape=[1,300])))
return we
'''
load the dict file and the word embedding data file, which were generated by trans_fastText
'''
def load_fastTextByFile(dict_path,word_embeadding_path):
dict,index = load_fastText_dict(dict_path)
we = load_fastText_word_embeadding(word_embeadding_path,index)
assert np.shape(dict)[0]==np.shape(we)[0]
return dict,we
'''
load the dict file and the word embedding data file, which were generated by trans_fastText
'''
def load_fastTextByDir(dir_path):
return load_fastTextByFile(os.path.join(dir_path,"dict.bin.npy"),os.path.join(dir_path,"wordembeadding.bin.npy"))
'''
Trans fast text word embedding data to two binary file: word embedding data and dict data
'''
def trans_fastText(file_path,save_dir="./"):
file = codecs.open(file_path, "r", "utf-8")
dict = []
file.readline()
we = []
nr = 332647
tmp = range(300)
count = 0
begin_t = time.time()
while True:
line = file.readline()
if not line:
break
data = line.split(u" ")
if len(data) == 300:
data = [' '] + data
elif len(data) < 300:
continue
dict.append(data[0])
if count == 73144:
print("A")
for i in range(300):
tmp[i] = (float(data[i + 1]))
we.append(np.array([tmp], dtype=np.float32))
if count % 100 == 0:
sys.stdout.write('\r>> Converting image %d/%d' % (len(dict), nr))
sys.stdout.flush()
count = count + 1
print("\n")
print("total time=%f" % (time.time() - begin_t))
# index = dict.index(u"甲肝")
# index = dict.index(u"乙肝")
# index = dict.index(u"丙炎")
# index = dict.index(u"")
we = np.concatenate(we)
# we = np.concatenate([we,[we[0]],[we[0]],[we[0]]])
np_dict = np.array(dict)
np.save(os.path.join(save_dir,"wordembeadding.bin"), we)
np.save(os.path.join(save_dir,"dict.bin"), np_dict)
'''
Tokenize the text and return the indices of the resulting tokens in the dictionary
'''
def tokenize(text,thul,dict):
text = text.encode("utf-8")
thul_token = thul.cut(text)
res = []
token=[]
for t in thul_token:
word = t[0]
u_word = word.decode("utf-8")
index = np.where(dict == u_word)
shape = np.shape(index[0])
if shape[0] == 0:
words = tokenize_word(u_word,dict)
token.extend(words)
res.extend(indexs_of_words(words,dict))
else:
res.append(index[0][0])
token.append(u_word)
return res,token
def tokenize_word(word,dict):
if len(word)<=1:
return [word]
if len(word)==2:
return [word[0],word[1]]
begin_word = word[:2]
index = np.where(dict==begin_word)
if np.shape(index[0])[0] ==0:
return [begin_word[0],begin_word[1]]+tokenize_word(word[2:],dict)
else:
return [begin_word]+tokenize_word(word[2:],dict)
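# Illustrative check (added as an example; the two-entry vocabulary is a made-up
# placeholder): words are greedily split into two-character chunks that are
# looked up in the dictionary, and a final two-character remainder is always
# split into single characters by the len(word) == 2 branch.
def _example_tokenize_word():
    demo_dict = np.array([u"AB", u"CD"])
    assert tokenize_word(u"ABCD", demo_dict) == [u"AB", u"C", u"D"]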
def indexs_of_words(words,dict):
res = []
for word in words:
index = np.where(dict == word)
        shape = np.shape(index[0])
import os
import ee
import geemap
import json
import requests
import numpy as np
import pandas as pd
import matplotlib.pylab as plt
from datetime import datetime
from datetime import timedelta
import rasterio as rio
from rasterio import plot
from rasterio import warp
try:
ee.Initialize()
except:
ee.Authenticate()
ee.Initialize()
class dataCollector:
def __init__(self, beam=None, oaurl=None, track=None, date=None, latlims=None, lonlims=None, verbose=False):
if (beam is None) or ((oaurl is None) and (None in [track, date, latlims, lonlims])):
raise Exception('''Please specify a beam and
- either: an OpenAltimetry API url,
- or: a track, date, latitude limits and longitude limits.''')
else:
if oaurl is not None:
url = oaurl
tofind = '&beamName='
ids = url.find(tofind)
while ids>-1:
url = url.replace(url[ids:ids+len(tofind)+4],'')
ids = url.find(tofind)
iprod = url.find('/atl')
url = url.replace(url[iprod:iprod+6],'/atlXX')
url += tofind + beam + '&client=jupyter'
idate = url.find('date=') + len('date=')
date = url[idate:idate+10]
itrack = url.find('trackId=') + len('trackId=')
trackend = url[itrack:].find('&')
track = int(url[itrack:itrack+trackend])
bb = []
for s in ['minx=', 'maxx=', 'miny=', 'maxy=']:
ids = url.find(s) + len(s)
ide = url[ids:].find('&')
bb.append(float(url[ids:ids+ide]))
lonlims = bb[:2]
latlims = bb[2:]
elif None not in [track, date, latlims, lonlims]:
url = 'https://openaltimetry.org/data/api/icesat2/atlXX?'
url += 'date={date}&minx={minx}&miny={miny}&maxx={maxx}&maxy={maxy}&trackId={track}&beamName={beam}'.format(
date=date,minx=lonlims[0],miny=latlims[0],maxx=lonlims[1],maxy=latlims[1],track=track,beam=beam)
url += '&outputFormat=json&client=jupyter'
self.url = url
self.date = date
self.track = track
self.beam = beam
self.latlims = latlims
self.lonlims = lonlims
if verbose:
print('OpenAltimetry API URL:', self.url)
print('Date:', self.date)
print('Track:', self.track)
print('Beam:', self.beam)
print('Latitude limits:', self.latlims)
print('Longitude limits:', self.lonlims)
def requestData(self, verbose=False):
if verbose:
print('---> requesting ATL03 data...',end='')
product = 'atl03'
request_url = self.url.replace('atlXX',product)
data = requests.get(request_url).json()
lat, lon, h, confs = [], [], [], []
for beam in data:
for confidence in beam['series']:
for p in confidence['data']:
confs.append(confidence['name'])
lat.append(p[0])
lon.append(p[1])
h.append(p[2])
self.atl03 = pd.DataFrame(list(zip(lat,lon,h,confs)), columns = ['lat','lon','h','conf'])
if verbose:
print(' Done.')
print('---> requesting ATL06 data...',end='')
product = 'atl06'
request_url = self.url.replace('atlXX',product)
data = requests.get(request_url).json()
self.atl06 = pd.DataFrame(data['series'][0]['lat_lon_elev'], columns = ['lat','lon','h'])
if verbose:
print(' Done.')
print('---> requesting ATL07 data...',end='')
product = 'atl07'
request_url = self.url.replace('atlXX',product)
data = requests.get(request_url).json()
self.atl07 = pd.DataFrame(data['series'][0]['lat_lon_elev'], columns = ['lat','lon','h'])
if verbose:
print(' Done.')
print('---> requesting ATL08 data...',end='')
product = 'atl08'
request_url = self.url.replace('atlXX',product)
data = requests.get(request_url).json()
self.atl08 = pd.DataFrame(data['series'][0]['lat_lon_elev_canopy'], columns = ['lat','lon','h','canopy'])
if verbose:
print(' Done.')
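    # Hedged usage sketch (added for illustration; the beam, track, date and
    # bounding box below are placeholder values, not taken from the original code):
    #   dc = dataCollector(beam='gt2l', track=1234, date='2020-01-01',
    #                      latlims=[59.5, 60.0], lonlims=[-44.5, -44.0], verbose=True)
    #   dc.requestData(verbose=True)  # populates dc.atl03 / dc.atl06 / dc.atl07 / dc.atl08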
################################################################################################
def plotData(self,ax=None,title='some Data I found on OpenAltimetry',plot_atl07=True,plot_atl08=True):
# get data if not already there
if 'atl03' not in vars(self).keys():
print('Data has not yet been requested from OpenAltimetry yet. Doing this now.')
self.requestData(verbose=True)
axes_not_specified = True if ax == None else False
# create the figure and axis
if axes_not_specified:
fig, ax = plt.subplots(figsize=[10,6])
atl03 = ax.scatter(self.atl03.lat, self.atl03.h, s=2, color='black', alpha=0.2, label='ATL03')
atl06, = ax.plot(self.atl06.lat, self.atl06.h, label='ATL06')
if plot_atl07 == True:
atl07, = ax.plot(self.atl07.lat, self.atl07.h, label='ATL07')
if plot_atl08 == True:
atl08, = ax.plot(self.atl08.lat, self.atl08.h, label='ATL08', linestyle='--')
heights = self.atl03.h[self.atl03.conf != 'Noise']
y_min1 = np.min(heights)
y_max1 = np.max(heights)
if plot_atl08 == True:
maxprods = np.nanmax((self.atl06.h.max(), self.atl08.h.max()))
minprods = np.nanmin((self.atl06.h.min(), self.atl08.h.min()))
else:
maxprods = np.nanmax(self.atl06.h.max())
minprods = np.nanmin((self.atl06.h.min(), self.atl07.h.min()))
hrange = maxprods - minprods
y_min2 = minprods - hrange * 0.5
y_max2 = maxprods + hrange * 0.5
y_min = np.nanmin((y_min1, y_min2))
y_max = np.nanmax((y_max1, y_max2))
x_min = self.atl08.lat.min()
x_max = self.atl08.lat.max()
ax.set_xlim((x_min, x_max))
ax.set_ylim((y_min, y_max))
# label the axes
ax.set_title(title)
ax.set_xlabel('latitude')
ax.set_ylabel('elevation in meters')
# add a legend
ax.legend(loc='lower right')
# add some text to provide info on what is plotted
info = 'ICESat-2 track {track:d}-{beam:s} on {date:s}\n({lon:.4f}E, {lat:.4f}N)'.format(track=self.track,
beam=self.beam,
date=self.date,
lon=np.mean(self.lonlims),
lat=np.mean(self.latlims))
infotext = ax.text(0.03, 0.03, info,
horizontalalignment='left',
verticalalignment='bottom',
transform=ax.transAxes,
fontsize=7,
bbox=dict(edgecolor=None, facecolor='white', alpha=0.9, linewidth=0))
if axes_not_specified:
fig.tight_layout()
return fig
else:
return ax
################################################################################################
def plotData_hv(self):
import holoviews as hv
from holoviews import opts
hv.extension('bokeh', 'matplotlib')
confdict = {'Noise': -1.0, 'Buffer': 0.0, 'Low': 1.0, 'Medium': 2.0, 'High': 3.0}
self.atl03['conf_num'] = [confdict[x] for x in self.atl03.conf]
self.atl08['canopy_h'] = self.atl08.h + self.atl08.canopy
atl03scat = hv.Scatter(self.atl03, 'lat', vdims=['h', 'conf_num'], label='ATL03')\
.opts(color='conf_num', alpha=1, cmap='dimgray_r')
atl06line = hv.Curve(self.atl06, 'lat', 'h', label='ATL06')\
.opts(color='r', alpha=0.5, line_width=3)
atl08line = hv.Curve(self.atl08, 'lat', 'h', label='ATL08')\
.opts(color='b', alpha=1, line_width=1)
atl08scat = hv.Scatter(self.atl08, 'lat', 'canopy_h', label='ATL08 Canopy')
atl08scat = atl08scat.opts(alpha=1, color='g', size=4)
hrange = self.atl06.h.max() - self.atl06.h.min()
overlay = (atl03scat * atl06line * atl08line * atl08scat).opts(
height=500,
width=800,
xlabel='latitude',
ylabel='elevation',
title='ICESat-2 track %d %s on %s' % (self.track,self.beam.upper(),self.date),
legend_position='bottom_right',
ylim=(self.atl06.h.min()-hrange, self.atl06.h.max()+hrange),
xlim=(self.atl06.lat.min(), self.atl06.lat.max())
)
return overlay
################################################################################################
def makeGEEmap(self, days_buffer=25):
# get data if not already there
if 'atl03' not in vars(self).keys():
print('Data has not yet been requested from OpenAltimetry yet. Doing this now.')
self.requestData(verbose=True)
def dist_latlon2meters(lat1, lon1, lat2, lon2):
# returns the distance between two lat/lon coordinate points along the earth's surface in meters
R = 6371000
def deg2rad(deg):
return deg * (np.pi/180)
dlat = deg2rad(lat2-lat1)
dlon = deg2rad(lon2-lon1)
a = np.sin(dlat/2) * np.sin(dlat/2) + np.cos(deg2rad(lat1)) * np.cos(deg2rad(lat2)) * np.sin(dlon/2) * np.sin(dlon/2)
c = 2 * np.arctan2(np.sqrt(a), np.sqrt(1-a))
return R * c
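        # Note (added comment): the helper above is the standard haversine
        # great-circle distance with a mean Earth radius of 6371 km.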
lat1, lat2 = self.atl08.lat[0], self.atl08.lat.iloc[-1]
lon1, lon2 = self.atl08.lon[0], self.atl08.lon.iloc[-1]
center_lat = (lat1 + lat2) / 2
center_lon = (lon1 + lon2) / 2
ground_track_length = dist_latlon2meters(lat1, lon1, lat2, lon2)
print('The ground track is %d meters long.' % np.round(ground_track_length))
collection_name1 = 'COPERNICUS/S2_SR' # Sentinel-2 earth engine collection
# https://developers.google.com/earth-engine/datasets/catalog/COPERNICUS_S2_SR
collection_name2 = 'LANDSAT/LC08/C01/T2' # Landsat 8 earth engine collection
# https://developers.google.com/earth-engine/datasets/catalog/LANDSAT_LC08_C01_T2
# Note: Landsat 8 ingestion into Earth Engine seems to not have reached Antarctica yet, so using raw scenes...
# the point of interest (center of the track) as an Earth Engine Geometry
point_of_interest = ee.Geometry.Point(center_lon, center_lat)
def query_scenes(self, days_buffer):
# get the dates
datetime_requested = datetime.strptime(self.date, '%Y-%m-%d')
search_start = (datetime_requested - timedelta(days=days_buffer)).strftime('%Y-%m-%d')
search_end = (datetime_requested + timedelta(days=days_buffer)).strftime('%Y-%m-%d')
print('Search for imagery from {start:s} to {end:s}.'.format(start=search_start, end=search_end))
# the collection to query:
# 1) merge Landsat 8 and Sentinel-2 collections
# 2) filter by acquisition date
# 3) filter by the point of interest
# 4) sort by acquisition date
collection = ee.ImageCollection(collection_name1) \
.merge(ee.ImageCollection(collection_name2)) \
.filterDate(search_start, search_end) \
.filterBounds(point_of_interest) \
.sort('system:time_start')
info = collection.getInfo()
n_imgs = len(info['features'])
print('--> Number of scenes found within +/- %d days of ICESat-2 overpass: %d' % (days_buffer, n_imgs))
return (collection, info, n_imgs)
# query collection for initial days_buffer
collection, info, n_imgs = query_scenes(self, days_buffer)
# if query returns more than 20 images, try to narrow it down
tries = 0
while (n_imgs > 20) & (tries<5):
print('----> This is too many. Narrowing it down...')
days_buffer = np.round(days_buffer * 15 / n_imgs)
collection, info, n_imgs = query_scenes(self, days_buffer)
n_imgs = len(info['features'])
tries += 1
# if query returns no images, then return
if n_imgs < 1:
print('NO SCENES FOUND. Try to widen your search by including more dates.')
return
# region of interest around the ground track (use this area to scale visualization factors)
buffer_around_center_meters = ground_track_length/2
region_of_interest = point_of_interest.buffer(buffer_around_center_meters)
# make an earth engine feature collection from the ground track so we can show it on the map
ground_track_coordinates = list(zip(self.atl08.lon, self.atl08.lat))
ground_track_projection = 'EPSG:4326' # <-- this specifies that our data longitude/latitude in degrees [https://epsg.io/4326]
gtx_feature = ee.FeatureCollection(ee.Geometry.LineString(coords=ground_track_coordinates,
proj=ground_track_projection,
geodesic=True))
Map = geemap.Map(center=(40, -100), zoom=4)
Map.add_basemap('HYBRID')
for i, feature in enumerate(info['features']):
# get the relevant info
thisDate = datetime.fromtimestamp(feature['properties']['system:time_start']/1e3)
dtstr = thisDate.strftime('%Y-%m-%d')
dt = (thisDate - datetime.strptime(self.date, '%Y-%m-%d')).days
ID = feature['id']
rel = 'before' if dt<0 else 'after'
print('%02d: %s (%3d days %s ICESat-2 overpass): %s' % (i, dtstr, np.abs(dt), rel, ID))
# get image by id, and normalize rgb range
image_id = feature['id']
thisScene = ee.Image(image_id)
rgb = thisScene.select('B4', 'B3', 'B2')
rgbmax = rgb.reduce(ee.Reducer.max()).reduceRegion(reducer=ee.Reducer.max(), geometry=region_of_interest, bestEffort=True, maxPixels=1e6)
rgbmin = rgb.reduce(ee.Reducer.min()).reduceRegion(reducer=ee.Reducer.min(), geometry=region_of_interest, bestEffort=True, maxPixels=1e6)
rgb = rgb.unitScale(ee.Number(rgbmin.get('min')), ee.Number(rgbmax.get('max'))).clamp(0.0, 1.0)
# if the image is Landsat 8, then pan-sharpen the image
if 'LANDSAT' in ID:
pan = thisScene.select('B8').unitScale(ee.Number(rgbmin.get('min')), ee.Number(rgbmax.get('max'))).clamp(0.0, 1.0)
huesat = rgb.rgbToHsv().select('hue', 'saturation')
rgb = ee.Image.cat(huesat, pan).hsvToRgb().clamp(0.0, 1.0)
# make the image uint8
rgb = rgb.multiply(255).uint8()
# add to map (only show the first layer, then can toggle others on in map)
show_layer = True if i==0 else False
Map.addLayer(rgb, name='%02d: %d days, %s'%(i,dt,ID), shown=show_layer)
# show ground track on map, and center on our region of interest
Map.addLayer(gtx_feature, {'color': 'red'}, 'ground track')
Map.centerObject(region_of_interest,zoom=11)
return Map
################################################################################################
def plotDataAndMap(self, scene_id, crs='EPSG:3857', title='ICESat-2 Data'):
from utils.curve_intersect import intersection
# get data if not already there
if 'atl03' not in vars(self).keys():
print('Data has not yet been requested from OpenAltimetry yet. Doing this now.')
self.requestData(verbose=True)
# plot the ICESat-2 data
fig = plt.figure(figsize=[12,5])
ax_data = fig.add_subplot(122)
self.plotData(ax_data, title=title)
# get the image and plot
ax_img = fig.add_subplot(121)
def dist_latlon2meters(lat1, lon1, lat2, lon2):
# returns the distance between two lat/lon coordinate points along the earth's surface in meters
R = 6371000
def deg2rad(deg):
return deg * (np.pi/180)
dlat = deg2rad(lat2-lat1)
dlon = deg2rad(lon2-lon1)
a = np.sin(dlat/2) * np.sin(dlat/2) + np.cos(deg2rad(lat1)) * np.cos(deg2rad(lat2)) * np.sin(dlon/2) * np.sin(dlon/2)
c = 2 * np.arctan2(np.sqrt(a), np.sqrt(1-a))
return R * c
lat1, lat2 = self.atl08.lat[0], self.atl08.lat.iloc[-1]
lon1, lon2 = self.atl08.lon[0], self.atl08.lon.iloc[-1]
center_lat = (lat1 + lat2) / 2
center_lon = (lon1 + lon2) / 2
ground_track_length = dist_latlon2meters(lat1, lon1, lat2, lon2)
# the point of interest (center of the track) as an Earth Engine Geometry
point_of_interest = ee.Geometry.Point(center_lon, center_lat)
# region of interest around the ground track (use this area to scale visualization factors)
buffer_around_center_meters = ground_track_length*0.52
region_of_interest = point_of_interest.buffer(buffer_around_center_meters)
thisScene = ee.Image(scene_id)
info = thisScene.getInfo()
# get the relevant info
thisDate = datetime.fromtimestamp(info['properties']['system:time_start']/1e3)
dtstr = thisDate.strftime('%Y-%m-%d')
download_folder = 'downloads/'
download_filename = '%s%s-8bitRGB.tif' % (download_folder, scene_id.replace('/', '-'))
if os.path.exists(download_filename):
print('This file already exists, not downloading again: %s' % download_filename)
else:
# get image by id, and normalize rgb range
rgb = thisScene.select('B4', 'B3', 'B2')
rgbmax = rgb.reduce(ee.Reducer.max()).reduceRegion(reducer=ee.Reducer.max(), geometry=region_of_interest, bestEffort=True, maxPixels=1e6)
rgbmin = rgb.reduce(ee.Reducer.min()).reduceRegion(reducer=ee.Reducer.min(), geometry=region_of_interest, bestEffort=True, maxPixels=1e6)
rgb = rgb.unitScale(ee.Number(rgbmin.get('min')), ee.Number(rgbmax.get('max'))).clamp(0.0, 1.0)
# if the image is Landsat 8, then pan-sharpen the image
if 'LANDSAT' in scene_id:
pan = thisScene.select('B8').unitScale(ee.Number(rgbmin.get('min')), ee.Number(rgbmax.get('max'))).clamp(0.0, 1.0)
huesat = rgb.rgbToHsv().select('hue', 'saturation')
rgb = ee.Image.cat(huesat, pan).hsvToRgb().clamp(0.0, 1.0)
# make the image uint8
rgb = rgb.multiply(255).uint8()
rgb_info = rgb.getInfo()
downloadURL = rgb.getDownloadUrl({'name': 'mySatelliteImage',
'crs': crs,
'scale': rgb_info['bands'][0]['crs_transform'][0],
'region': region_of_interest,
'filePerBand': False,
'format': 'GEO_TIFF'})
response = requests.get(downloadURL)
if not os.path.exists(download_folder): os.makedirs(download_folder)
with open(download_filename, 'wb') as fd:
fd.write(response.content)
print('Downloaded %s' % download_filename)
img = rio.open(download_filename)
plot.show(img, ax=ax_img)
# get the graticule right
latlon_bbox = warp.transform(img.crs, {'init': 'epsg:4326'},
[img.bounds[i] for i in [0,2,2,0,0]],
[img.bounds[i] for i in [1,1,3,3,1]])
min_lat = np.min(latlon_bbox[1])
max_lat = np.max(latlon_bbox[1])
min_lon = np.min(latlon_bbox[0])
max_lon = np.max(latlon_bbox[0])
latdiff = max_lat-min_lat
londiff = max_lon-min_lon
diffs = np.array([0.0001, 0.0002, 0.00025, 0.0004, 0.0005,
0.001, 0.002, 0.0025, 0.004, 0.005,
0.01, 0.02, 0.025, 0.04, 0.05, 0.1, 0.2, 0.25, 0.4, 0.5, 1, 2])
        latstep = np.min(diffs[diffs>latdiff/8])
# This file is largely copied from pgd_attack.py found at
# github.com/MadryLab/mnist_challenge. Relevant Paper:
# <NAME>, <NAME>, <NAME>, <NAME>, <NAME>. Towards Deep Learning
# Models Resistant to Adversarial Attacks. ICLR 2018.
# I've changed it to work with more general input spaces and convolutional
# networks.
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import os
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow.compat.v1 as tf
# tf.disable_resource_variables()
tf.logging.set_verbosity(tf.logging.FATAL)
tf.disable_v2_behavior()
class IntervalPGDAttack:
def __init__(self, model, k, a, random_start, loss_func):
"""Attack parameter initialization. The attack performs k steps of
size a, while always staying within epsilon from the initial
point."""
self.model = model
self.k = k
self.a = a
self.rand = random_start
if loss_func == 'xent':
loss = model.xent
elif loss_func == 'cw':
label_mask = tf.one_hot(model.y_input,
10,
on_value=1.0,
off_value=0.0,
dtype=tf.float32)
correct_logit = tf.reduce_sum(label_mask * model.pre_softmax, axis=1)
wrong_logit = tf.reduce_max((1-label_mask) * model.pre_softmax, axis=1)
loss = -tf.nn.relu(correct_logit - wrong_logit + 50)
else:
print('Unknown loss function. Defaulting to cross-entropy')
loss = model.xent
self.grad = tf.gradients(loss, model.x_input)[0]
with tf.Session() as sess:
self.graph = sess.graph
def perturb(self, x_nat, y, lower, upper, sess):
if self.rand:
# Picks a uniformly distributed random point inside the region
#x = x_nat + np.random.uniform(-self.epsilon, self.epsilon, x_nat.shape)
x = np.random.uniform(lower, upper, x_nat.shape);
else:
            x = np.copy(x_nat)
# coding=utf-8
# Copyright 2018 The DisentanglementLib Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for the weakly-supervised methods."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import parameterized
from disentanglement_lib.methods.weak import weak_vae # pylint: disable=unused-import
import numpy as np
import tensorflow as tf
class WeakVaeTest(parameterized.TestCase, tf.test.TestCase):
@parameterized.parameters(
(np.zeros([64, 10]),
np.zeros([64, 10]),
np.ones([64, 10]),
np.ones([64, 10]),
np.concatenate((np.zeros([64, 5]), np.ones([64, 5])), axis=1),
np.concatenate((np.ones([64, 5]), np.zeros([64, 5])), axis=1)),
(np.array([[1, 1]]),
np.array([[1, 1]]),
np.array([[0, 0]]),
       np.array([[0, 0]]),
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Image Filtering
Reference: http://machinelearninguru.com/computer_vision/basics/convolution/image_convolution_1.html
Note:
The outputs are slightly different from the original outputs in the post,
due to the `image.png` file is not available (using `image.jpg` instead)
<NAME>, 2018-12-12
"""
from __future__ import print_function
from skimage import io, viewer, color
import matplotlib.pyplot as plt
import numpy as np
from skimage import exposure
#import pylab
def convolve2d(image, kernel):
# This function which takes an image and a kernel
# and returns the convolution of them
# Args:
# image: a numpy array of size [image_height, image_width].
# kernel: a numpy array of size [kernel_height, kernel_width].
# Returns:
# a numpy array of size [image_height, image_width] (convolution output).
kernel = np.flipud(np.fliplr(kernel)) # Flip the kernel
output = np.zeros_like(image) # convolution output
# Add zero padding to the input image
image_padded = np.zeros((image.shape[0] + 2, image.shape[1] + 2))
image_padded[1:-1, 1:-1] = image
for x in range(image.shape[1]): # Loop over every pixel of the image
for y in range(image.shape[0]):
# element-wise multiplication of the kernel and the image
output[y,x]=(kernel*image_padded[y:y+3,x:x+3]).sum()
return output
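# Minimal sanity check (added as an example, not part of the original tutorial):
# convolving with a kernel that is all zeros except a central 1 reproduces the
# input image, since the zero padding never contributes.
def _example_convolve2d_identity():
    test_img = np.arange(16, dtype=float).reshape(4, 4)
    identity_kernel = np.zeros((3, 3))
    identity_kernel[1, 1] = 1.0
    assert np.allclose(convolve2d(test_img, identity_kernel), test_img)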
### Load and plot image ###
# set the image file name
img_file = 'image.jpg'
# load the image as grayscale in one step
img = io.imread(img_file, as_gray=True)
# alternatively, you can load the original image and then convert it to grayscale
# img2 = io.imread(img_file)
# img2 = color.rgb2gray(img2)
print('image matrix size: {}'.format(img.shape)) # print the size of image
print('First 5 columns and rows of the image matrix:\n {}'.format(img[:5,:5]*255))
viewer.ImageViewer(img).show() # plot the image
### Convolve the sharpen kernel with an image ###
# Adjust the contrast of the image by applying Histogram Equalization
# clip_limit: normalized between 0 and 1 (higher values give more contrast)
image_equalized = exposure.equalize_adapthist(img/np.max(np.abs(img)), clip_limit=0.03)
plt.imshow(image_equalized, cmap=plt.cm.gray)
plt.axis('off')
plt.show()
# Convolve the sharpen kernel and the image
kernel = np.array([[0,-1,0],[-1,5,-1],[0,-1,0]])
image_sharpen = convolve2d(img, kernel)
print('First 5 columns and rows of the image_sharpen matrix:\n {}'.format(image_sharpen[:5,:5]*255))
# Plot the filtered image
plt.imshow(image_sharpen, cmap=plt.cm.gray)
plt.axis('off')
plt.show()
# Adjust the contrast of the filtered image by applying Histogram Equalization
image_sharpen_equalized = exposure.equalize_adapthist(image_sharpen/np.max(np.abs(image_sharpen)), clip_limit=0.03)
plt.imshow(image_sharpen_equalized, cmap=plt.cm.gray)
plt.axis('off')
plt.show()
### Convolve the sharpen kernal with an image using Python packages (Scipy) ###
import scipy
# you can use 'valid' instead of 'same', then it will not add zero padding
image_sharpen = scipy.signal.convolve2d(img, kernel, 'same')
#image_sharpen = scipy.signal.convolve2d(img, kernel, 'valid')
print('First 5 columns and rows of the image_sharpen matrix:\n {}'.format(image_sharpen[:5,:5]*255))
### Convolve the sharpen kernal with an image using Python packages (OpenCV) ###
import cv2
image_sharpen = cv2.filter2D(img, -1, kernel)
print('First 5 columns and rows of the image_sharpen matrix:\n {}'.format(image_sharpen[:5,:5]*255))
# Adjust the contrast of the filtered image by applying Histogram Equalization
image_sharpen_equalized = exposure.equalize_adapthist(image_sharpen/np.max(np.abs(image_sharpen)), clip_limit=0.03)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Sep 20 12:25:11 2018
@author: anantgupta
"""
from __future__ import division
# Add classes for Extended Targets
import numpy as np
import numpy.matlib
import sympy as sp
from GAutils import proc_est as pr
from itertools import combinations
from GAutils import config as cfg
from GAutils import PCRLB as pcrlb
from numba import jit
class FMCWprms:
c = 3e8 # Speed of light
def __init__(self, B=0.5e9, Ts=1 / 82e4, fc=6e10, Ni=64, Nch=64): # was 150M, 1.28M (1m, 0.7816m/s); (0.5G,0.82M)->(0.3m,0.5m/s)
self.fc = fc
self.B = B
self.Ts = Ts
self.Ni = Ni
self.Nch = Nch
self.ss = B / Ts / Ni
self.tf = Nch * Ni * Ts
Kr = Ts * self.ss * 4 * np.pi / self.c
Kd = Ni* Ts * fc * 4 * np.pi / self.c
self.FIMr =(Kr**2)*Nch*(Ni/6 * (2 * Ni**2 + 1))
self.FIMd =(Kd**2)*Ni*(Nch/6*(2 * Nch**2 + 1))
# self.x1, self.x2 = np.meshgrid(np.arange(self.Ni)-self.Ni/2,
# np.arange(self.Nch)-self.Nch/2)
def get_tfa(self, ft=1, st=1, center=0):# center was 0
self.x1, self.x2 = np.meshgrid(np.arange(self.Ni)-center*self.Ni/2,
np.arange(self.Nch)-center*self.Nch/2)
tfa = self.Ts * (ft*self.x1 + self.Ni * st * self.x2) # sampling time indices of frame
return tfa
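# Hedged usage sketch (added for illustration, not part of the original code):
#   mcs = FMCWprms()           # default chirp parameters
#   tfa = mcs.get_tfa()        # (Nch, Ni) grid of fast-time/slow-time sample instants
#   print(mcs.tf, tfa.shape)   # frame duration in seconds and the grid shape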
class PointTarget:
# Add more parameters for target
t = 1 # Time variable
def __init__(self, xw, yw, vx, vy, proc_var=0.1, rcs=1):
self.x = xw
self.y = yw
self.vx = vx
self.vy = vy
self.proc_var = proc_var
self.state = [self.x,self.y,self.vx,self.vy]
self.rcs = rcs
class Sensor:
def __init__(self, xw, yw, vx=0, vy=0, ptx=1, mcs=FMCWprms(), meas_std = 0.0):
self.x = xw
self.y = yw
self.vx = vx
self.vy = vy
self.ptx = ptx # Tx power
self.mcs = mcs # FMCW parameters
self.meas_std = meas_std
self.fov = 1 # FOV sin(half beamwidth)
self.crb = self.getCRB()
# print(FIM)
def getCRB(self, scale=[1,1]):
FIMr= self.mcs.FIMr
FIMd = self.mcs.FIMd
sigma= self.meas_std**2
return (sigma/2)*np.array([scale[0]/FIMr, scale[1]/FIMd])
def getnominalCRB(self, nom_snr=-20, scale=[1,1]):
FIMr= self.mcs.FIMr
FIMd = self.mcs.FIMd
return (10**(-nom_snr/10)/2) * np.array([scale[0]/FIMr, scale[1]/FIMd])
class gardEst:
def __init__(self):
self.r = np.array([])# range
self.d = np.array([])# doppler
self.a = np.array([])# angle
self.g = np.array([], dtype='complex')# complex gain
self.ord = np.array([])
def add_Est(cls, g, a, r, d):
cls.ord = np.append(cls.ord, cls.r.shape)
cls.r = np.append(cls.r, r)
cls.d = np.append(cls.d, d)
cls.a = np.append(cls.a, a)
cls.g = np.append(cls.g, g)
def pop(cls, i):
cls.ord = np.delete(cls.ord,i)
cls.r = np.delete(cls.r,i)
cls.d = np.delete(cls.d,i)
cls.a = np.delete(cls.a,i)
cls.g = np.delete(cls.g,i)
class link: # Stores links to ranges in prev sensor and corr. vx's
def __init__(self, indx=[], vxa=[], xa=[], llr=[]):
self.indx = indx
self.vxa = vxa
self.xa = xa
self.llr = llr
class State: #Linked list of states: mean, covariance
def __init__(self, mean, cov):
self.mean = mean
self.cov = cov
self.next = None
class obs_node:
def __init__(self, g, a, r, d, oid, sid=0):
self.g = g
self.a = a
self.r = r
self.d = d
self.oid = oid # Order in observation
self.sid = sid
self.lkf = []
self.lkb = []
self.visited = False
self.used = None
def insert_flink(cls, lk):
cls.lkf.append(lk)
def insert_blink(cls, lk):
cls.lkb.append(lk)
class SignatureTracks: # collection of associated ranges[], doppler[] & estimated vx(scalar).
# Precompute Constant matrices
CRBdict =dict()
# For Extended Kalman Filter Initial covariance
Pinit_getter = pcrlb.CRBconverter()
x, y, vx, vy, sx, rm, dm, sr, sd = sp.symbols('x y vx vy sx rm dm sr sd')
r = sp.sqrt((x-sx)**2+y**2)
d = ((x-sx)*vx+y*vy)/r
# For EKF time update
hk = [sp.lambdify([x,y,vx,vy,sx], r, "numpy"), sp.lambdify([x,y,vx,vy,sx], d, "numpy")]
# hk = [jit(nopython=True)(sp.lambdify([x,y,vx,vy,sx], r, "numpy")), jit(nopython=True)(sp.lambdify([x,y,vx,vy,sx], d, "numpy"))]
# hk = [sp.utilities.lambdify.lambdastr([x,y,vx,vy,sx], r), sp.utilities.lambdify.lambdastr([x,y,vx,vy,sx], d)]
# To Precompute H Matrix
varl = [x, y, vx, vy]
f =[[] for _ in range(2)]
for v1 in range(4):
e = (r.diff(varl[v1]))
# NOTE: Probe analytical expression for FIM element using e.expand()
# f[0].append(jit(nopython=True)(sp.lambdify([x,y,vx,vy,sx], e, "numpy")) )
f[0].append(sp.lambdify([x,y,vx,vy,sx], e) )
for v1 in range(4):
e = (d.diff(varl[v1]))
# NOTE: Probe analytical expression for FIM element using e.expand()
# f[1].append(jit(nopython=True)(sp.lambdify([x,y,vx,vy,sx], e, "numpy") ))
f[1].append(sp.lambdify([x,y,vx,vy,sx], e) )
def __init__(self, r, d, sindx, g=[]):
self.r =[r]
self.d = [d]
self.g = [g]
self.sindx = [sindx] # sensor index
self.state_head = None # Linked List of states: Mean(3x1), D covariance matrix(3x3)
self.state_end = None
self.N=len(self.r)
self.pid =[]# Obs order at sensor
self.llr = 0 # Likelihood of observations
self.gc = None # Geometric fitting error
def get_Pinit(cls, sensors, target): # TODO: Get Pinit in principled manner
xr,yr,vxr,vyr = target.state
Am1 = np.zeros((4,4))
for s, sensor in enumerate(sensors):
crb = sensor.getnominalCRB()
cre = crb[0]
cde = crb[1]
F_mat = np.zeros((4,4))
for v1 in range(4):
for v2 in range(4):
F_mat[v1,v2] = cls.Pinit_getter.f[v1][v2](xr-sensor.x, yr, vxr, vyr, cre, cde)
Am1[:,:] += F_mat
Ami = np.linalg.inv(Am1)
return Ami
# @profile
# def get_newfit_error(cls, sensors, rnew, dnew, gnew, sidnew):
# # Reports geometry fitting error for given R,D pair
# rn = np.append(cls.r, rnew)
# dn = np.append(cls.d, dnew)
# Ns = len(rn)
# sindx_new = np.hstack((cls.sindx,sidnew))
# keyval = tuple(sindx_new)
# L = np.array([sensors[si].x for si in sindx_new])
# H = np.array([[sensors[si].x, 1] for si in sindx_new])
# Me = rn*dn
# Me2_centered = rn*rn - ( L**2 )
# if keyval in cls.CRBdict:
# CRB=cls.CRBdict[keyval]
# else:
# CRB = np.array([sensors[si].getnominalCRB() for i, si in enumerate(sindx_new)]) # Using nominal
# cls.CRBdict[keyval] = CRB
# # Main estimator
# x2, FAA, rank, s = np.linalg.lstsq(H,np.stack((Me, Me2_centered)).T,rcond=None)
# # scaling factors
# M1var = (np.sum( CRB * np.array([dn**2, rn**2]).T,1)
# + np.prod(CRB,1) )
# M2var = (4*CRB[:,0] * np.array( rn**2) + CRB[:,0]**2)# Ignoring higher order terms
# gc = (cfg.rd_wt[0]*([email protected][0])**2/M1var + cfg.rd_wt[1]*(Me2_centered [email protected][1])/M2var)
# return sum(gc)
@staticmethod
@jit(nopython=True, cache = True)
def get_newfit_error_group(r_cand, d_cand, Ngrp, r, d, L, CRB, rd_wt, upper_thres):
# Reports geometry fitting error for given R,D pair
# Ngrp = len(L)
rn = np.outer(np.ones(Ngrp), np.append(r,1))
dn = np.outer(np.ones(Ngrp), np.append(d,1))
rn[:,-1] = r_cand
dn[:,-1] = d_cand
# rn = np.block([rn, np.array([r_cand]).T])
# dn = np.block([dn, np.array([d_cand]).T])
H = np.outer(L, np.ones(2))
H[:,-1] = np.ones(len(L))
Me = (rn)*(dn)
Me2 = (rn)*(rn)
Me2_centered = Me2 - ( L**2 )
# Main estimator
x2, FAA, rank, s = np.linalg.lstsq(H,np.vstack((Me, Me2_centered)).T)
rdest = H@x2
# scaling factors
gc = []
Stns = []
valid_state_ids = []
for tid in range(Ngrp):
M1var = (( CRB[:,0] * dn[tid]**2 + CRB[:,1]* rn[tid]**2) + CRB[:,0]*CRB[:,1])
M2var = (4*CRB[:,0] * ( rn[tid]**2) + CRB[:,0]**2)# Ignoring higher order terms
# gc_val = np.sum(rd_wt[0]*(Me[tid][email protected][tid])**2/M1var + rd_wt[1]*(Me2_centered[tid] [email protected][Ngrp+tid])/M2var)
gc_val = np.sum(rd_wt[0]*(Me[tid]-rdest[:,tid])**2/M1var + rd_wt[1]*(Me2_centered[tid] -rdest[:,Ngrp+tid])**2/M2var)
if gc_val<upper_thres:
gc.append(gc_val)
# Compute state
v_hat = -x2.T[tid][0]
x_hat = -x2.T[Ngrp+tid][0]/2
xsa = x_hat - L
y_est = np.sqrt(abs(np.mean(Me2[tid] - xsa **2))) # TODO: handle negative value properly
vy_est = np.mean(Me[tid] - v_hat*xsa) / y_est # Estimated using other estimates
Stn = np.array([x_hat, y_est, v_hat, vy_est])
Stns.append(Stn)
valid_state_ids.append(tid)
return gc, Stns, valid_state_ids
def get_newfit_error_grp(cls, sensors, tnd_grp, sidnew, upper_thres):
# Reports geometry fitting error for given R,D pair
Ngrp = len(tnd_grp)
r_cand = np.array([tnd.r for tnd in tnd_grp])
d_cand = np.array([tnd.d for tnd in tnd_grp])
sindx_new = np.hstack((cls.sindx,sidnew))
L = np.array([sensors[si].x for si in sindx_new])
keyval = tuple(sindx_new)
if keyval in cls.CRBdict:
CRB=cls.CRBdict[keyval]
else:
CRB = np.array([sensors[si].getnominalCRB() for i, si in enumerate(sindx_new)]) # Using nominal
cls.CRBdict[keyval] = CRB
gc, Stns, valid_state_ids = cls.get_newfit_error_group(r_cand, d_cand, Ngrp, cls.r, cls.d, L, CRB, np.array(cfg.rd_wt), upper_thres)
Pn = np.diag([1, 1, 1, 1])
states = [State(Stn, Pn) for Stn in Stns]
return gc, states, valid_state_ids
def get_newfit_error_nn(cls, sensors, tnd_grp, sindx, upper_thres):
Ngrp = len(tnd_grp)
Rk = np.diag(sensors[sindx].getnominalCRB())
sensx = sensors[sindx].x
gc = []
states = []
valid_state_ids = []
if cls.N>1: # Fetch previous State
Stp = cls.state_end.mean
if cls.N>2:
Pp = cls.state_end.cov
else:
Pp = cls.get_Pinit(sensors, PointTarget(*Stp))
Hk = np.zeros((2,4))
for i in range(2):
for j in range(4):
Hk[i,j] = cls.f[i][j](Stp[0],Stp[1],Stp[2],Stp[3],sensx)
Ik = Hk @ Pp @ Hk.T + Rk # Innovation covariance (2x2)
try:
Kk = Pp @ Hk.T @ np.linalg.inv(Ik) # Kalman Gain (4x2)
except np.linalg.linalg.LinAlgError as err:
return gc, states, valid_state_ids
yhk = np.array([cls.hk[i](Stp[0],Stp[1],Stp[2],Stp[3],sensx) for i in range(2)])
Pn = (np.eye(4) - Kk@Hk) @ Pp @ (np.eye(4) - Kk@Hk) + Kk @ Rk @ Kk.T
for tid in range(Ngrp):
try:
                    yk = np.array([tnd_grp[tid].r, tnd_grp[tid].d]) # Measurement for this candidate
Stn = Stp + Kk @ (yk - yhk)
gc.append(np.inner((yk - yhk), np.linalg.inv(Ik)@(yk - yhk)))
valid_state_ids.append(tid)
states.append(Stn)
except: # If any degenerate case occurs
gc.append(np.inf)
valid_state_ids.append(tid)
states.append(None)
return gc, states, valid_state_ids
# @jit(nopython=True)# @profile
def get_state(cls, sensors):
# Evaluate target kinematic state and corresp. fitting error
Ns = cls.N
r = np.array(cls.r)
d = np.array(cls.d)
sindx_new = cls.sindx
keyval = tuple(sindx_new)
if keyval in cls.CRBdict:
CRB=cls.CRBdict[keyval]
else:
CRB = np.array([sensors[si].getnominalCRB() for i, si in enumerate(sindx_new)]) # Using nominal
cls.CRBdict[keyval] = CRB
L = np.array([sensors[si].x for si in cls.sindx])
gc, Stns, valid_state_ids = cls.get_newfit_error_group(cls.r[-1], cls.d[-1], 1, np.array(cls.r[0:-1]), np.array(cls.d[0:-1]), L, CRB, np.array(cfg.rd_wt), np.inf)
Pn = np.diag([1, 1, 1, 1])
new_state = State(Stns[0], Pn)
cls.gc = gc[0]
# H = np.array([[sensors[si].x, 1] for si in sindx_new])
# Me = r * d
# Me2 = r * r
# Me2_centered = Me2 - ( L**2 )
# if keyval in cls.CRBdict:
# CRB=cls.CRBdict[keyval]
# else:
# CRB = np.array([sensors[si].getnominalCRB() for i, si in enumerate(sindx_new)]) # Using nominal
# cls.CRBdict[keyval] = CRB
# # scaling factors
# M1var = (np.sum( CRB * np.array([d**2, r**2]).T,1)
# + np.prod(CRB,1) )
# M2var = (4*CRB[:,0] * np.array( r**2) + CRB[:,0]**2)# Ignoring higher order terms
# # Main estimator
# x2, FAA, rank, s = np.linalg.lstsq(H,np.stack((Me, Me2_centered)).T,rcond=None)
# v_hat = -x2[0][0]
# x_hat = -x2[0][1]/2
# cls.gc = sum(cfg.rd_wt[0]*([email protected][0])**2/M1var + cfg.rd_wt[1]*(Me2_centered [email protected][1])/M2var)
# xsa = x_hat - L
# y_est = np.sqrt(abs(np.mean(Me2 - xsa **2))) # TODO: handle negative value properly
# vy_est = np.mean(Me - v_hat*xsa) / y_est # Estimated using other estimates
# Stn = np.array([x_hat, y_est, v_hat, vy_est])
# # Pn = np.diag([g_nu, g_nu2])
# Pn = np.diag([1, 1, 1, 1])
# new_state = State(Stn, Pn)
return new_state
# def get_newfit_error_ekf(cls, sensors, rnew, dnew, gnew, sindx):
# Rk = np.diag(sensors[sindx].getnominalCRB())
# if cls.N>1: # Fetch previous State
# Stp = cls.state_end.mean
# if cls.N>2:
# Pp = cls.state_end.cov
# else:
# Pp = cls.get_Pinit(sensors, PointTarget(*Stp))
# Hk = np.zeros((2,4))
# for i in range(2):
# for j in range(4):
# Hk[i,j] = cls.f[i][j](Stp[0],Stp[1],Stp[2],Stp[3],sensors[sindx].x)
# Ik = Hk @ Pp @ Hk.T + Rk # Innovation covariance (2x2)
# try:
# # Kk = Pp @ Hk.T @ np.linalg.inv(Ik) # Kalman Gain (4x2)
# yk = np.array([rnew, dnew]) # Measurement
# yhk = np.array([cls.hk[i](Stp[0],Stp[1],Stp[2],Stp[3],sensors[sindx].x) for i in range(2)])
# # Stn = Stp + Kk @ (yk - yhk)
# # Pn = (np.eye(4) - Kk@Hk) @ Pp @ (np.eye(4) - Kk@Hk) + Kk @ Rk @ Kk.T
# return np.inner((yk - yhk), np.linalg.inv(Ik)@(yk - yhk))
# except: # If any degenerate case occurs
# return np.inf
# else: # Compute initial covariance
# return 1
def add_update3(cls, rs, ds, gs, sindx, sensors, new_state=None, gcc = None):
# Dual cost method
# TODO: maintain covariance matrix
rp0 = cls.r[0]
dp0 = cls.d[0]
Np = cls.N
sindxp0 = cls.sindx[0]
# compute x, y, vx from all obs can be used to update state)
cls.r = np.append(cls.r, rs)
cls.d = np.append(cls.d, ds)
cls.g = np.append(cls.g, gs)
cls.sindx = np.append(cls.sindx, sindx)
cls.N = cls.N+1
# Update previous covariance
if Np > 1:
if new_state is None:
new_state = cls.get_state(sensors)
else:
cls.gc = gcc# /cls.N*np.ones(cls.N) # Fake geometric cost
cls.state_end.next = new_state
cls.state_end = new_state
else:
            Pn = np.zeros((2,2))
from __future__ import division, absolute_import, print_function
import numpy as np
try:
from scipy.spatial import cKDTree, KDTree
except ImportError:
pass
try:
from scipy.spatial import distance
except ImportError:
pass
try:
from scipy.spatial import ConvexHull, Voronoi
except ImportError:
pass
try:
from scipy.spatial import SphericalVoronoi
except ImportError:
pass
from .common import Benchmark, LimitedParamBenchmark
class Build(Benchmark):
params = [
[(3,10000,1000), (8,10000,1000), (16,10000,1000)],
['KDTree', 'cKDTree'],
]
param_names = ['(m, n, r)', 'class']
def setup(self, mnr, cls_name):
self.cls = KDTree if cls_name == 'KDTree' else cKDTree
m, n, r = mnr
np.random.seed(1234)
self.data = np.concatenate((np.random.randn(n//2,m),
np.random.randn(n-n//2,m)+np.ones(m)))
self.queries = np.concatenate((np.random.randn(r//2,m),
np.random.randn(r-r//2,m)+np.ones(m)))
def time_build(self, mnr, cls_name):
"""
Constructing kd-tree
=======================
dim | # points | time
"""
m, n, r = mnr
if cls_name == 'cKDTree_flat':
self.T = self.cls(self.data, leafsize=n)
else:
self.cls(self.data)
LEAF_SIZES = [8, 128]
BOX_SIZES = [None, 0.0, 1.0]
class Query(LimitedParamBenchmark):
params = [
[(3,10000,1000), (8,10000,1000), (16,10000,1000)],
[1, 2, np.inf],
BOX_SIZES, LEAF_SIZES,
]
param_names = ['(m, n, r)', 'p', 'boxsize', 'leafsize']
num_param_combinations = 21
@staticmethod
def do_setup(self, mnr, p, boxsize, leafsize):
m, n, r = mnr
np.random.seed(1234)
self.data = np.random.uniform(size=(n, m))
self.queries = np.random.uniform(size=(r, m))
self.T = cKDTree(self.data, leafsize=leafsize, boxsize=boxsize)
def setup(self, mnr, p, boxsize, leafsize):
LimitedParamBenchmark.setup(self, mnr, p, boxsize, leafsize)
Query.do_setup(self, mnr, p, boxsize, leafsize)
def time_query(self, mnr, p, boxsize, leafsize):
"""
Querying kd-tree
dim | # points | # queries | KDTree | cKDTree | flat cKDTree
"""
self.T.query(self.queries, p=p)
# Retain old benchmark results (remove this if changing the benchmark)
time_query.version = "327bc0627d5387347e9cdcf4c52a550c813bb80a859eeb0f3e5bfe6650a8a1db"
class Radius(LimitedParamBenchmark):
params = [
[(3,10000,1000)],
[1, 2, np.inf],
[0.2, 0.5],
BOX_SIZES, LEAF_SIZES,
]
param_names = ['(m, n, r)', 'p', 'probe radius', 'boxsize', 'leafsize']
num_param_combinations = 7
def __init__(self):
self.time_query_pairs.__func__.params = list(self.params)
self.time_query_pairs.__func__.params[0] = [(3,1000,30),
(8,1000,30),
(16,1000,30)]
self.time_query_ball_point.__func__.setup = self.setup_query_ball_point
self.time_query_pairs.__func__.setup = self.setup_query_pairs
def setup(self, *args):
pass
def setup_query_ball_point(self, mnr, p, probe_radius, boxsize, leafsize):
LimitedParamBenchmark.setup(self, mnr, p, probe_radius, boxsize, leafsize,
param_seed=3)
Query.do_setup(self, mnr, p, boxsize, leafsize)
def setup_query_pairs(self, mnr, p, probe_radius, boxsize, leafsize):
# query_pairs is fast enough so we can run all parameter combinations
Query.do_setup(self, mnr, p, boxsize, leafsize)
def time_query_ball_point(self, mnr, p, probe_radius, boxsize, leafsize):
self.T.query_ball_point(self.queries, probe_radius, p=p)
def time_query_ball_point_nosort(self, mnr, p, probe_radius, boxsize, leafsize):
self.T.query_ball_point(self.queries, probe_radius, p=p,
return_sorted=False)
def time_query_pairs(self, mnr, p, probe_radius, boxsize, leafsize):
self.T.query_pairs(probe_radius, p=p)
# Retain old benchmark results (remove this if changing the benchmark)
time_query_ball_point.version = "e0c2074b35db7e5fca01a43b0fba8ab33a15ed73d8573871ea6feb57b3df4168"
time_query_pairs.version = "cf669f7d619e81e4a09b28bb3fceaefbdd316d30faf01524ab33d41661a53f56"
class Neighbors(LimitedParamBenchmark):
params = [
[(3,1000,1000),
(8,1000,1000),
(16,1000,1000)],
[1, 2, np.inf],
[0.2, 0.5],
BOX_SIZES, LEAF_SIZES,
['cKDTree', 'cKDTree_weighted'],
]
param_names = ['(m, n1, n2)', 'p', 'probe radius', 'boxsize', 'leafsize', 'cls']
num_param_combinations = 17
def setup(self, mn1n2, p, probe_radius, boxsize, leafsize, cls):
LimitedParamBenchmark.setup(self, mn1n2, p, probe_radius, boxsize, leafsize, cls)
m, n1, n2 = mn1n2
self.data1 = np.random.uniform(size=(n1, m))
self.data2 = np.random.uniform(size=(n2, m))
self.w1 = np.ones(n1)
        self.w2 = np.ones(n2)
# -*- coding:utf-8 -*-
from src.utils import *
import numpy as np
import tensorflow as tf
from collections import deque
import gym
class CategoricalDQNAgent:
def __init__(self, config, base_network):
self.config = config
self.base_network = base_network
self.input_dim = config.input_dim # neural network input dimension
self.n_atoms = config.categorical_n_atoms
self.vmin = config.categorical_Vmin
self.vmax = config.categorical_Vmax
self.atoms = np.linspace(
config.categorical_Vmin,
config.categorical_Vmax,
config.categorical_n_atoms,
) # Z
self.envs = None
self.actor_network = self.base_network.nn_model()
self.target_network = tf.keras.models.clone_model(self.actor_network)
self.target_network.set_weights(self.actor_network.get_weights())
self.total_steps = 0
self.episodes = config.episodes
self.steps = config.steps
self.batch_size = config.batch_size
self.replay_buffer_size = config.replay_buffer_size
self.replay_buffer = deque()
self.delta_z = (config.categorical_Vmax - config.categorical_Vmin) / float(config.categorical_n_atoms - 1)
self.keras_check = config.keras_checkpoint
self.check_model_improved = 0
self.best_max = 0
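        # Illustrative note (added; the actual Vmin/Vmax/n_atoms come from the
        # config, the numbers below are only an example): with Vmin = -10,
        # Vmax = 10 and 51 atoms the support is atoms = [-10.0, -9.6, ..., 10.0]
        # and delta_z = (10 - (-10)) / (51 - 1) = 0.4.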
def transition(self):
"""
In transition, the agent simply plays and record
[current_state, action, reward, next_state, done]
in the replay_buffer (or memory pool)
Updating the weights of the neural network happens
every single time the replay buffer size is reached.
done: boolean, whether the game has end or not.
"""
for each_ep in range(self.episodes):
current_state = self.envs.reset()
print('Episode: {} Reward: {} Max_Reward: {}'.format(each_ep, self.check_model_improved, self.best_max))
print('-' * 64)
self.check_model_improved = 0
for step in range(self.steps):
# reshape the input state to a tensor ===> Network ===> action probabilities
# size = (1, action dimension, number of atoms)
# e.g. size = (1, 2, 51)
action_prob, _ = self.actor_network.predict(
np.array(current_state).reshape((1, self.input_dim[0], self.input_dim[1])))
# calculate action value (Q-value)
action_value = np.dot(np.array(action_prob), self.atoms)
                # choose action according to the epsilon-greedy policy
action = policies.epsilon_greedy(action_values=action_value[0],
episode=each_ep,
stop_explore=self.config.stop_explore,
total_actions=self.config.action_dim)
next_state, reward, done, _ = self.envs.step(action=action)
# record the per step history into replay buffer
self.replay_buffer.append([current_state.reshape(self.input_dim).tolist(), action,
next_state.reshape(self.input_dim).tolist(), reward, done])
# when we collect certain number of batches, perform replay and
# update the weights in the actor network (Backpropagation)
# reset the replay buffer
if len(self.replay_buffer) == self.replay_buffer_size:
self.train_by_replay()
self.replay_buffer.clear()
# if episode is finished, break the inner loop
# otherwise, continue
if done:
self.total_steps += 1
break
else:
current_state = next_state
self.total_steps += 1
self.check_model_improved += reward
# for any episode where the reward is higher
# we copy the actor network weights to the target network
if self.check_model_improved > self.best_max:
self.best_max = self.check_model_improved
self.target_network.set_weights(self.actor_network.get_weights())
def train_by_replay(self):
"""
TD update by replaying the history.
Implementation of algorithm 1 in the paper.
"""
# step 1: generate replay samples (size = self.batch_size) from the replay buffer
# e.g. uniform random replay or prioritize experience replay
current_states, actions, next_states, rewards, terminals = \
replay_fn.uniform_random_replay(self.replay_buffer, self.batch_size)
# step 2:
# generate next state probability, size = (batch_size, action_dimension, number_of_atoms)
# e.g. (32, 2, 51) where batch_size = 32,
# each batch contains 2 actions,
# each action distribution contains 51 bins.
prob_next, _ = self.target_network.predict(next_states)
# step 3:
# calculate next state Q values, size = (batch_size, action_dimension, 1).
# e.g. (32, 2, 1), each action has one Q value.
# then choose the higher value out of the 2 for each of the 32 batches.
action_value_next = np.dot(np.array(prob_next), self.atoms)
action_next = np.argmax(action_value_next, axis=1)
# step 4:
# use the optimal actions as index, pick out the probabilities of the optimal action
prob_next = prob_next[np.arange(self.batch_size), action_next, :]
# match the rewards from the memory to the same size as the prob_next
rewards = np.tile(rewards.reshape(self.batch_size, 1), (1, self.n_atoms))
# perform TD update
discount_rate = self.config.discount_rate * (1 - terminals)
atoms_next = rewards + np.dot(discount_rate.reshape(self.batch_size, 1),
self.atoms.reshape(1, self.n_atoms))
# constrain atoms_next to be within Vmin and Vmax
atoms_next = np.clip(atoms_next, self.vmin, self.vmax)
# calculate the floors and ceilings of atom_next
b = (atoms_next - self.config.categorical_Vmin) / self.delta_z
l, u = np.floor(b).astype(int), np.ceil(b).astype(int)
# it is important to check if l == u, to avoid histogram collapsing.
d_m_l = (u + (l == u) - b) * prob_next
d_m_u = (b - l) * prob_next
# step 5: redistribute the target probability histogram (calculation of m)
# Note that there is an implementation issue
# The loss function requires current histogram and target histogram to have the same size
# Generally, the loss function should be the categorical cross entropy loss between
# P(x, a*): size = (32, 1, 51) and P(x(t+1), a*): size = (32, 1, 51), i.e. only for optimal actions
# However, the network generates P(x, a): size = (32, 2, 51), i.e. for all actions
# Therefore, I create a tensor with zeros (size = (32, 2, 51)) and update only the probability histogram
target_histo = np.zeros(shape=(self.batch_size, self.n_atoms))
for i in range(self.batch_size):
target_histo[i][action_next[i]] = 0.0 # clear the histogram that needs to be updated
np.add.at(target_histo[i], l[i], d_m_l[i]) # update d_m_l
            np.add.at(target_histo[i], u[i], d_m_u[i])  # update d_m_u at the ceiling indices
# update actor network weights
self.actor_network.fit(x=current_states, y=target_histo, verbose=2, callbacks=self.keras_check)
def eval_step(self, render=True):
"""
Evaluation using the trained target network, no training involved
:param render: whether to visualize the evaluation or not
"""
for each_ep in range(self.config.evaluate_episodes):
current_state = self.envs.reset()
print('Episode: {} Reward: {} Training_Max_Reward: {}'.format(each_ep, self.check_model_improved,
self.best_max))
print('-' * 64)
self.check_model_improved = 0
for step in range(self.steps):
action_prob, _ = self.target_network.predict(
np.array(current_state).reshape((1, self.input_dim[0], self.input_dim[1])))
action_value = np.dot(np.array(action_prob), self.atoms)
action = | np.argmax(action_value[0]) | numpy.argmax |
"""
isicarchive.imfunc
This module provides image helper functions and doesn't have to be
imported from outside the main package functionality (IsicApi).
Functions
---------
color_superpixels
    Paint the pixels belonging to a superpixel list in a specific color
column_period
Guess periodicity of data (image) column
display_image
Display an image (in a Jupyter notebook!)
image_compose
Compose an image from parts
image_corr
Correlate pixel values across two images
image_crop
Crop an image according to coordinates (or superpixel index)
image_dice
Compute DICE coefficient of two images
image_gradient
Compute image gradient (and components)
image_gray
Generate gray-scale version of image
image_mark_border
Mark border pixels of image with encoded content (string, bytes)
image_mark_pixel
Mark pixel in image border
image_mark_word
Mark set of pixels (word) in image border
image_mix
    Mix two (RGB or gray) images, with either max or blending
image_overlay
Mix an RGB image with a heatmap overlay (resampled)
image_read_border
Read encoded image border
image_register
Perform rigid-body alignment of images based on gradient
image_resample
Cheap (!) resampling of an image
image_rotate
Rotate an image (ndarray)
lut_lookup
Color lookup from a table (LUT)
segmentation_outline
Extract outline from a segmentation mask image
superpixel_dice
Compute DICE coefficient for superpixel lists
superpixel_neighbors
Generate neighbors lists for each superpixel in an image
superpixel_outlines
Extract superpixel (outline) shapes from superpixel map
superpixel_values
Return the values of a superpixel
write_image
Write an image to file or buffer (bytes)
"""
# specific version for file
__version__ = '0.4.11'
# imports (needed for majority of functions)
from typing import Any, List, Optional, Tuple, Union
import warnings
import numpy
from .vars import ISIC_DICE_SHAPE, ISIC_FUNC_PPI, ISIC_IMAGE_DISPLAY_SIZE_MAX
# color superpixels in an image
def color_superpixels(
image:Union[numpy.ndarray, Tuple],
splst:Union[list, numpy.ndarray],
spmap:numpy.ndarray,
color:Union[list, numpy.ndarray],
alpha:Union[float, numpy.float, list, numpy.ndarray] = 1.0,
almap:numpy.ndarray = None,
spval:Union[float, numpy.float, list, numpy.ndarray, None] = None,
copy_image:bool = False) -> numpy.ndarray:
"""
    Paint the pixels belonging to a superpixel list in a specific color.
Parameters
----------
image : numpy.ndarray or 2- or 3-element Tuple with image size
Image to be colored, if shape tuple, will be all 0 (black)
splst : list or flat numpy.ndarray
List of superpixels to color in the image
spmap : numpy.ndarray
Mapping array from func.superpixels_map(...)
color : either a list or numpy.ndarray
RGB Color code or list of codes to use to color superpixels
alpha : either float or numpy.float value or None
Alpha (opacity) value between 0.0 and 1.0, if None, set to 1.0
spval : optional numpy.ndarray
Per-pixel opacity value (e.g. confidence, etc.)
copy_image : bool
Copy the input image prior to painting, default: False
Returns
-------
image : numpy.ndarray
Image with superpixels painted
"""
# check inputs
if isinstance(image, tuple):
if len(image) == 2 and (isinstance(image[0], int) and
isinstance(image[1], int)):
im_shape = image
image = numpy.zeros(image[0] * image[1], dtype=numpy.uint8)
elif len(image) == 3 and (isinstance(image[0], int) and
isinstance(image[1], int) and isinstance(image[2], int) and
(image[2] == 1 or image[2] == 3)):
im_shape = image
image = numpy.zeros(image[0] * image[1] * image[2],
dtype=numpy.uint8).reshape((image[0] * image[1], image[2]))
else:
raise ValueError('Invalid image shape.')
copy_image = False
else:
im_shape = image.shape
num_cols = im_shape[1]
has_almap = False
if not almap is None:
if almap.size != (im_shape[0] * im_shape[1]):
raise ValueError('Invalid alpha map.')
has_almap = True
am_shape = almap.shape
try:
almap.shape = (almap.size,)
except:
raise
if copy_image:
image = numpy.copy(image)
if len(im_shape) == 3 or im_shape[1] > 3:
planes = im_shape[2] if len(im_shape) == 3 else 1
else:
if len(im_shape) > 1:
planes = im_shape[1]
else:
planes = 1
image.shape = (im_shape[0] * im_shape[1], planes)
has_alpha = False
if planes > 3:
planes = 3
has_alpha = True
numsp = len(splst)
if spval is None:
spval = numpy.ones(numsp, dtype=numpy.float32)
elif isinstance(spval, float) or isinstance(spval, numpy.float):
spval = spval * numpy.ones(numsp, dtype=numpy.float32)
elif len(spval) != numsp:
spval = numpy.ones(numsp, dtype=numpy.float32)
if len(color) == 3 and isinstance(color[0], int):
color = [color] * numsp
if alpha is None:
alpha = 1.0
if isinstance(alpha, float):
alpha = [alpha] * numsp
if isinstance(alpha, list):
if len(alpha) != numsp:
raise ValueError('alpha list must match number of superpixels')
sp_skip = 6.0 * numpy.trunc(0.75 + 0.25 * numpy.sqrt([
im_shape[0] * im_shape[1] / spmap.shape[0]]))[0]
# for each superpixel (index)
for idx in range(numsp):
# get pixel indices, compute inverse alpha, and then set pixel values
spcol = color[idx]
singlecol = False
num_colors = 1
if isinstance(spcol, list):
if isinstance(spcol[0], int):
singlecol = True
else:
num_colors = len(spcol)
elif isinstance(spcol, numpy.ndarray):
if spcol.size == 3:
singlecol = True
else:
num_colors = spcol.shape[0]
if num_colors > 6:
num_colors = 6
spalpha = alpha[idx]
if isinstance(spalpha, float) and not singlecol:
spalpha = [spalpha] * num_colors
spidx = splst[idx]
spnum = spmap[spidx, -1]
sppidx = spmap[spidx, 0:spnum]
if singlecol:
spalpha = spalpha * spval[idx]
spinv_alpha = 1.0 - spalpha
for p in range(planes):
if spalpha == 1.0:
image[sppidx, p] = spcol[p]
else:
image[sppidx, p] = numpy.round(
spalpha * spcol[p] + spinv_alpha * image[sppidx, p])
if has_alpha:
image[sppidx, 3] = numpy.round(255.0 * 1.0 -
(1.0 - 255.0 * image[sppidx, 3]) *
(1.0 - 255.0 * spalpha))
elif has_almap:
almap[sppidx] = 1.0 - (1.0 - almap[sppidx]) * spinv_alpha
else:
sppval = spval[idx]
if not (isinstance(sppval, list) or isinstance(sppval, numpy.ndarray)):
sppval = [sppval] * num_colors
elif len(sppval) < num_colors:
sppval = [sppval[0]] * num_colors
sppidxx = sppidx % num_cols
sppidxy = sppidx // num_cols
float_num = float(num_colors)
spcidx = numpy.trunc(0.5 + (sppidxx + sppidxy).astype(numpy.float) *
(float_num / sp_skip)).astype(numpy.int32) % num_colors
for cc in range(num_colors):
spcsel = spcidx == cc
spcidxxy = sppidxx[spcsel] + sppidxy[spcsel] * num_cols
spccol = spcol[cc]
spcalpha = spalpha[cc] * sppval[cc]
spinv_alpha = 1.0 - spcalpha
for p in range(planes):
if spcalpha == 1.0:
image[spcidxxy, p] = spccol[p]
else:
image[spcidxxy, p] = numpy.round(
spcalpha * spccol[p] + spinv_alpha * image[spcidxxy, p])
if has_alpha:
image[spcidxxy, 3] = numpy.round(255.0 * 1.0 -
(1.0 - 255.0 * image[spcidxxy, 3]) *
(1.0 - 255.0 * spcalpha))
elif has_almap:
almap[spcidxxy] = 1.0 - (1.0 - almap[spcidxxy]) * spinv_alpha
image.shape = im_shape
if has_almap:
almap.shape = am_shape
return image
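# Usage sketch, assuming an RGB uint8 image `img` and a superpixel mapping
# `spmap` from func.superpixels_map(...) as referenced in the docstring above
# (illustrative names, not defined in this module):
#
#   marked = color_superpixels(img, [0, 5, 9], spmap, [255, 0, 0],
#       alpha=0.5, copy_image=True)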
# column period
def column_period(c:numpy.ndarray, thresh:int=0):
"""
Guess the periodicity of a column of (image) data
Parameters
----------
c : ndarray
Column of data (e.g. pixel values)
thresh : int
Optional threshold (default: 0)
Returns
-------
p : int (or float)
Guessed periodicity
"""
cc = numpy.zeros(c.size//2)
for ck in range(1, cc.size):
cc[ck] = numpy.corrcoef(c[:-ck],c[ck:])[0,1]
cc[numpy.isnan(cc)] = 0.0
ccc = numpy.zeros(cc.size//2)
for ck in range(3, ccc.size):
ccc[ck-1] = numpy.corrcoef(cc[1:-ck], cc[ck:-1])[0,1]
ccc[numpy.isnan(ccc)] = -1.0
ccs = numpy.argsort(-ccc)
ccsv = numpy.median(ccc[ccs[0:3]]) * 0.816
ccsl = numpy.sort(ccs[ccc[ccs]>=ccsv])
while thresh > 0 and len(ccsl) > 1 and ccsl[0] < thresh:
ccsl = ccsl[1:]
if len(ccsl) == 1:
return ccsl[0]
while len(ccsl) > 3 and ccsl[0] < ccsl[1] // 3:
ccsl = ccsl[1:]
ccsy = ccsl[-1]
ccsx = ccsl[0]
ccsr = ccsy % ccsx
if ccsr == 0:
return ccsx
if ccsx - ccsr < (ccsx // 4):
ccsr = ccsx - ccsr
if ccsr < (ccsx // 4) and ccsx >= 6 and len(ccsl) > 3:
ccst = ccsl.astype(numpy.float64) / float(ccsx)
ccsi = numpy.trunc(ccst + 0.5)
ccsd = float(ccsx) * (ccst - ccsi)
ccsx = float(ccsx) + numpy.sum(ccsd) / numpy.sum(ccsi)
return ccsx
while ccsy % ccsx != 0:
(ccsy, ccsx) = (ccsx, ccsy % ccsx)
return ccsx
# display image
def display_image(
image_data:Union[bytes, str, numpy.ndarray],
image_shape:Tuple = None,
max_size:int = ISIC_IMAGE_DISPLAY_SIZE_MAX,
library:str = 'matplotlib',
ipython_as_object:bool = False,
mpl_axes:object = None,
**kwargs,
) -> Optional[object]:
"""
Display image in a Jupyter notebook; supports filenames, bytes, arrays
Parameters
----------
image_data : bytes, str, ndarray/imageio Array
Image specification (file data, filename, or image array)
image_shape : tuple
Image shape (necessary if flattened array!)
max_size : int
Desired maximum output size on screen
library : str
Either 'matplotlib' (default) or 'ipython'
mpl_axes : object
Optional existing matplotlib axes object
    Returns
    -------
    Optional[object]
        The ipywidgets Image object if library='ipython' and
        ipython_as_object is True; otherwise None
"""
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
import imageio
# check inputs
if image_data is None:
return
if not isinstance(library, str):
raise ValueError('Invalid library selection.')
library = library.lower()
if not library in ['ipython', 'matplotlib']:
raise ValueError('Invalid library selection.')
if (isinstance(image_data, numpy.ndarray) or
isinstance(image_data, imageio.core.util.Array)):
if library == 'ipython':
try:
image_data = write_image(image_data, 'buffer', 'jpg')
except:
raise
elif isinstance(image_data, str) and (len(image_data) < 256):
try:
with open(image_data, 'rb') as image_file:
image_data = image_file.read()
except:
raise
if library == 'matplotlib' and isinstance(image_data, bytes):
try:
image_data = imageio.imread(image_data)
except:
raise
if not isinstance(max_size, int) or (max_size < 32) or (max_size > 5120):
max_size = ISIC_IMAGE_DISPLAY_SIZE_MAX
if image_shape is None:
try:
if library == 'ipython':
image_array = imageio.imread(image_data)
image_shape = image_array.shape
else:
image_shape = image_data.shape
except:
raise
image_height = image_shape[0]
image_width = image_shape[1]
image_max_xy = max(image_width, image_height)
shrink_factor = max(1.0, image_max_xy / max_size)
image_width = int(image_width / shrink_factor)
image_height = int(image_height / shrink_factor)
# depending on library call appropriate function
if library == 'ipython':
# IMPORT DONE HERE TO SAVE TIME BETWEEN LIBRARY CHOICES
from ipywidgets import Image as ipy_Image
from IPython.display import display as ipy_display
try:
image_out = ipy_Image(value=image_data,
width=image_width, height=image_height)
if not ipython_as_object:
ipy_display(image_out)
return None
return image_out
except Exception as e:
warnings.warn('Problem producing image for display: ' + str(e))
return None
else:
# IMPORT DONE HERE TO SAVE TIME BETWEEN LIBRARY CHOICES
import matplotlib
import matplotlib.pyplot as mpl_pyplot
try:
display_width = image_width / ISIC_FUNC_PPI
display_height = image_height / ISIC_FUNC_PPI
if mpl_axes is None:
if 'figsize' in kwargs:
mpl_pyplot.figure(figsize=kwargs['figsize'])
else:
mpl_pyplot.figure(figsize=(display_width, display_height))
ax_img = mpl_pyplot.imshow(image_data,
interpolation='hanning')
ax_img.axes.set_axis_off()
mpl_pyplot.show()
else:
mpl_axes.imshow(image_data)
except Exception as e:
warnings.warn('Problem producing image for display: ' + str(e))
return None
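# Usage sketch ('lesion.jpg' and `img` are illustrative, not part of this
# module):
#
#   display_image('lesion.jpg', max_size=720, library='matplotlib')
#   # or, from an already loaded ndarray:
#   display_image(img, library='ipython')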
# image center ([y,x coord] * 0.5)
def image_center(image:numpy.ndarray) -> numpy.ndarray:
try:
imsh = image.shape
return 0.5 * numpy.asarray([imsh[0], imsh[1]]).astype(numpy.float64)
except:
raise
# image composition (from other images)
def image_compose(
imlist:list,
outsize:Tuple,
bgcolor:list = [255,255,255],
) -> numpy.ndarray:
"""
Compose image from parts
Parameters
----------
imlist : list
List of image parts, each element a 3-element list with
image (ndarray), x- and y-position in the output image
outsize : Tuple
Size of output image
bgcolor : list
3-element list, default: [255, 255, 255] (white)
Returns
-------
out_image : ndarray
Output image composed of input images
"""
if not isinstance(outsize, tuple) and not isinstance(outsize, list):
raise ValueError('Invalid outsize parameter.')
if (len(outsize) != 2 or not isinstance(outsize[0], int) or
not isinstance(outsize[1], int) or outsize[0] < 1 or
        outsize[1] < 1 or (outsize[0] * outsize[1] > 16777216)):
raise ValueError('Invalid image dimensions in outsize parameter.')
# generate output
out = numpy.zeros(3 * outsize[0] * outsize[1], dtype=numpy.uint8).reshape(
(outsize[1], outsize[0], 3,))
im_shape = out.shape
# set background color
if (isinstance(bgcolor, tuple) or isinstance(bgcolor, list)) and len(bgcolor) == 3:
try:
out[:,:,0] = bgcolor[0]
except:
pass
try:
out[:,:,1] = bgcolor[1]
except:
pass
try:
out[:,:,2] = bgcolor[2]
except:
pass
    # iterate over the image parts
for ii in imlist:
# if not a minimally formatted list
if not isinstance(ii, list) or len(ii) < 3:
continue
        # get image and input shape, check dims
ii_image = ii[0]
ii_shape = ii_image.shape
if len(ii_shape) < 2 or len(ii_shape) > 3:
continue
elif len(ii_shape) == 3 and not ii_shape[2] in [1, 3]:
continue
# get target position (top left)
ii_x = ii[1]
ii_y = ii[2]
if ii_x >= im_shape[1] or ii_y >= im_shape[0]:
continue
# and process alpha
if len(ii) == 3:
ii_alpha = 1.0
else:
ii_alpha = ii[3]
if not (isinstance(ii_alpha, float) or isinstance(ii_alpha, numpy.ndarray)):
continue
if isinstance(ii_alpha, float):
if ii_alpha <= 0.0:
continue
if ii_alpha > 1.0:
ii_alpha = 1.0
else:
if ii_alpha.ndim != 2:
continue
if ii_alpha.shape[0] != im_shape[0] or ii_alpha.shape[1] != im_shape[1]:
continue
ii_alpha[ii_alpha < 0.0] = 0.0
ii_alpha[ii_alpha > 1.0] = 1.0
# resizing of image
if len(ii) > 5 and ((isinstance(ii[4], int) and isinstance(ii[5], int)) or
(isinstance(ii[4], float) and isinstance(ii[5], float))):
from .sampler import Sampler
s = Sampler()
if isinstance(ii_alpha, numpy.ndarray):
ii_alpha = s.sample_grid(ii_alpha, ii[4:6], 'linear')
if len(ii) > 6 and isinstance(ii[6], str):
ikern = ii[6]
else:
ikern = 'cubic'
ii_image = s.sample_grid(ii_image, ii[4:6], ikern)
            ii_shape = ii_image.shape
# check arguments for compatibility
if not (isinstance(ii_image, numpy.ndarray) and
isinstance(ii_x, int) and isinstance(ii_y, int) and
(isinstance(ii_alpha, float) or (
isinstance(ii_alpha, numpy.ndarray) and
ii_alpha.ndim == 2 and ii_alpha.shape[0] == ii_image.shape[0]))):
continue
sfrom_x = 0
sfrom_y = 0
sto_x = ii_shape[1]
sto_y = ii_shape[0]
tfrom_x = ii_x
tfrom_y = ii_y
if tfrom_x < 0:
sfrom_x -= tfrom_x
tfrom_x = 0
if tfrom_y < 0:
sfrom_y -= tfrom_y
tfrom_y = 0
from_x = sto_x - sfrom_x
from_y = sto_y - sfrom_y
if from_x <= 0 or from_y <= 0:
continue
tto_x = tfrom_x + from_x
tto_y = tfrom_y + from_y
if tto_x > im_shape[1]:
shrink = tto_x - im_shape[1]
tto_x -= shrink
sto_x -= shrink
if tto_y > im_shape[0]:
shrink = tto_y - im_shape[0]
tto_y -= shrink
sto_y -= shrink
if tto_x <= tfrom_x or tto_y <= tfrom_y:
continue
if len(ii_shape) == 2:
if sfrom_x == 0 and sfrom_y == 0 and sto_x == ii_shape[1] and sto_y == ii_shape[0]:
out[tfrom_y:tto_y, tfrom_x:tto_x, :] = image_mix(
out[tfrom_y:tto_y, tfrom_x:tto_x, :], ii_image, ii_alpha)
else:
out[tfrom_y:tto_y, tfrom_x:tto_x, :] = image_mix(
out[tfrom_y:tto_y, tfrom_x:tto_x, :],
ii_image[sfrom_y:sto_y, sfrom_x:sto_x], ii_alpha)
else:
if sfrom_x == 0 and sfrom_y == 0 and sto_x == ii_shape[1] and sto_y == ii_shape[0]:
out[tfrom_y:tto_y, tfrom_x:tto_x, :] = image_mix(
out[tfrom_y:tto_y, tfrom_x:tto_x, :], ii_image, ii_alpha)
else:
out[tfrom_y:tto_y, tfrom_x:tto_x, :] = image_mix(
out[tfrom_y:tto_y, tfrom_x:tto_x, :],
ii_image[sfrom_y:sto_y, sfrom_x:sto_x, :], ii_alpha)
return out
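# Usage sketch, assuming two RGB uint8 arrays `img_a` and `img_b`
# (illustrative names): place `img_a` at (0, 0) and `img_b` at (200, 100) with
# 50% opacity on a white canvas; outsize follows the (width, height)
# convention used above.
#
#   canvas = image_compose([[img_a, 0, 0], [img_b, 200, 100, 0.5]], (800, 600))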
# image correlation (pixel values)
def image_corr(
im1:numpy.ndarray,
im2:numpy.ndarray,
immask:numpy.ndarray = None,
) -> float:
"""
Correlate pixel values for two images
Parameters
----------
im1, im2 : ndarray
Image arrays (of same size!)
immask : ndarray
Optional masking array (in which case only over those pixels)
Returns
-------
ic : float
Correlation coefficient
"""
if im1.size != im2.size:
raise ValueError('Images must match in size.')
if immask is None:
cc = numpy.corrcoef(im1.reshape(im1.size), im2.reshape(im2.size))
else:
if immask.size != im1.size:
immask = image_resample(numpy.uint8(255) * immask.astype(numpy.uint8),
(im1.shape[0], im1.shape[1])) >= 128
if immask.dtype != numpy.bool:
immask = (immask > 0)
cc = numpy.corrcoef(im1[immask], im2[immask])
return cc[0,1]
# crop image
def image_crop(
image:numpy.ndarray,
cropping:Any,
padding:int = 0,
masking:str = None,
spmap:numpy.ndarray = None,
spnei:List = None,
spnei_degree:int = 1,
) -> numpy.ndarray:
"""
Crops an image to a rectangular region of interest.
Parameters
----------
image : ndarray
Image (2D or 2D-3) array
cropping : Any
Cropping selection, either of
- [y0, x0, y1, x1] rectangle (y1/x1 non inclusive)
- int(S), superpixel index, requires spmap!
padding : int
Additional padding around cropping in pixels
masking : str
Masking operation, if requested, either of
'smoothnei' - smooth the neighboring region
spmap : ndarray
Superpixel mapping array
spnei : list
Superpixel (list of) list(s) of neighbors
spnei_degree : int
How many degrees of neighbors to include (default: 1)
"""
im_shape = image.shape
if not isinstance(padding, int) or padding < 0:
padding = 0
if isinstance(cropping, list) and len(cropping) == 4:
y0 = max(0, cropping[0]-padding)
x0 = max(0, cropping[1]-padding)
y1 = min(im_shape[0], cropping[2]+padding)
        x1 = min(im_shape[1], cropping[3]+padding)
elif isinstance(cropping, int) and cropping >= 0:
if spmap is None or not isinstance(spmap, numpy.ndarray):
raise ValueError('Missing spmap parameter.')
spidx = cropping
sppix = spmap[spidx,:spmap[spidx,-1]]
sppiy = sppix // im_shape[1]
sppix = sppix % im_shape[1]
y0 = max(0, numpy.amin(sppiy)-padding)
x0 = max(0, numpy.amin(sppix)-padding)
y1 = min(im_shape[0], numpy.amax(sppiy)+padding)
x1 = min(im_shape[1], numpy.amax(sppix)+padding)
yd = y1 - y0
xd = x1 - x0
dd = (yd + xd) // 2
if isinstance(spnei, list):
if len(spnei) > 8:
spnei = [spnei]
if not isinstance(spnei_degree, int) or spnei_degree < 1:
spnei_degree = 0
elif spnei_degree > len(spnei):
spnei_degree = len(spnei) - 1
else:
spnei_degree -= 1
spnei = spnei[spnei_degree]
try:
nei = spnei[spidx]
for n in nei:
sppix = spmap[n,:spmap[n,-1]]
sppiy = sppix // im_shape[1]
sppix = sppix % im_shape[1]
y0 = min(y0, max(0, numpy.amin(sppiy)-padding))
x0 = min(x0, max(0, numpy.amin(sppix)-padding))
y1 = max(y1, min(im_shape[0], numpy.amax(sppiy)+padding))
x1 = max(x1, min(im_shape[1], numpy.amax(sppix)+padding))
except:
raise
if isinstance(masking, str) and masking == 'smoothnei':
from .sampler import Sampler
s = Sampler()
yd = y1 - y0
xd = x1 - x0
try:
if len(im_shape) > 2:
ci = image[y0:y1,x0:x1,:]
else:
ci = image[y0:y1,x0:x1]
cim = numpy.zeros(yd * xd).reshape((yd,xd,))
cim[yd//2, xd//2] = 1.0
cims = s.sample_grid(cim, 1.0, 'gauss' + str(dd))
cims /= numpy.amax(cims)
cis = image_smooth_fft(ci, float(dd))
return image_mix(cis, ci, cims)
except:
raise
if len(im_shape) > 2:
return image[y0:y1,x0:x1,:]
else:
return image[y0:y1,x0:x1]
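# Usage sketch, assuming `img` and `spmap` as above (illustrative names):
# crop a rectangle, or crop around a superpixel with padding.
#
#   patch = image_crop(img, [100, 150, 200, 250])
#   sp_patch = image_crop(img, 17, padding=8, spmap=spmap)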
# Dice coefficient
def image_dice(
im1:numpy.ndarray,
im2:numpy.ndarray,
immask:numpy.ndarray = None) -> float:
"""
Compute DICE coefficient between two (binary mask) images
Parameters
----------
im1, im2 : ndarray
Two ndarray images of the same size
immask : ndarray
Optional mask that is applied, DICE within mask only
Returns
-------
dice : float
DICE coefficient
"""
if im1.shape != im2.shape:
if len(im1.shape) > 2:
if im1.shape[2] != 1:
raise ValueError('Image cannot have more than 1 plane.')
if len(im2.shape) > 2:
if im2.shape[2] != 1:
raise ValueError('Image cannot have more than 1 plane.')
if (im1.shape[0], im1.shape[1]) != ISIC_DICE_SHAPE:
im1 = image_resample(im1, ISIC_DICE_SHAPE)
if (im2.shape[0], im2.shape[1]) != ISIC_DICE_SHAPE:
im2 = image_resample(im2, ISIC_DICE_SHAPE)
if immask is None:
im1 = (im1.reshape(im1.size) > 0)
im2 = (im2.reshape(im2.size) > 0)
else:
if immask.size != im1.size:
immask = image_resample(numpy.uint8(255) * immask.astype(numpy.uint8),
(im1.shape[0], im1.shape[1])) >= 128
im1 = (im1[immask] > 0)
im2 = (im2[immask] > 0)
s1 = numpy.sum(im1)
s2 = numpy.sum(im2)
return 2 * numpy.sum(numpy.logical_and(im1, im2)) / (s1 + s2)
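# For reference, the value computed above is
#   DICE(A, B) = 2 * |A & B| / (|A| + |B|)
# e.g. masks with 80 and 120 foreground pixels sharing 60 pixels give
# 2 * 60 / (80 + 120) = 0.6.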
# Extended Dice coefficient
def image_dice_ext(
im1:numpy.ndarray,
val1:numpy.ndarray,
im2:numpy.ndarray,
val2:numpy.ndarray) -> float:
"""
Compute extended DICE coefficient between two (binary+value) images
Parameters
----------
im1 : ndarray
First image (ndarray, must be boolean)
val1 : ndarray
Values for first image
im2 : ndarray
Second image (ndarray, must be boolean)
val2 : ndarray
Values for second image
Returns
-------
xdice : float
Extended DICE coefficient
"""
if not (im1.shape == im2.shape == val1.shape == val2.shape):
raise ValueError('Images mismatch in shape.')
if len(im1.shape) > 2:
raise ValueError('Images must be single-plane.')
if im1.dtype != numpy.bool:
im1 = im1 > 0
if im2.dtype != numpy.bool:
im2 = im2 > 0
s1 = numpy.sum(im1)
s2 = numpy.sum(im2)
return (numpy.sum(val1[im2]) + numpy.sum(val2[im1])) / (s1 + s2)
# Smoothed Dice coefficient
def image_dice_fwhm(
im1:numpy.ndarray,
im2:numpy.ndarray,
fwhm:float) -> float:
"""
Compute smoothed-DICE coefficient between two (binary mask) images
Parameters
----------
im1, im2 : ndarray
Two ndarray images of the same size
fwhm : float
Smoothing kernel size
Returns
-------
xdice : float
        Smoothed DICE coefficient
"""
if im1.shape != im2.shape:
raise ValueError('Images mismatch in shape.')
if len(im1.shape) > 2:
raise ValueError('Images must be single-plane.')
if im1.dtype != numpy.bool:
im1 = im1 > 0
if im2.dtype != numpy.bool:
im2 = im2 > 0
sim1 = image_smooth_scale(im1, fwhm)
sim2 = image_smooth_scale(im2, fwhm)
return image_dice_ext(im1, sim1, im2, sim2)
# image distance average
def image_dist_average(source:numpy.ndarray, target:numpy.ndarray) -> float:
"""
Compute average distance between each foreground in source to target
Parameters
----------
source, target : numpy.ndarray
Boolean images (will be made boolean if necessary)
Returns
-------
dist : float
Average distance of source to target
"""
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
import scipy.ndimage as ndimage
if len(source.shape) > 2 or len(target.shape) > 2:
raise ValueError('Images must be 2D.')
if source.shape != target.shape:
raise ValueError('Images must match in shape.')
if source.dtype != numpy.bool:
source = source > 0
if target.dtype != numpy.bool:
target = target > 0
dist_to_target = ndimage.morphology.distance_transform_edt(numpy.logical_not(target))
return numpy.mean(dist_to_target[source])
# image gradient
def image_gradient(image:numpy.ndarray):
"""
Compute image gradient (and components)
Parameters
----------
image : ndarray
Image from which the gradient is computed
Returns
-------
gradient : tuple
Magnitude, and per-dimension components
"""
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
from . import sampler
s = sampler.Sampler()
zsk = s._kernels['cubic']
ishape = image.shape
if len(ishape) > 2:
image = image_gray(image)[:,:,0]
s0 = numpy.arange(0.0, float(ishape[0]), 1.0).astype(numpy.float64)
s1 = numpy.arange(0.0, float(ishape[1]), 1.0).astype(numpy.float64)
(c1, c0) = numpy.meshgrid(s1, s0)
c0.shape = (c0.size,1,)
c1.shape = (c1.size,1,)
c01 = numpy.concatenate((c0,c1), axis=1)
step = (1.0 / 512.0)
dg0 = sampler._sample_grid_coords(
image, c01 + step * numpy.asarray([1.0,1.0]), zsk[0], zsk[1])
dg1 = dg0.copy()
cxy = sampler._sample_grid_coords(
image, c01 + step * numpy.asarray([1.0,-1.0]), zsk[0], zsk[1])
dg0 += cxy
dg1 -= cxy
cxy = sampler._sample_grid_coords(
image, c01 + step * numpy.asarray([-1.0,1.0]), zsk[0], zsk[1])
dg0 -= cxy
dg1 += cxy
cxy = sampler._sample_grid_coords(
image, c01 + step * numpy.asarray([-1.0,-1.0]), zsk[0], zsk[1])
dg0 -= cxy
dg1 -= cxy
dg0 *= 128.0
dg1 *= 128.0
dg0.shape = ((ishape[0], ishape[1],))
dg1.shape = ((ishape[0], ishape[1],))
return (numpy.sqrt(dg0 * dg0 + dg1 * dg1), dg0, dg1)
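# Usage sketch, assuming an RGB or grayscale ndarray `img` (illustrative
# name); color input is converted to gray internally.
#
#   grad_mag, grad_0, grad_1 = image_gradient(img)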
# image in gray
def image_gray(
image:numpy.ndarray,
rgb_format:bool = True,
conv_type:str = 'desaturate',
) -> numpy.ndarray:
"""
Convert RGB (color) image into gray-scale image
Parameters
----------
image : ndarray
RGB (3-plane) image ndarray
rgb_format : bool
If True (default) return a 3-plane image of equal component values
conv_type : str
either 'average', 'desaturate' (default), or 'luma'
Returns
-------
gray : ndarray
Gray-scale image ndarray
"""
im_shape = image.shape
if len(im_shape) < 3:
if rgb_format:
if image.dtype != numpy.uint8:
image = numpy.trunc(255.0 * image).astype(numpy.uint8)
return image.reshape((im_shape[0], im_shape[1], 1,)).repeat(3, axis=2)
return image
p = image[:, :, 0].astype(numpy.float)
if not conv_type or not isinstance(conv_type, str) or not conv_type[0].lower() in 'al':
pmin = p
pmax = p
for pc in range(1, min(3, im_shape[2])):
pmin = numpy.minimum(pmin, image[:, :, pc].astype(numpy.float))
            pmax = numpy.maximum(pmax, image[:, :, pc].astype(numpy.float))
p = (pmin + pmax) / 2.0
elif conv_type[0] in 'aA':
for pc in range(1, min(3, im_shape[2])):
p += image[:, :, pc].astype(numpy.float)
p /= numpy.float(min(3, im_shape[2]))
else:
if im_shape[2] == 2:
p = (1.0/3.0) * p + (2.0/3.0) * image[:, :, 1]
elif im_shape[2] > 2:
p = 0.299 * p + 0.587 * image[:, :, 1] + 0.114 * image[:, :, 2]
if rgb_format:
if image.dtype != numpy.uint8:
p = numpy.trunc(255.0 * p).astype(numpy.uint8)
return p.astype(numpy.uint8).reshape(
(im_shape[0], im_shape[1], 1,)).repeat(3, axis=2)
return p.astype(image.dtype)
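# Usage sketch, assuming an RGB uint8 ndarray `img` (illustrative name):
#
#   gray_rgb = image_gray(img)                                     # 3-plane gray
#   gray_2d = image_gray(img, rgb_format=False, conv_type='luma')  # single plane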
# HSL based histograms
def image_hslhist(
image:numpy.ndarray,
resize:int = 512,
bins:int = 64,
binsamples:int = 8,
hmin:float = 0.0,
hmax:float = 1.0,
smin:float = 0.0,
smax:float = 1.0,
lmin:float = 0.0,
lmax:float = 1.0,
mask:numpy.ndarray = None,
mask_cradius:float = 0.875,
) -> tuple:
# IMPORT DONE HERE TO SAVE TIME DURING IMPORT
from .sampler import Sampler
s = Sampler()
if len(image.shape) != 3 or image.shape[2] != 3:
raise ValueError('Invalid image. Must be RGB.')
if binsamples > bins or binsamples < 2:
raise ValueError('Invalid bin sampling.')
if image.dtype == numpy.uint8:
image = (1.0 / 255.0) * image.astype(numpy.float64)
if not resize is None and resize > 0:
image = s.sample_grid(image, [resize, resize])
hslimage = rgb2hslv(image[:,:,0], image[:,:,1], image[:,:,2])
if mask is None or len(mask.shape) != 2 or mask.shape != image.shape[:2]:
cx = 0.5 * float(image.shape[0] - 1)
cy = 0.5 * float(image.shape[1] - 1)
maskx, masky = numpy.meshgrid(numpy.arange(-1.0, 1.0+0.5/cx, 1.0/cx),
numpy.arange(-1.0, 1.0+0.5/cy, 1.0/cy))
mask = (maskx * maskx + masky * masky) <= 1.0
hs = numpy.histogram2d(hslimage[0][mask], hslimage[1][mask], bins=bins,
range=[[hmin, hmax], [smin, smax]])
hl = numpy.histogram2d(hslimage[0][mask], hslimage[2][mask], bins=bins,
range=[[hmin, hmax], [lmin, lmax]])
sl = numpy.histogram2d(hslimage[1][mask], hslimage[2][mask], bins=bins,
range=[[smin, smax], [lmin, lmax]])
if binsamples < bins:
ssize = float(bins) / float(binsamples)
sc = numpy.round(numpy.arange(0.5 * ssize, float(bins), ssize)).astype(numpy.int32)
hs = image_smooth_fft(hs[0], 1.0 / float(binsamples))[:,sc][sc,:]
hl = image_smooth_fft(hl[0], 1.0 / float(binsamples))[:,sc][sc,:]
sl = image_smooth_fft(sl[0], 1.0 / float(binsamples))[:,sc][sc,:]
else:
hs = hs[0]
hl = hl[0]
sl = sl[0]
return (hs, hl, sl)
# mark border of an image with "content"
def image_mark_border(
image:numpy.ndarray,
content:Union[str,bytes],
color_diff:int = 40,
ecc_redundancy_level:float = 0.75,
pix_width:int = 3,
border_expand:bool = True,
border_color:list = [128,128,128],
) -> numpy.ndarray:
"""
Mark image border with content (encoded)
Parameters
----------
image : ndarray
RGB or grayscale (uint8) image array
content : str or bytes array
Content to be encoded into the image border, if too long for
selected scheme, warning will be printed and fitting scheme
selected, if possible (max length=1023 bytes)
color_diff : int
Amount by which pixel brightness will differ to signify 0 and 1
ecc_redundancy_level : float
Amount of payload bytes that can be missing/damaged
pix_width:int
Number of pixels (in each dimension) to use for a marker
border_expand : bool
If True (default) expand border by number of pixels
Returns
-------
marked : ndarray
Image with content encoded into border
"""
# IMPORT DONE HERE TO SAVE TIME DURING MODULE INIT
from .reedsolo import RSCodec
from .sampler import Sampler
# get some numbers, encode message, copy image
if not isinstance(content, str) and not isinstance(content, bytes):
raise ValueError('Invalid content (type).')
if not isinstance(color_diff, int) or color_diff < 32:
color_diff = 32
if not isinstance(pix_width, int) or pix_width < 1:
raise ValueError('Invalid pix_width parameter.')
im_shape = image.shape
im_rgb = (len(im_shape) > 2 and im_shape[2] > 2)
im_y = im_shape[0]
im_x = im_shape[1]
if border_expand:
if im_rgb:
marked = numpy.zeros(
(im_y + 2 * pix_width, im_x + 2 * pix_width, im_shape[2],),
dtype=numpy.uint8)
marked[0:pix_width,pix_width:im_x+pix_width,:] = image[:pix_width,:,:]
marked[pix_width:im_y+pix_width,0:pix_width,:] = image[:,:pix_width,:]
marked[pix_width:im_y+pix_width,pix_width:im_x+pix_width,:] = image
marked[im_y+pix_width:,pix_width:im_x+pix_width,:] = image[-pix_width:,:,:]
marked[pix_width:im_y+pix_width,im_x+pix_width:,:] = image[:,-pix_width:,:]
marked[:pix_width,:pix_width,:] = numpy.trunc(0.5 * (
marked[:pix_width,pix_width:pix_width+pix_width,:].astype(numpy.float32) +
marked[pix_width:pix_width+pix_width,:pix_width,:].astype(numpy.float32)))
marked[-pix_width:,:pix_width,:] = numpy.trunc(0.5 * (
marked[-2*pix_width:-pix_width,:pix_width,:].astype(numpy.float32) +
marked[-pix_width:,pix_width:pix_width+pix_width,:].astype(numpy.float32)))
marked[:pix_width,-pix_width:,:] = numpy.trunc(0.5 * (
marked[:pix_width,-2*pix_width:-pix_width,:].astype(numpy.float32) +
marked[pix_width:pix_width+pix_width,-pix_width:,:].astype(numpy.float32)))
marked[-pix_width:,-pix_width:,:] = numpy.trunc(0.5 * (
marked[-2*pix_width:-pix_width,-pix_width:,:].astype(numpy.float32) +
marked[-pix_width:,-2*pix_width:-pix_width,:].astype(numpy.float32)))
        else:
            # expanded grayscale canvas
            marked = numpy.zeros(
                (im_y + 2 * pix_width, im_x + 2 * pix_width,),
                dtype=numpy.uint8)
            marked[0:pix_width,pix_width:im_x+pix_width] = image[:pix_width,:]
marked[pix_width:im_y+pix_width,0:pix_width] = image[:,:pix_width]
marked[pix_width:im_y+pix_width,pix_width:im_x+pix_width] = image
marked[im_y+pix_width:,pix_width:im_x+pix_width] = image[-pix_width:,:]
marked[pix_width:im_y+pix_width,im_x+pix_width:] = image[:,-pix_width:]
marked[:pix_width,:pix_width] = numpy.trunc(0.5 * (
marked[:pix_width,pix_width:pix_width+pix_width].astype(numpy.float32) +
marked[pix_width:pix_width+pix_width,:pix_width].astype(numpy.float32)))
marked[-pix_width:,:pix_width] = numpy.trunc(0.5 * (
marked[-2*pix_width:-pix_width,:pix_width].astype(numpy.float32) +
marked[-pix_width:,pix_width:pix_width+pix_width].astype(numpy.float32)))
marked[:pix_width,-pix_width:] = numpy.trunc(0.5 * (
marked[:pix_width,-2*pix_width:-pix_width].astype(numpy.float32) +
marked[pix_width:pix_width+pix_width,-pix_width:].astype(numpy.float32)))
marked[-pix_width:,-pix_width:] = numpy.trunc(0.5 * (
marked[-2*pix_width:-pix_width,-pix_width:].astype(numpy.float32) +
marked[-pix_width:,-2*pix_width:-pix_width].astype(numpy.float32)))
im_shape = marked.shape
else:
marked = image.copy()
s = Sampler()
if im_rgb:
if isinstance(border_color,list) and len(border_color) == 3:
marked[0:pix_width,:,0] = border_color[0]
marked[0:pix_width,:,1] = border_color[1]
marked[0:pix_width,:,2] = border_color[2]
marked[:,0:pix_width,0] = border_color[0]
marked[:,0:pix_width,1] = border_color[1]
marked[:,0:pix_width,2] = border_color[2]
marked[-pix_width:,:,0] = border_color[0]
marked[-pix_width:,:,1] = border_color[1]
marked[-pix_width:,:,2] = border_color[2]
marked[:,-pix_width:,0] = border_color[0]
marked[:,-pix_width:,1] = border_color[1]
marked[:,-pix_width:,2] = border_color[2]
else:
marked[0:pix_width,:,:] = s.sample_grid(marked[0:pix_width,:,:],
[list(range(pix_width)), list(range(im_shape[1]))],
'gauss' + str(24 * pix_width), out_type='uint8')
marked[:,0:pix_width,:] = s.sample_grid(marked[:,0:pix_width,:],
[list(range(im_shape[0])), list(range(pix_width))],
'gauss' + str(24 * pix_width), out_type='uint8')
marked[-pix_width:,:,:] = s.sample_grid(marked[-pix_width:,:,:],
[list(range(pix_width)), list(range(im_shape[1]))],
'gauss' + str(24 * pix_width), out_type='uint8')
marked[:,-pix_width:,:] = s.sample_grid(marked[:,-pix_width:,:],
[list(range(im_shape[0])), list(range(pix_width))],
'gauss' + str(24 * pix_width), out_type='uint8')
else:
if isinstance(border_color, list) and len(border_color) == 1:
border_color = border_color[0]
if isinstance(border_color, int):
marked[0:pix_width,:] = border_color
marked[:,0:pix_width] = border_color
marked[-pix_width:,:] = border_color
marked[:,-pix_width:] = border_color
else:
marked[0:pix_width,:] = s.sample_grid(marked[0:pix_width,:],
[list(range(pix_width)), list(range(im_shape[1]))],
'gauss' + str(24 * pix_width), out_type='uint8')
marked[:,0:pix_width] = s.sample_grid(marked[:,0:pix_width],
[list(range(im_shape[0])), list(range(pix_width))],
'gauss' + str(24 * pix_width), out_type='uint8')
marked[-pix_width:,:] = s.sample_grid(marked[-pix_width:,:],
[list(range(pix_width)), list(range(im_shape[1]))],
'gauss' + str(24 * pix_width), out_type='uint8')
marked[:,-pix_width:] = s.sample_grid(marked[:,-pix_width:],
[list(range(im_shape[0])), list(range(pix_width))],
'gauss' + str(24 * pix_width), out_type='uint8')
im_y = im_shape[0] - 2 * pix_width
im_x = im_shape[1] - 2 * pix_width
num_wrd_y = min(255, im_y // (pix_width * 24))
num_wrd_x = min(255, im_x // (pix_width * 24))
capacity = 4 * (num_wrd_y + num_wrd_x - 8)
if isinstance(content, str):
content = content.encode('utf-8')
clen = len(content)
if clen > 1023:
raise ValueError('Content too long.')
slen = int(0.95 + float(clen) * 2.0 * ecc_redundancy_level)
mlen = clen + slen
if mlen <= 255:
cchunks = clen
nchunks = 1
else:
nchunks = 1 + (mlen - 1) // 255
cchunks = 1 + (clen - 1) // nchunks
slen = int(0.95 + float(cchunks) * 2.0 * ecc_redundancy_level)
if (cchunks + slen) > 255:
nchunks += 1
cchunks = 1 + (clen - 1) // nchunks
slen = int(0.95 + float(cchunks) * 2.0 * ecc_redundancy_level)
if nchunks > 64:
raise ValueError('ECC factor too high.')
r = RSCodec(slen)
echunks = cchunks + slen
b = r.encode_to_bits(content, cchunks)
if capacity < len(b):
raise ValueError('Content too long to encode.')
if len(b) < capacity:
while len(b) % echunks != 0:
b.extend([r.value_to_bits(257)])
b0 = b[:]
while len(b) < capacity:
b.extend(b0)
# mark image with side markers
boff = 4 * (nchunks - 1)
sm0 = r.value_to_bits(0 + boff)
sm1 = r.value_to_bits(1 + boff)
sm2 = r.value_to_bits(2 + boff)
sm3 = r.value_to_bits(3 + boff)
wm0 = r.value_to_bits(num_wrd_y)
wm1 = r.value_to_bits(num_wrd_x)
sm = [[sm0,wm0], [sm0,wm0], [sm1,wm1], [sm1,wm1],
[sm2,wm0], [sm2,wm0], [sm3,wm1], [sm3,wm1]]
for cidx in range(8):
sm[cidx].extend([r.value_to_bits(cchunks), r.value_to_bits(slen)])
nwyr = num_wrd_y - 4
nwxr = num_wrd_x - 4
nwyc = float(nwyr)
nwxc = float(nwxr)
nwy = 0.5 * nwxc
nwx = 0.5 * nwyc
lidx = 0
while nwyr > 0 or nwxr > 0:
if nwy <= nwx:
sm[0].append(b[lidx])
lidx += 1
sm[1].append(b[lidx])
lidx += 1
sm[4].append(b[lidx])
lidx += 1
sm[5].append(b[lidx])
lidx += 1
nwy += nwxc
nwyr -= 1
else:
sm[2].append(b[lidx])
lidx += 1
sm[3].append(b[lidx])
lidx += 1
sm[6].append(b[lidx])
lidx += 1
sm[7].append(b[lidx])
lidx += 1
nwx += nwyc
nwxr -= 1
image_mark_pixel(marked, 0, pix_width, 0, color_diff, False)
image_mark_pixel(marked, 0, pix_width, im_shape[0]-pix_width, color_diff, False)
image_mark_pixel(marked, 2, pix_width, 0, color_diff, False)
image_mark_pixel(marked, 2, pix_width, im_shape[0]-pix_width, color_diff, False)
for cidx in range(8):
side = cidx // 2
if (side % 2) == 0:
num_wrd = num_wrd_y
else:
num_wrd = num_wrd_x
for widx in range(num_wrd):
word = sm[cidx][widx]
if (cidx % 2) == 0:
wcrd = widx
else:
wcrd = num_wrd + widx
image_mark_word(marked, side, pix_width, num_wrd, wcrd, color_diff, word)
return marked
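# Usage sketch, assuming an RGB uint8 image `img` and an illustrative payload
# string; image_read_border below is the matching decoder.
#
#   marked = image_mark_border(img, 'ISIC_0000000', pix_width=3)
#   # decoded = image_read_border(marked)   # should yield 'ISIC_0000000'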
# mark pixel in image (color darker or brighter)
def image_mark_pixel(image, side, pix_width, pcrd, value, brighter):
"""
Mark one pixel within an image (with bit value)
Parameters
----------
image : ndarray
Image to be marked
side : int
Side of the image on which to mark a pixel (0 through 3)
pix_width : int
Width of a pixel
pcrd : int
Pixel coordinate
value : int
Value to add (or subtract) from the original pixel value
brighter : bool
Boolean, add (True) or subtract (False) from original value
Returns
-------
None
"""
shape = image.shape
it = 255 - value
darker = not brighter
if side == 0 or side == 2:
yf = pcrd
yt = pcrd + pix_width
if side == 0:
xf = 0
xt = pix_width
else:
xf = shape[1] - pix_width
xt = shape[1]
else:
xf = pcrd
xt = pcrd + pix_width
if side == 1:
yf = 0
yt = pix_width
else:
yf = shape[0] - pix_width
yt = shape[0]
v0 = value
if len(shape) > 2 and shape[2] == 3:
v2 = v1 = v0
m0 = numpy.mean(image[yf:yt,xf:xt,0])
m1 = numpy.mean(image[yf:yt,xf:xt,1])
m2 = numpy.mean(image[yf:yt,xf:xt,2])
if darker and m0 > it:
v0 += m0 - it
elif brighter and m0 < value:
v0 += value - m0
if darker and m1 > it:
v1 += m1 - it
elif brighter and m1 < value:
v1 += value - m1
if darker and m2 > it:
v2 += m2 - it
elif brighter and m2 < value:
v2 += value - m2
if darker:
(v0, v1, v2) = (-v0, -v1, -v2)
image[yf:yt,xf:xt,0] = numpy.maximum(0.0, numpy.minimum(255.0,
image[yf:yt,xf:xt,0].astype(numpy.float) + v0))
image[yf:yt,xf:xt,1] = numpy.maximum(0.0, numpy.minimum(255.0,
image[yf:yt,xf:xt,1].astype(numpy.float) + v1))
image[yf:yt,xf:xt,2] = numpy.maximum(0.0, numpy.minimum(255.0,
image[yf:yt,xf:xt,2].astype(numpy.float) + v2))
else:
m0 = numpy.mean(image[yf:yt,xf:xt])
if darker and m0 > it:
v0 += m0 - it
elif brighter and m0 < value:
v0 += value - m0
if darker:
v0 = -v0
image[yf:yt,xf:xt] = numpy.maximum(0.0, numpy.minimum(255.0,
image[yf:yt,xf:xt].astype(numpy.float) + v0))
# mark word (of size 10 "pixels") in image
def image_mark_word(image, side, pix_width, num_wrd, wcrd, value, word):
"""
Mark 10-bit (8-bit encoded) "word" in image border pixels
Parameters
----------
image : ndarray
Image to be marked
side : int
Side of the image on which to mark a pixel (0 through 3)
pix_width : int
Width of a pixel
num_wrd : int
Number of words on this side
wcrd : int
Which word among those to be marked
value : int
Value that is passed to image_mark_pixel
word : list
List of bits, passed as "brighter" parameter to image_mark_pixel
Returns
-------
None
"""
shape = image.shape
if side == 0 or side == 2:
slen = shape[0]
else:
slen = shape[1]
if wcrd < num_wrd:
scrd = pix_width * (1 + 12 * wcrd)
pix_add = pix_width
else:
scrd = slen - pix_width * (2 + 12 * (wcrd - num_wrd))
pix_add = -pix_width
for i in range(10):
image_mark_pixel(image, side, pix_width, scrd, value, word[i] > 0)
scrd += pix_add
image_mark_pixel(image, side, pix_width, scrd, value*2, False)
scrd += pix_add
image_mark_pixel(image, side, pix_width, scrd, value*2, True)
# match images in properties
def image_match(
source_image:numpy.ndarray,
target_image:numpy.ndarray,
match_mask:numpy.ndarray = None,
match_contrast:bool = True,
match_hue:bool = True,
match_saturation:bool = True,
match_mean:bool = True,
match_std:bool = True,
gray_conv_type:str = 'desaturate',
) -> numpy.ndarray:
"""
Match two images on contrast, hue, and saturation
Parameters
----------
source_image, target_image : ndarray (must match in size)
Source image (will be matched to) and target image
match_mask : ndarray
Mask (must match in size)
match_contrast, match_hue, match_saturation : bool
Flags, controlling which aspects are matched (default: all True)
match_mean, match_std : bool
Flags, controlling how aspects are matched (default: all True)
gray_conv_type : str
Passed into image_gray as conv_type (see help there)
Returns
-------
matched_image : ndarray
Source image transformed to match target image
"""
try:
sshape = source_image.shape
tshape = target_image.shape
if sshape != tshape:
raise ValueError('Image shape mismatch.')
except:
raise
if not match_mask is None:
if not isinstance(match_mask, numpy.ndarray):
match_mask = None
elif match_mask.ndim != 2:
raise ValueError('Invalid mask dims.')
elif match_mask.shape[0] != sshape[0] or match_mask.shape[1] != sshape[1]:
raise ValueError('Invalid mask shape.')
mask_size = 0
if not match_mask is None:
mask_size = numpy.sum(match_mask)
if mask_size < 16:
raise ValueError('Mask covers too little area.')
if not match_mean and not match_std:
return source_image.copy()
source_type = source_image.dtype
source_image = source_image.astype(numpy.float64)
source_is_gray = (source_image.ndim == 2)
target_is_gray = (target_image.ndim == 2)
if match_contrast:
if source_is_gray:
source_gray = source_image
else:
source_gray = image_gray(source_image, rgb_format=False,
conv_type=gray_conv_type)
if target_is_gray:
target_gray = target_image.astype(numpy.float64)
else:
target_gray = image_gray(target_image, rgb_format=False,
conv_type=gray_conv_type)
if mask_size > 0:
source_gray = source_gray[match_mask]
target_gray = target_gray[match_mask]
source_mean = numpy.mean(source_gray)
if match_mean:
target_mean = numpy.mean(target_gray)
mean_corr = (target_mean - source_mean)
source_image = source_image + mean_corr
if match_std:
source_std = numpy.std(source_gray)
target_std = numpy.std(target_gray)
std_corr = target_std / source_std
source_image = target_mean + std_corr * (source_image - target_mean)
elif match_std:
source_std = numpy.std(source_gray)
target_std = numpy.std(target_gray)
std_corr = target_std / source_std
source_image = source_mean + std_corr * (source_image - source_mean)
if not source_is_gray and not target_is_gray and (match_hue or match_saturation):
source_hslv = rgb2hslv(source_image[:,:,0],
source_image[:,:,1], source_image[:,:,2])
target_hslv = rgb2hslv(target_image[:,:,0],
target_image[:,:,1], target_image[:,:,2])
source_hue = source_hslv[0]
source_sat = source_hslv[1]
target_hue = target_hslv[0]
target_sat = target_hslv[1]
if mask_size > 0:
source_hue = source_hue[match_mask]
source_sat = source_sat[match_mask]
target_hue = target_hue[match_mask]
target_sat = target_sat[match_mask]
if match_hue:
pass
source_image[source_image < 0] = 0
if source_type == numpy.uint8:
source_image[source_image > 255] = 255
    return source_image.astype(source_type)
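# Usage sketch, assuming two same-shape RGB uint8 arrays `img_a` and `img_b`
# (illustrative names): match `img_a` to the brightness/contrast statistics
# of `img_b`.
#
#   matched = image_match(img_a, img_b, match_hue=False, match_saturation=False)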
# image mixing (python portion)
def image_mix(
image_1:numpy.ndarray,
image_2:numpy.ndarray,
alpha_2:Union[float, numpy.ndarray, None] = 0.5,
) -> numpy.ndarray:
"""
    Mix two (RGB and/or grayscale) images with either max or blending
Parameters
----------
image_1 : ndarray
First image (2D: gray, 3D: color)
image_2 : ndarray
Second image
alpha_2 : alpha value(s), either float, ndarray, or None
Blending selection - for a single value, this is the opacity
of the second image (default = 0.5, i.e. equal mixing); for
an array, it must match the size, and be a single plane; if
None, each image component is set to the maximum across the
two arrays
Returns
-------
out_image : ndarray
Mixed image
"""
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
from .jitfunc import image_mix as image_mix_jit
# get original shapes and perform necessary checks and reshaping
im1shape = image_1.shape
im1reshape = True
im2shape = image_2.shape
im2reshape = True
if image_1.shape[0] != image_2.shape[0]:
raise ValueError('Invalid input images.')
if not alpha_2 is None and isinstance(alpha_2, numpy.ndarray):
a2shape = alpha_2.shape
if not alpha_2.dtype is numpy.float32:
alpha_2 = alpha_2.astype(numpy.float32)
im1pix = im1shape[0]
im1planes = 1
if len(im1shape) > 1:
if im1shape[1] == 3 and len(im1shape) == 2:
im1planes = 3
else:
im1pix *= im1shape[1]
if len(im1shape) > 2:
im1planes = im1shape[2]
if not im1planes in [1, 3]:
if im1planes > 3:
if len(im1shape) == 3:
image_1 = image_1[:,:,0:3]
else:
image_1 = image_1[:,0:3]
im1planes = 3
im1reshape = False
else:
raise ValueError('Invalid input image_1.')
im2pix = im2shape[0]
im2planes = 1
if len(im2shape) > 1:
if im2shape[1] == 3 and len(im2shape) == 2:
im2planes = 3
else:
im2pix *= im2shape[1]
if len(im2shape) > 2:
im2planes = im2shape[2]
if not im2planes in [1, 3]:
if im2planes > 3:
if len(im2shape) == 3:
image_2 = image_2[:,:,0:3]
else:
image_2 = image_2[:,0:3]
im2planes = 3
im2reshape = False
else:
            raise ValueError('Invalid input image_2.')
if im1pix != im2pix:
raise ValueError('Invalid input images.')
if isinstance(alpha_2, numpy.ndarray) and alpha_2.size not in [1, im1pix]:
raise ValueError('Invalid Alpha size.')
try:
image_1.shape = (im1pix, im1planes)
except:
try:
image_1 = image_1.reshape((im1pix, im1planes))
except:
            raise ValueError('Unable to format image_1.')
try:
image_2.shape = (im1pix, im2planes)
except:
try:
image_2 = image_2.reshape((im1pix, im2planes))
except:
if im1reshape:
image_1.shape = im1shape
            raise ValueError('Unable to format image_2.')
if not alpha_2 is None:
if isinstance(alpha_2, float):
alpha_2 = numpy.float32(alpha_2) * numpy.ones(im1pix,
dtype=numpy.float32)
a2shape = alpha_2.shape
else:
if alpha_2.size == 1:
alpha_2 = alpha_2 * numpy.ones(im1pix, dtype=numpy.float32)
a2shape = alpha_2.shape
else:
try:
alpha_2.shape = (im1pix)
except:
try:
alpha_2 = alpha_2.reshape(im1pix)
except:
if im1reshape:
image_1.shape = im1shape
if im2reshape:
image_2.shape = im2shape
raise ValueError('Unable to format alpha_2.')
# attempt to use JIT function
try:
immix = image_mix_jit(image_1, image_2, alpha_2)
# and return original inputs to their previous state in any case!
except:
if im1reshape:
image_1.shape = im1shape
if im2reshape:
image_2.shape = im2shape
if isinstance(alpha_2, numpy.ndarray):
alpha_2.shape = a2shape
raise
if im1reshape:
image_1.shape = im1shape
if im2reshape:
image_2.shape = im2shape
if not alpha_2 is None:
alpha_2.shape = a2shape
if im1shape[-1] in [1, 3]:
immix.shape = im1shape
else:
if len(im1shape) == 3:
immix.shape = (im1shape[0], im1shape[1], immix.shape[-1])
return immix
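# Usage sketch, assuming two same-size images `img_a` and `img_b`
# (illustrative names): blend with a fixed opacity, or take the per-component
# maximum by passing alpha_2=None.
#
#   blended = image_mix(img_a, img_b, 0.25)   # 25% of img_b over img_a
#   maxed = image_mix(img_a, img_b, None)     # component-wise maximum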
# overlay image
def image_overlay(
im:numpy.ndarray,
heatmap:numpy.ndarray,
heatposlut:Union[list,numpy.ndarray]=[[255,0,0],[255,255,0]],
heatneglut:Union[list,numpy.ndarray]=None,
min_thresh:float=0.0,
max_thresh:float=1.0,
alpha:Union[float,numpy.ndarray]=-1.0,
alpha_max:float=1.0,
) -> numpy.ndarray:
# late imports
from .sampler import Sampler
s = Sampler()
# lookup colors
imsh = im.shape
if im.ndim != 3 or imsh[2] != 3:
raise ValueError('Invalid image, must be RGB x*y*3.')
if heatmap.ndim != 2:
raise ValueError('Invalid heatmap, must be x*y.')
hmsh = heatmap.shape
if isinstance(heatposlut, list):
heatposlut = numpy.asarray(heatposlut).astype(numpy.uint8)
if isinstance(heatneglut, list):
heatneglut = numpy.asarray(heatneglut).astype(numpy.uint8)
hplsh = heatposlut.shape
if len(hplsh) != 2 or hplsh[1] != 3:
raise ValueError('Invalid heatposlut shape.')
if not heatneglut is None:
hnlsh = heatneglut.shape
if len(hnlsh) != 2 or hnlsh[1] != 3:
raise ValueError('Invalid heatneglut shape.')
else:
hnlsh = [256,3]
if (max_thresh - min_thresh) != 1.0:
trans_fac = 1.0 / (max_thresh - min_thresh)
min_thresh /= trans_fac
if min_thresh < 0.0:
min_thresh = 0.0
if isinstance(alpha, numpy.ndarray):
if alpha.ndim != 2 or alpha.shape[0] != hmsh[0] or alpha.shape[1] != hmsh[1]:
alpha = -1.0
else:
if alpha.shape[0] != imsh[0] or alpha.shape[1] != imsh[1]:
alpha = s.sample_grid(alpha,list(imsh[0:2]), 'linear')
if not (isinstance(alpha, numpy.ndarray) or isinstance(alpha, float)):
raise ValueError('Invalid alpha parameter.')
if alpha_max <= 0.0:
return im.copy()
if isinstance(alpha, float):
if alpha > 1.0:
alpha = 1.0
elif alpha == 0:
return im.copy()
if alpha < 0.0:
alpha_map = heatmap.copy()
alpha_map[alpha_map < min_thresh] = min_thresh
alpha_map -= min_thresh
alpha_map /= (max_thresh - min_thresh)
alpha_map[alpha_map > 1.0] = 1.0
alpha = -alpha * alpha_map
alpha[alpha > 1.0] = 1.0
else:
alpha_map = heatmap >= min_thresh
alpha_map = alpha_map.astype(numpy.float32)
alpha = alpha * alpha_map
if alpha.shape[0] != imsh[0] or alpha.shape[1] != imsh[1]:
alpha = s.sample_grid(alpha,list(imsh[0:2]), 'linear')
if alpha_max < 1.0 and isinstance(alpha, numpy.ndarray):
alpha[alpha > alpha_max] = alpha_max
heatmap = heatmap - min_thresh
heatmap /= (max_thresh - min_thresh)
if hplsh[0] < 40:
lsfac = (hplsh[0] - 1) / 255.0
heatposlut = s.sample_grid(heatposlut,
[numpy.arange(0.0,float(hplsh[0])-1.0+0.5*lsfac,lsfac),3], 'linear')
if hnlsh[0] < 40:
lsfac = (hnlsh[0] - 1) / 255.0
heatneglut = s.sample_grid(heatneglut,
            [numpy.arange(0.0,float(hnlsh[0])-1.0+0.5*lsfac,lsfac),3], 'linear')
heatrgb = lut_lookup(heatmap.flatten(), heatposlut, heatneglut).reshape(
(hmsh[0],hmsh[1],3))
if hmsh[0] != imsh[0] or hmsh[1] != imsh[1]:
heatrgb = s.sample_grid(heatrgb, list(imsh[0:2]), 'linear').astype(numpy.uint8)
return image_mix(im, heatrgb, alpha)
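# Usage sketch, assuming an RGB uint8 image `img` and a [0..1] float heatmap
# of matching proportions (illustrative names): the overlay fades in between
# the two thresholds.
#
#   overlaid = image_overlay(img, heatmap, min_thresh=0.25, max_thresh=0.75)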
# read image border
def image_read_border(
image:numpy.ndarray,
output:str = 'str',
pix_width:Union[None,int,float,numpy.ndarray] = None,
) -> Any:
"""
Read the encoded data from an image border
Parameters
----------
image : ndarray
Image containing data in its border pixels
output : str
Either 'str' (default) or 'bytes'
pix_width : int, float, ndarray
Single value or 4-element vector (for each reading direction),
default: auto-detect (None)
Returns
-------
decoded : str, bytes
Decoded content (if able to decode)
"""
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
from .reedsolo import RSCodec
from .sampler import Sampler
r = RSCodec(64) # needed for bit decoding
s = Sampler()
# guess pixel width
im_shape = image.shape
if len(im_shape) > 2:
image = numpy.trunc(numpy.mean(image, axis=2)).astype(numpy.uint8)
if pix_width is None:
pix_width = numpy.zeros(4)
elif isinstance(pix_width, int):
pix_width = float(pix_width) * numpy.ones(4)
elif isinstance(pix_width, float):
pix_width = pix_width * numpy.ones(4)
elif pix_width.size != 4:
pix_width = numpy.zeros(4)
pwi = numpy.where(pix_width == 0.0)[0]
if len(pwi) > 0:
pwi = pwi[0]
im_shapeh = (im_shape[0] // 2, im_shape[1] // 2)
wlen = None
cidx = 0
while wlen is None:
wlen = column_period(image[:im_shapeh[0],cidx],12)
if not wlen is None:
break
cidx += 1
if wlen is None:
raise RuntimeError('Column undetected.')
if cidx > 0:
image = image[:,cidx:]
pix_width[pwi] = float(wlen) / 12.0
if pix_width[pwi] >= 2.0:
if numpy.corrcoef(image[:im_shapeh[0],0], image[:im_shapeh[0],1])[0,1] < 0.5:
raise RuntimeError('Column not duplicated as expected.')
if pwi < 2:
pwdiff = pix_width[pwi] - float(int(pix_width[pwi]))
if pwdiff != 0.0:
if pwdiff > 0.0 and pwdiff < 0.1:
xpix_width = float(int(pix_width[pwi]))
else:
xpix_width = float(int(2.0 * pix_width[pwi] + 0.5))
image = s.sample_grid(image, [xpix_width/pix_width[pwi],1.0])
pix_width[pwi] = xpix_width
try:
return image_read_border(image_rotate(image[:,cidx:], 'left'), output, pix_width)
except:
raise
pix_width = 0.1 * numpy.trunc(10.0 * pix_width + 0.5)
if not numpy.all(pix_width == pix_width[0]):
if pix_width[0] != pix_width[2] or pix_width[1] != pix_width[3]:
raise RuntimeError('Invalid image detected.')
if pix_width[0] > pix_width[1]:
image = s.sample_grid(image, [1.0, pix_width[0] / pix_width[1]])
else:
image = s.sample_grid(image, [pix_width[1] / pix_width[0], 1.0])
# get reference columns
pix_width = int(pix_width[0])
kspec = 'gauss' + str(pix_width*24)
if pix_width > 1:
c0_p = numpy.mean(image[pix_width:0-pix_width,:pix_width], axis=1)
c1_p = numpy.mean(image[:pix_width,pix_width:0-pix_width], axis=0)
c2_p = numpy.mean(image[pix_width:0-pix_width,0-pix_width:], axis=1)
c3_p = numpy.mean(image[0-pix_width:,pix_width:0-pix_width], axis=0)
else:
c0_p = image[1:-1,0]
c1_p = image[0,1:-1]
c2_p = image[1:-1,-1]
c3_p = image[-1,1:-1]
c0_p.shape = (c0_p.size)
c1_p.shape = (c1_p.size)
    c2_p.shape = (c2_p.size)
    c3_p.shape = (c3_p.size)
c0_n = c0_p[::-1]
c1_n = c1_p[::-1]
c2_n = c2_p[::-1]
c3_n = c3_p[::-1]
rc0_p = s.sample_values(c0_p, 1.0/pix_width, kspec)
rc0_n = s.sample_values(c0_n, 1.0/pix_width, kspec)
rc1_p = s.sample_values(c1_p, 1.0/pix_width, kspec)
rc1_n = s.sample_values(c1_n, 1.0/pix_width, kspec)
rc2_p = s.sample_values(c2_p, 1.0/pix_width, kspec)
rc2_n = s.sample_values(c2_n, 1.0/pix_width, kspec)
rc3_p = s.sample_values(c3_p, 1.0/pix_width, kspec)
rc3_n = s.sample_values(c3_n, 1.0/pix_width, kspec)
if pix_width > 1:
c0_p = s.sample_values(c0_p, 1.0/pix_width, 'resample')
c0_n = s.sample_values(c0_n, 1.0/pix_width, 'resample')
c1_p = s.sample_values(c1_p, 1.0/pix_width, 'resample')
c1_n = s.sample_values(c1_n, 1.0/pix_width, 'resample')
c2_p = s.sample_values(c2_p, 1.0/pix_width, 'resample')
c2_n = s.sample_values(c2_n, 1.0/pix_width, 'resample')
c3_p = s.sample_values(c3_p, 1.0/pix_width, 'resample')
c3_n = s.sample_values(c3_n, 1.0/pix_width, 'resample')
# subtract
c0_p = c0_p - rc0_p
c0_n = c0_n - rc0_n
c1_p = c1_p - rc1_p
c1_n = c1_n - rc1_n
c2_p = c2_p - rc2_p
c2_n = c2_n - rc2_n
c3_p = c3_p - rc3_p
c3_n = c3_n - rc3_n
# decode first values
c_values = []
try:
c_values.append(r.values_to_value(c0_p[:10]))
except:
c_values.append(None)
try:
c_values.append(r.values_to_value(c0_n[:10]))
except:
c_values.append(None)
try:
c_values.append(r.values_to_value(c1_p[:10]))
except:
c_values.append(None)
try:
c_values.append(r.values_to_value(c1_n[:10]))
except:
c_values.append(None)
try:
c_values.append(r.values_to_value(c2_p[:10]))
except:
c_values.append(None)
try:
c_values.append(r.values_to_value(c2_n[:10]))
except:
c_values.append(None)
try:
c_values.append(r.values_to_value(c3_p[:10]))
except:
c_values.append(None)
try:
c_values.append(r.values_to_value(c3_n[:10]))
except:
c_values.append(None)
c_xvals = [v // 4 for v in c_values if not v is None]
if len(c_xvals) < 4:
raise RuntimeError('Image quality too poor.')
if not all([v == c_xvals[0] for v in c_xvals]):
xval = float(numpy.median(numpy.asarray(c_xvals)))
if float(int(xval)) != xval:
raise RuntimeError('Image quality too poor.')
xval = int(xval)
if sum([xval != v for v in c_xvals]) > (1 + len(c_xvals) // 2):
raise RuntimeError('Image quality too poor.')
for (idx, v) in enumerate(c_values):
if v is None:
continue
if (v // 4) != xval:
c_values[idx] = 4 * xval + v % 4
else:
xval = c_xvals[0]
while any([v is None for v in c_values]):
for (idx, v) in enumerate(c_values):
nidx = (idx + 1) % 8
pidx = (idx + 7) % 8
if v is None:
if (idx % 2) == 0:
if not c_values[nidx] is None:
c_values[idx] = c_values[nidx]
elif not c_values[pidx] is None:
c_values[idx] = (4 * xval + (c_values[pidx] + 1) % 4)
else:
if not c_values[pidx] is None:
c_values[idx] = c_values[pidx]
elif not c_values[nidx] is None:
c_values[idx] = (4 * xval + (c_values[nidx] + 3) % 4)
# flip data into correct orientation
c_order = [v % 4 for v in c_values]
nchunks = 1 + xval
if c_order == [1, 1, 2, 2, 3, 3, 0, 0]:
(c0_p, c0_n, c1_p, c1_n, c2_p, c2_n, c3_p, c3_n) = (c1_n, c1_p, c2_p, c2_n, c3_n, c3_p, c0_p, c0_n)
elif c_order == [2, 2, 3, 3, 0, 0, 1, 1]:
(c0_p, c0_n, c1_p, c1_n, c2_p, c2_n, c3_p, c3_n) = (c2_n, c2_p, c3_n, c3_p, c0_n, c0_p, c1_n, c1_p)
elif c_order == [3, 3, 0, 0, 1, 1, 2, 2]:
(c0_p, c0_n, c1_p, c1_n, c2_p, c2_n, c3_p, c3_n) = (c3_p, c3_n, c0_n, c0_p, c1_p, c1_n, c2_n, c2_p)
elif c_order != [0, 0, 1, 1, 2, 2, 3, 3]:
raise RuntimeError('Invalid corner markers.')
# extract number of words
nwy = []
nwx = []
try:
nwy.append(r.values_to_value(c0_p[12:22]))
except:
pass
try:
nwy.append(r.values_to_value(c0_n[12:22]))
except:
pass
try:
nwy.append(r.values_to_value(c2_p[12:22]))
except:
pass
try:
nwy.append(r.values_to_value(c2_n[12:22]))
except:
pass
try:
nwx.append(r.values_to_value(c1_p[12:22]))
except:
pass
try:
nwx.append(r.values_to_value(c1_n[12:22]))
except:
pass
try:
nwx.append(r.values_to_value(c3_p[12:22]))
except:
pass
try:
nwx.append(r.values_to_value(c3_n[12:22]))
except:
pass
if len(nwy) == 0 or len(nwx) == 0:
raise RuntimeError('Error decoding number of words!')
if not all([v == nwy[0] for v in nwy]):
if len(nwy) == 2:
raise RuntimeError('Error decoding number of words!')
else:
nwy = float(numpy.median(numpy.asarray(nwy)))
if float(int(nwy)) != nwy:
raise RuntimeError('Error decoding number of words!')
else:
nwy = nwy[0]
if not all([v == nwx[0] for v in nwx]):
if len(nwx) == 2:
raise RuntimeError('Error decoding number of words!')
else:
nwx = float(numpy.median(numpy.asarray(nwx)))
if float(int(nwx)) != nwx:
raise RuntimeError('Error decoding number of words!')
else:
nwx = nwx[0]
# extract content length and number of symbols
clen = []
nsym = []
try:
clen.append(r.values_to_value(c0_p[24:34]))
except:
pass
try:
nsym.append(r.values_to_value(c0_p[36:46]))
except:
pass
try:
clen.append(r.values_to_value(c0_n[24:34]))
except:
pass
try:
nsym.append(r.values_to_value(c0_n[36:46]))
except:
pass
try:
clen.append(r.values_to_value(c1_p[24:34]))
except:
pass
try:
nsym.append(r.values_to_value(c1_p[36:46]))
except:
pass
try:
clen.append(r.values_to_value(c1_n[24:34]))
except:
pass
try:
nsym.append(r.values_to_value(c1_n[36:46]))
except:
pass
try:
clen.append(r.values_to_value(c2_p[24:34]))
except:
pass
try:
nsym.append(r.values_to_value(c2_p[36:46]))
except:
pass
try:
clen.append(r.values_to_value(c2_n[24:34]))
except:
pass
try:
nsym.append(r.values_to_value(c2_n[36:46]))
except:
pass
try:
clen.append(r.values_to_value(c3_p[24:34]))
except:
pass
try:
nsym.append(r.values_to_value(c3_p[36:46]))
except:
pass
try:
clen.append(r.values_to_value(c3_n[24:34]))
except:
pass
try:
nsym.append(r.values_to_value(c3_n[36:46]))
except:
pass
if len(clen) == 0:
raise RuntimeError('Error decoding content length.')
if not all([v == clen[0] for v in clen]):
if len(clen) == 2:
raise RuntimeError('Error decoding content length.')
else:
clen = float(numpy.median(numpy.asarray(clen)))
if float(int(clen)) != clen:
raise RuntimeError('Error decoding content length.')
clen = int(clen)
else:
clen = clen[0]
if len(nsym) == 0:
raise RuntimeError('Error decoding number of ECC bytes.')
if not all([v == nsym[0] for v in nsym]):
if len(nsym) == 2:
raise RuntimeError('Error decoding number of ECC bytes.')
else:
nsym = float(numpy.median(numpy.asarray(nsym)))
if float(int(nsym)) != nsym:
raise RuntimeError('Error decoding number of ECC bytes.')
nsym = int(nsym)
else:
nsym = nsym[0]
# get code words
r = RSCodec(nsym)
eclen = clen + nsym
chunks = [[None] * eclen for v in range(nchunks)]
cidx = 0
lidx = 0
nwyr = nwy - 4
nwxr = nwx - 4
nwyc = float(nwyr)
nwxc = float(nwxr)
nwy = 0.5 * nwxc
nwx = 0.5 * nwyc
yc = [c0_p[48:], c0_n[48:], c2_p[48:], c2_n[48:]]
xc = [c1_p[48:], c1_n[48:], c3_p[48:], c3_n[48:]]
ycidx = 0
xcidx = 0
yidx = 0
xidx = 0
while nwyr > 0 or nwxr > 0:
if nwy <= nwx:
try:
w = r.values_to_value(yc[ycidx][yidx:yidx+10])
except:
w = None
ycidx += 1
if ycidx > 3:
ycidx = 0
yidx += 12
nwy += nwxc
nwyr -= 1
else:
try:
w = r.values_to_value(xc[xcidx][xidx:xidx+10])
except:
w = None
xcidx += 1
if xcidx > 3:
xcidx = 0
xidx += 12
nwx += nwyc
nwxr -= 1
if not w is None:
if w == 257:
cidx = 0
lidx = 0
continue
if chunks[cidx][lidx] is None:
chunks[cidx][lidx] = w
elif isinstance(chunks[cidx][lidx], int):
chunks[cidx][lidx] = [chunks[cidx][lidx],w]
else:
chunks[cidx][lidx].append(w)
lidx += 1
if lidx >= eclen:
lidx = 0
cidx += 1
if cidx >= nchunks:
cidx = 0
out = bytearray()
for cidx in range(nchunks):
for lidx in range(eclen):
if chunks[cidx][lidx] is None:
chunks[cidx][lidx] = 0
elif isinstance(chunks[cidx][lidx], list):
ll = chunks[cidx][lidx]
if all([v == ll[0] for v in ll]):
ll = ll[0]
elif len(ll) > 2:
ll = int(numpy.median(numpy.asarray(ll)))
else:
ll = ll[0]
chunks[cidx][lidx] = ll
out.extend(bytearray(chunks[cidx]))
try:
out = r.decode(out, eclen)
except:
raise
if isinstance(output, str) and output == 'str':
out = out.decode('utf-8')
return out
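# Example (sketch, not part of the original API surface): decoding a
# border-encoded image. The filename and the assumption that the border was
# written by the matching encoder are illustrative only.
# im = read_image('border_coded.png')
# message = image_read_border(im)                    # decoded UTF-8 str (default)
# raw_bytes = image_read_border(im, output='bytes')  # raw decoded bytes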
# image registration (experimental!)
def image_register(
i1:numpy.ndarray,
i2:numpy.ndarray,
imask:numpy.ndarray = None,
mode:str = 'luma',
origin:numpy.ndarray = None,
trans:bool = True,
rotate:bool = True,
scale:bool = False,
shear:bool = False,
imethod:str = 'linear',
maxpts:int = 250000,
maxiter:int = 100,
smooth:list = [0.005],
init_m:dict = None,
) -> numpy.ndarray:
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
from . import sampler
s = sampler.Sampler()
if not imethod in s._kernels:
raise ValueError('Invalid interpolation method (kernel function).')
sk = s._kernels[imethod]
zsk = s._kernels['lanczos3']
if not isinstance(i1, numpy.ndarray) or not isinstance(i2, numpy.ndarray):
raise ValueError('Invalid types.')
if i1.ndim < 2 or i1.ndim > 3 or i2.ndim < 2 or i2.ndim > 3:
raise ValueError('Invalid dimensions.')
ishape = i1.shape
if ishape[0] != i2.shape[0] or ishape[1] != i2.shape[1]:
raise ValueError('Dimension mismatch.')
if not imask is None:
if not isinstance(imask, numpy.ndarray):
raise ValueError('Invalid imask parameter.')
elif imask.ndim != 2:
raise ValueError('Invalid imask.ndim value.')
elif imask.shape[0] != ishape[0] or imask.shape[1] != ishape[1]:
raise ValueError('Invalid imask.shape.')
if imask.dtype != numpy.bool_:
imask = (imask > 0).astype(numpy.uint8)
else:
imask = imask.astype(numpy.uint8)
i1o = i1
i2o = i2
if isinstance(smooth, list) and len(smooth) > 0:
try:
i1 = image_smooth_fft(i1o, smooth[0])
i2 = image_smooth_fft(i2o, smooth[0])
except:
raise
if isinstance(init_m, dict):
if origin is None:
if 'origin' in init_m:
origin = init_m['origin']
else:
origin = 0.5 * numpy.asarray(ishape, numpy.float64)
if 'trans' in init_m:
transp = init_m['trans']
else:
transp = numpy.zeros(2, numpy.float64)
if 'rotate' in init_m:
rotatep = init_m['rotate']
else:
rotatep = numpy.zeros(1, numpy.float64)
if 'scale' in init_m:
scalep = init_m['scale']
else:
scalep = numpy.ones(1, numpy.float64)
if 'shear' in init_m:
shearp = init_m['shear']
else:
shearp = numpy.zeros(1, numpy.float64)
else:
if origin is None:
origin = 0.5 * numpy.asarray(ishape, numpy.float64)
transp = numpy.zeros(2, numpy.float64)
rotatep = numpy.zeros(1, numpy.float64)
scalep = numpy.ones(1, numpy.float64)
shearp = numpy.zeros(1, numpy.float64)
m = {
'trans': transp,
'rotate': rotatep,
'scale': scalep,
'shear': shearp,
}
try:
moi = sampler.trans_matrix({'trans': origin})
mo = sampler.trans_matrix({'trans': -origin}) #pylint: disable=invalid-unary-operand-type
t = numpy.linalg.inv(sampler.trans_matrix(m))
except:
raise
s0 = numpy.arange(0.0, float(ishape[0]), 1.0).astype(numpy.float64)
s1 = numpy.arange(0.0, float(ishape[1]), 1.0).astype(numpy.float64)
(c1, c0) = numpy.meshgrid(s1, s0)
c0.shape = (c0.size,1,)
c1.shape = (c1.size,1,)
c01 = numpy.concatenate((c0,c1), axis=1)
step = (1.0 / 512.0)
dg0 = sampler._sample_grid_coords(
i1, c01 + step * numpy.asarray([1.0,1.0]), zsk[0], zsk[1])
dg1 = dg0.copy()
cxy = sampler._sample_grid_coords(
i1, c01 + step * numpy.asarray([1.0,-1.0]), zsk[0], zsk[1])
dg0 += cxy
dg1 -= cxy
cxy = sampler._sample_grid_coords(
i1, c01 + step * numpy.asarray([-1.0,1.0]), zsk[0], zsk[1])
dg0 -= cxy
dg1 += cxy
cxy = sampler._sample_grid_coords(
i1, c01 + step * numpy.asarray([-1.0,-1.0]), zsk[0], zsk[1])
dg0 -= cxy
dg1 -= cxy
dg0 *= 128.0
dg1 *= 128.0
sf = max([1.0, numpy.sqrt(float(ishape[0] * ishape[1]) / float(maxpts))])
s0 = numpy.arange(-0.25, float(ishape[0]), sf).astype(numpy.float64)
s1 = numpy.arange(-0.25, float(ishape[1]), sf).astype(numpy.float64)
(c1, c0) = numpy.meshgrid(s1, s0)
c0.shape = (c0.size,1,)
c1.shape = (c1.size,1,)
dg0.shape = ishape
dg1.shape = ishape
lsk = s._kernels['linear']
c01 = numpy.concatenate((c0,c1), axis=1)
if not imask is None:
cmask = sampler._sample_grid_coords(imask.astype(numpy.uint8),
c01, lsk[0], lsk[1]) >= 0.5
c0 = c0[cmask]
c1 = c1[cmask]
else:
cmask = sampler._sample_grid_coords((i1 >= 0.5).astype(numpy.uint8),
c01, lsk[0], lsk[1]) >= 0.5
c0 = c0[cmask]
c1 = c1[cmask]
c01 = numpy.concatenate((c0,c1), axis=1)
d = sampler._sample_grid_coords(i1, c01, sk[0], sk[1])
dg0 = sampler._sample_grid_coords(dg0, c01, sk[0], sk[1])
dg1 = sampler._sample_grid_coords(dg1, c01, sk[0], sk[1])
dg0.shape = (dg0.size,1,)
dg1.shape = (dg1.size,1,)
dg01 = numpy.concatenate((dg0, dg1), axis=1)
nc = 0
if trans:
nc += 2
if rotate:
nc += 1
if scale:
nc += 1
if shear:
nc += 1
i1r = numpy.zeros(c0.size * nc, dtype=numpy.float64).reshape((c0.size, nc,))
nc = 0
if trans:
transp[0] = 1.0e-6
t = numpy.matmul(moi, numpy.matmul(
numpy.linalg.inv(sampler.trans_matrix(m)), mo))
tc01 = numpy.concatenate(
(t[0,0]*c0+t[0,1]*c1+t[0,2], t[1,0]*c0+t[1,1]*c1+t[1,2]), axis=1)
i1r[:,nc] = -1.0e6 * numpy.sum((tc01 - c01) * dg01, axis=1)
nc += 1
transp[0] = 0.0
transp[1] = 1.0e-6
t = numpy.matmul(moi, numpy.matmul(
numpy.linalg.inv(sampler.trans_matrix(m)), mo))
tc01 = numpy.concatenate(
(t[0,0]*c0+t[0,1]*c1+t[0,2], t[1,0]*c0+t[1,1]*c1+t[1,2]), axis=1)
i1r[:,nc] = -1.0e6 * numpy.sum((tc01 - c01) * dg01, axis=1)
nc += 1
transp[1] = 0.0
if rotate:
rotatep[0] = 1.0e-6
t = numpy.matmul(moi, numpy.matmul(
numpy.linalg.inv(sampler.trans_matrix(m)), mo))
tc01 = numpy.concatenate(
(t[0,0]*c0+t[0,1]*c1+t[0,2], t[1,0]*c0+t[1,1]*c1+t[1,2]), axis=1)
i1r[:,nc] = -1.0e6 * numpy.sum((tc01 - c01) * dg01, axis=1)
nc += 1
rotatep[0] = 0.0
if scale:
scalep[0] = 1.000001
t = numpy.matmul(moi, numpy.matmul(
numpy.linalg.inv(sampler.trans_matrix(m)), mo))
tc01 = numpy.concatenate(
(t[0,0]*c0+t[0,1]*c1+t[0,2], t[1,0]*c0+t[1,1]*c1+t[1,2]), axis=1)
i1r[:,nc] = -1.0e6 * numpy.sum((tc01 - c01) * dg01, axis=1)
nc += 1
scalep[0] = 1.0
if shear:
shearp[0] = 1.0e-6
t = numpy.matmul(moi, numpy.matmul(
numpy.linalg.inv(sampler.trans_matrix(m)), mo))
tc01 = numpy.concatenate(
(t[0,0]*c0+t[0,1]*c1+t[0,2], t[1,0]*c0+t[1,1]*c1+t[1,2]), axis=1)
i1r[:,nc] = -1.0e6 * numpy.sum((tc01 - c01) * dg01, axis=1)
nc += 1
shearp[0] = 0.0
ss = numpy.inf * numpy.ones(maxiter+1, dtype=numpy.float64)
pss = ss[0]
stable = 0
if isinstance(init_m, dict):
t = numpy.matmul(numpy.linalg.inv(sampler.trans_matrix(m)), mo)
tm = numpy.repeat(t.reshape((t.shape[0], t.shape[1], 1,)),
maxiter+1, axis=2)
else:
tm = numpy.repeat(mo.reshape((mo.shape[0], mo.shape[1], 1,)),
maxiter+1, axis=2)
i2msk = (i2 >= 0.5).astype(numpy.uint8)
while maxiter > 0:
t = numpy.matmul(numpy.linalg.inv(tm[:,:,maxiter]), mo)
tc01 = numpy.concatenate(
(t[0,0]*c0+t[0,1]*c1+t[0,2], t[1,0]*c0+t[1,1]*c1+t[1,2]), axis=1)
msk = (sampler._sample_grid_coords(i2msk, tc01, lsk[0], lsk[1]) >= 0.5)
if numpy.sum(msk) < 32:
raise RuntimeError('Too little image overlap!')
f = sampler._sample_grid_coords(i2, tc01[msk,:], sk[0], sk[1])
cm = i1r[msk,:]
dm = d[msk]
sc = numpy.sum(dm) / numpy.sum(f)
dm = dm - sc * f
sol = numpy.linalg.lstsq(
numpy.matmul(cm.T, cm), numpy.matmul(cm.T, dm), rcond=None)[0]
nc = 0
if trans:
transp[0] = sol[nc]
nc += 1
transp[1] = sol[nc]
nc += 1
if rotate:
rotatep[0] = sol[nc]
nc += 1
if scale:
scalep[0] = sol[nc]
nc += 1
if shear:
shearp[0] = sol[nc]
nc += 1
maxiter -= 1
tm[:,:,maxiter] = numpy.matmul(numpy.linalg.inv(sampler.trans_matrix(m)),
tm[:,:,maxiter+1])
ss[maxiter] = numpy.sum(dm * dm) / float(dm.size)
if not numpy.isinf(pss) and ((pss - ss[maxiter]) / pss) < 1.0e-6:
stable += 1
if stable > 2:
break
else:
stable = 0
pss = ss[maxiter]
t = numpy.matmul(tm[:,:,numpy.argmin(ss)], moi)
ti = list(sampler.trans_matrix_inv(numpy.linalg.inv(t)))
if not trans:
ti[0] = numpy.zeros(2, numpy.float64)
if not rotate:
ti[1] = numpy.zeros(1, numpy.float64)
if not scale:
ti[2] = numpy.ones(2, numpy.float64)
if not shear:
ti[3] = numpy.zeros(1, numpy.float64)
return tuple(ti)
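# Example (sketch): registering a shifted copy of an image back onto the
# original. The filenames are illustrative assumptions; the returned tuple
# follows sampler.trans_matrix_inv ordering (translation, rotation, scale,
# shear), with entries zeroed/defaulted for parameters that were not estimated.
# i1 = image_gray(read_image('scene.png'), rgb_format=False)
# i2 = image_gray(read_image('scene_shifted.png'), rgb_format=False)
# t_par, r_par, s_par, sh_par = image_register(i1, i2, trans=True, rotate=True)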
# image resampling (cheap!)
def image_resample(image:numpy.ndarray, new_shape:tuple) -> numpy.ndarray:
"""
Cheap (!) image resampling
Parameters
----------
image : ndarray
Image to be resampled
new_shape : tuple
Shape of resampled image
Returns
-------
out_image : ndarray
Resampled image
"""
im_shape = image.shape
if len(im_shape) < 2:
raise ValueError('Invalid image array.')
if isinstance(new_shape, int) and new_shape > 1:
max_shape = max(im_shape)
sf = float(new_shape) / float(max_shape)
new_shape = (int(sf * float(im_shape[0])), int(sf * float(im_shape[1])))
elif isinstance(new_shape, float) and new_shape > 0.0 and new_shape <= 8.0:
new_shape = (int(new_shape * float(im_shape[0])),
int(new_shape * float(im_shape[1])))
if not isinstance(new_shape, tuple) or len(new_shape) != 2:
raise ValueError('Invalid new_shape parameter')
if not isinstance(new_shape[0], int) or new_shape[0] < 1:
raise ValueError('Invalid new_shape[0] value')
if not isinstance(new_shape[1], int) or new_shape[1] < 1:
raise ValueError('Invalid new_shape[1] value')
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
from .jitfunc import image_resample_u1, image_resample_f4
if len(im_shape) < 3:
re_shape = (im_shape[0], im_shape[1], 1)
try:
image.shape = re_shape
except:
raise RuntimeError('Error setting necessary planes in shape.')
if image.dtype == numpy.uint8:
rs_image = image_resample_u1(image, new_shape[0], new_shape[1])
else:
rs_image = image_resample_f4(image, new_shape[0], new_shape[1])
rs_shape = rs_image.shape
if rs_shape[2] == 1:
rs_image.shape = (rs_shape[0], rs_shape[1])
return rs_image
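# Example (sketch): the cheap resampler accepts three kinds of new_shape.
# 'photo.png' is an illustrative assumption.
# im = read_image('photo.png')
# small = image_resample(im, 256)          # int: longest edge scaled to 256
# half = image_resample(im, 0.5)           # float: both edges scaled by 0.5
# exact = image_resample(im, (480, 640))   # tuple: exact (rows, cols) target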
# rotate image (90 degrees left, right; or 180 degrees)
def image_rotate(image:numpy.ndarray, how:str = None) -> numpy.ndarray:
"""
Rotate an image
Parameters
----------
image : ndarray
Image to be rotated
how : str
Rotation flag, either of 'flip' (180 degrees), 'left' (90 degrees counter-clockwise), or 'right' (90 degrees clockwise)
Returns
-------
rotated : ndarray
Rotated image
"""
if not how or not isinstance(how, str) or not how[0].lower() in 'flr':
return image
im_shape = image.shape
has_planes = (len(im_shape) > 2)
how = how[0].lower()
if how == 'f':
if has_planes:
return image[::-1, ::-1, :]
else:
return image[::-1, ::-1]
elif how == 'r':
if has_planes:
return numpy.transpose(image, (1, 0, 2,))[:, ::-1, :]
else:
return numpy.transpose(image, (1, 0,))[:, ::-1]
else:
if has_planes:
return numpy.transpose(image, (1, 0, 2,))[::-1, :, :]
else:
return numpy.transpose(image, (1, 0,))[::-1, :]
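# Example (sketch): the three supported rotations; any other value of 'how'
# returns the input unchanged.
# left = image_rotate(im, 'left')     # 90 degrees counter-clockwise
# right = image_rotate(im, 'right')   # 90 degrees clockwise
# flipped = image_rotate(im, 'flip')  # 180 degrees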
# sample grid
def image_sample_grid(
image:numpy.ndarray,
sampling:Union[numpy.ndarray,list,tuple,int,float],
kernel:Union[str,tuple] = 'resample',
) -> numpy.ndarray:
"""
Sample grid of image (flexible resampling)
Parameters
----------
image : ndarray
Image array
sampling : ndarray, list, tuple, int, float
Sampling specification (see Sampler.sample_grid)
kernel : str, tuple
Kernel specification (see Sampler.sample_grid)
Returns
-------
sampled : ndarray
Sampled image
"""
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
from .sampler import Sampler
s = Sampler()
if image.dtype == numpy.uint8:
out_type = 'uint8'
else:
out_type = 'float64'
try:
return s.sample_grid(image, sampling, kernel, out_type)
except:
raise
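# Example (sketch): the sampling and kernel arguments are forwarded verbatim
# to Sampler.sample_grid, so any specification accepted there (scalar factor,
# per-axis list, explicit grids) works here; kernel names such as 'linear' and
# 'lanczos3' appear elsewhere in this module.
# resampled = image_sample_grid(im, [2.0, 2.0], 'linear')
# resampled_l3 = image_sample_grid(im, 0.5, 'lanczos3')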
# segment lesion
def image_segment_lesion(
image:numpy.ndarray,
fwhm:float = 0.02,
) -> numpy.ndarray:
try:
gimage = image_gray(image, rgb_format=False)
sgimage = image_smooth_fft(gimage, fwhm)
simage = image_smooth_fft(image, fwhm)
except:
raise
ic = image_center(image)
icd = numpy.sqrt(0.325 * (ic[0] * ic[0] + ic[1] * ic[1]))
s0 = numpy.arange(0.0, float(image.shape[0]), 1.0)
s1 = numpy.arange(0.0, float(image.shape[1]), 1.0)
(c1,c0) = numpy.meshgrid(s1 - ic[1], s0 - ic[0])
bmask = numpy.sqrt(c0 * c0 + c1 * c1) >= icd
fmask = numpy.sqrt(c0 * c0 + c1 * c1) <= (0.5 * icd)
back_mean = numpy.mean(sgimage[bmask])
back_std = numpy.std(sgimage[bmask])
fore_mean = numpy.mean(sgimage[fmask])
if fore_mean < (back_mean - 1.5 * back_std) or fore_mean > (back_mean + 1.5 * back_std):
lower_mean = (fore_mean < back_mean)
ftest = numpy.arange(0.1, 1.5, 0.1)
fmean_res = ftest.copy()
fstd_res = ftest.copy()
for (idx, ft) in enumerate(ftest):
fmask = numpy.sqrt(c0 * c0 + c1 * c1) <= (ft * icd)
fmean_res[idx] = numpy.mean(sgimage[fmask])
fstd_res[idx] = numpy.std(sgimage[fmask])
print(fmean_res)
print(fstd_res)
else:
pass
# smooth image using fft
def image_smooth_fft(image:numpy.ndarray, fwhm:float) -> numpy.ndarray:
"""
Smooth an image using FFT/inverse-FFT
Parameters
----------
image : ndarray
Image array
fwhm : float
Full width at half maximum of the smoothing kernel, in pixels; values <= 0.36 are interpreted as a fraction of sqrt(image.size)
Returns
-------
smoothed : ndarray
Smoothed image
"""
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
from .jitfunc import conv_kernel
# deal with invalid/special values
if fwhm <= 0.0:
return image
elif fwhm <= 0.36:
fwhm = fwhm * numpy.sqrt(float(image.size))
# place kernel into image
k = conv_kernel(float(fwhm))
ki = k.repeat(k.size).reshape((k.size,k.size))
ki = ki * ki.T
im_shape = image.shape
if image.dtype != numpy.uint8:
from_uint8 = False
if len(im_shape) < 3:
ka = numpy.zeros_like(image)
else:
ka = numpy.zeros(im_shape[0] * im_shape[1],
dtype=numpy.float32).reshape((im_shape[0], im_shape[1],))
else:
from_uint8 = True
image = image.astype(numpy.float32)
ka = numpy.zeros(im_shape[0] * im_shape[1],
dtype=numpy.float32).reshape((im_shape[0], im_shape[1],))
kh = ki.shape[0] // 2
kh0 = min(kh, ka.shape[0]-1)
kh1 = min(kh, ka.shape[1]-1)
ka[0:kh0+1,0:kh1+1] += ki[kh:kh+kh0+1,kh:kh+kh1+1]
ka[0:kh0+1,-kh1:] += ki[kh:kh+kh0+1,0:kh1]
ka[-kh0:,0:kh1+1] += ki[0:kh0,kh:kh+kh1+1]
ka[-kh0:,-kh1:] += ki[0:kh0,0:kh1]
ka /= numpy.sum(ka)
# then perform 2D FFT
if len(image.shape) < 3:
out = numpy.fft.ifftn(numpy.fft.fft2(image) * numpy.fft.fft2(ka)).real
else:
out = numpy.zeros(image.size, dtype=image.dtype).reshape(image.shape)
for p in range(image.shape[2]):
out[:,:,p] = numpy.fft.ifft2(numpy.fft.fft2(image[:,:,p]) * numpy.fft.fft2(ka)).real
if from_uint8:
out = numpy.trunc(out).astype(numpy.uint8)
return out
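# Example (sketch): FWHM values <= 0.36 are interpreted as a fraction of
# sqrt(image.size), larger values as an absolute FWHM in pixels.
# slightly_blurred = image_smooth_fft(im, 0.01)   # relative kernel size
# heavily_blurred = image_smooth_fft(im, 25.0)    # ~25-pixel FWHM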
# outer-boundary smoothing
def image_smooth_outer(im:numpy.ndarray, boundary:int) -> numpy.ndarray:
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
import scipy.ndimage as ndimage
from .sampler import _gauss_kernel
if len(im.shape) > 2:
raise ValueError('Image must be single-plane.')
if im.dtype != numpy.bool_:
im = im > 0
vim = im.astype(numpy.float64)
if not isinstance(boundary, int) or boundary <= 0:
return vim
if boundary == 1:
vim[numpy.logical_and(ndimage.binary_dilation(im), numpy.logical_not(im))] = 0.5
return vim
imb = numpy.logical_and(im, numpy.logical_not(ndimage.binary_erosion(im)))
imd = ndimage.morphology.distance_transform_edt(numpy.logical_not(imb)).astype(numpy.int32)
maxd = int(numpy.amax(imd))
k = _gauss_kernel(float(boundary))
kh = k.size // 2
k = k[kh+boundary:]
k = k / k[0]
if k.size <= maxd:
k = numpy.concatenate((k, numpy.zeros(1+maxd-k.size)), axis=0)
im = numpy.logical_not(im)
vim[im] = k[imd[im]]
return vim
# scale-smoothing
def image_smooth_scale(im:numpy.ndarray, fwhm:float) -> numpy.ndarray:
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
import scipy.ndimage as ndimage
if len(im.shape) > 2:
raise ValueError('Image must be single-plane.')
if im.dtype != numpy.bool_:
im = im > 0
imb = numpy.logical_and(ndimage.binary_dilation(im), numpy.logical_not(im))
sim = image_smooth_fft(im.astype(numpy.float32), fwhm)
return numpy.minimum(sim / numpy.mean(sim[imb]), 1.0)
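# Example (sketch): turning a binary lesion mask into soft weights; 'mask' is
# an illustrative assumption. image_smooth_outer feathers the region outward
# from its boundary, while image_smooth_scale rescales an FFT-smoothed mask so
# the level just outside the original boundary maps to ~1.0 (capped at 1.0).
# soft_outer = image_smooth_outer(mask, 12)     # 12-pixel outward feathering
# soft_scaled = image_smooth_scale(mask, 0.02)  # FWHM as fraction of image size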
# color LUT operation
def lut_lookup(
values:numpy.ndarray,
pos_lut:numpy.ndarray,
neg_lut:numpy.ndarray = None,
default:List = None,
trans_fac:float = 1.0,
trans_off:float = 0.0,
above_pos_col:List = None,
below_neg_col:List = None,
):
"""
Color lookup from a look-up table (LUT)
Parameters
----------
values : ndarray
Numeric values for which to lookup a color from the LUT
pos_lut : ndarray
Cx3 color lookup table (for positive values)
neg_lut : ndarray
Cx3 color lookup table (for negative values, default None)
default : list
Default RGB color triplet (default: black/0,0,0)
trans_fac : float
Transformation factor (scaling of values, default = 1.0)
trans_off : float
Offset for transformation (lower threshold, default = 0.0)
above_pos_col : list
RGB color triplet for values above table length
below_neg_col : list
RGB color triplet for values below negative values table length
Returns
-------
colors : ndarray
Vx3 RGB triplets
"""
if pos_lut.ndim != 2:
raise ValueError('Invalid LUT')
elif pos_lut.shape[1] != 3:
raise ValueError('Invalid LUT')
try:
num_vals = values.size
values = values.reshape((num_vals,))
except:
raise
num_cols = pos_lut.shape[0]
if not neg_lut is None:
if neg_lut.ndim != 2:
raise ValueError('Invalid LUT')
elif neg_lut.shape[1] != 3:
raise ValueError('Invalid LUT')
elif neg_lut.shape[0] != num_cols:
raise ValueError('Negative LUT must match in number of colors')
if not isinstance(default, list):
default = [0, 0, 0]
elif len(default) != 3:
default = [0, 0, 0]
else:
default = list(default)
if not isinstance(default[0], int) or default[0] < 0:
default[0] = 0
elif default[0] > 255:
default[0] = 255
if not isinstance(default[1], int) or default[1] < 0:
default[1] = 0
elif default[1] > 255:
default[1] = 255
if not isinstance(default[2], int) or default[2] < 0:
default[2] = 0
elif default[2] > 255:
default[2] = 255
if not above_pos_col is None:
if not isinstance(above_pos_col, list) or len(above_pos_col) != 3:
raise ValueError('Invalid above_pos_col parameter')
if (not isinstance(above_pos_col[0], int) or
not isinstance(above_pos_col[1], int) or
not isinstance(above_pos_col[2], int) or
above_pos_col[0] < 0 or above_pos_col[0] > 255 or
above_pos_col[1] < 0 or above_pos_col[1] > 255 or
above_pos_col[2] < 0 or above_pos_col[2] > 255):
raise ValueError('Invalid above_pos_col parameter')
if not below_neg_col is None:
if not isinstance(below_neg_col, list) or len(below_neg_col) != 3:
raise ValueError('Invalid below_neg_col parameter')
if (not isinstance(below_neg_col[0], int) or
not isinstance(below_neg_col[1], int) or
not isinstance(below_neg_col[2], int) or
below_neg_col[0] < 0 or below_neg_col[0] > 255 or
below_neg_col[1] < 0 or below_neg_col[1] > 255 or
below_neg_col[2] < 0 or below_neg_col[2] > 255):
raise ValueError('Invalid below_neg_col parameter')
zero = numpy.zeros(1, dtype=values.dtype)
if trans_fac != 1.0:
values = trans_fac * values
else:
values = values.copy()
if not neg_lut is None and trans_off > 0:
vs = numpy.sign(values)
values = vs * numpy.maximum(zero, numpy.abs(values) - trans_off)
elif trans_off != 0:
values = values - trans_off
if above_pos_col is None:
values *= float(num_cols - 1)
else:
values *= float(num_cols)
ispos = (values > 0.0)
if not neg_lut is None:
isneg = (values < 0.0)
values = numpy.trunc(values).astype(numpy.int32)
colors = numpy.zeros((num_vals, 3), dtype=numpy.uint8, order='C')
colors[:,0] = default[0]
colors[:,1] = default[1]
colors[:,2] = default[2]
if above_pos_col is None:
values[values >= num_cols] = num_cols - 1
colors[ispos, 0] = pos_lut[values[ispos], 0]
colors[ispos, 1] = pos_lut[values[ispos], 1]
colors[ispos, 2] = pos_lut[values[ispos], 2]
else:
above = (values >= num_cols)
below = numpy.logical_and(ispos, numpy.logical_not(above))
colors[below, 0] = pos_lut[values[below], 0]
colors[below, 1] = pos_lut[values[below], 1]
colors[below, 2] = pos_lut[values[below], 2]
colors[above, 0] = above_pos_col[0]
colors[above, 1] = above_pos_col[1]
colors[above, 2] = above_pos_col[2]
if neg_lut is not None:
values = -values
if below_neg_col is None:
values[values >= num_cols] = num_cols - 1
colors[isneg, 0] = neg_lut[values[isneg], 0]
colors[isneg, 1] = neg_lut[values[isneg], 1]
colors[isneg, 2] = neg_lut[values[isneg], 2]
else:
above = (values >= num_cols)
below = numpy.logical_and(isneg, numpy.logical_not(above))
colors[below, 0] = neg_lut[values[below], 0]
colors[below, 1] = neg_lut[values[below], 1]
colors[below, 2] = neg_lut[values[below], 2]
colors[above, 0] = below_neg_col[0]
colors[above, 1] = below_neg_col[1]
colors[above, 2] = below_neg_col[2]
return colors
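# Example (sketch): mapping scalar statistics onto a 256-entry LUT. After the
# trans_fac/trans_off transform the values are expected to lie roughly in
# [0, 1] (or [-1, 1] when a negative LUT is given). The ramps below are
# illustrative assumptions; any Cx3 uint8 table works.
# pos = numpy.stack([numpy.arange(256)] * 3, axis=1).astype(numpy.uint8)
# neg = pos[::-1, :]
# stat = numpy.asarray([-1.0, -0.2, 0.0, 0.4, 0.9])
# rgb = lut_lookup(stat, pos, neg)   # -> 5x3 uint8 RGB triplets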
# radial sampling (TODO!)
# read image
def read_image(image_file:str) -> numpy.ndarray:
# IMPORT DONE HERE TO SAVE TIME AT MODULE INIT
import imageio
try:
return imageio.imread(image_file)
except:
raise
# rgb -> hue, saturation, lightness, value
def rgb2hslv(r:numpy.ndarray, g:numpy.ndarray, b:numpy.ndarray):
"""
Convert RGB to HSLV values
Parameters
----------
r, g, b : ndarray
Arrays with red, green, blue channel values (any dims, must match!)
Returns
-------
(h, sl, l, sv, v) : tuple
Hue, saturation (w.r.t. lightness), lightness, saturation (w.r.t. value), and value arrays
"""
if isinstance(r, list):
r = numpy.asarray(r)
if isinstance(g, list):
g = numpy.asarray(g)
if isinstance(b, list):
b = numpy.asarray(b)
if r.shape != g.shape or r.shape != b.shape:
raise ValueError('Invalid shape/dims.')
if r.dtype != g.dtype or r.dtype != b.dtype:
raise ValueError('Invalid datatype combination.')
rm = numpy.logical_and(r >= g, r >= b)
gm = numpy.logical_and(g > r, g >= b)
bm = numpy.logical_and(b > r, b > g)
# -*- coding: utf-8 -*-
import logging
import numpy as np
from scipy.spatial.distance import cdist, pdist, squareform
# TODO: make this robust to having b0s
def swap_sampling_eddy(points, shell_idx, verbose=1):
"""
Optimize the bvecs of a fixed multi-shell scheme for eddy-current
correction (FSL EDDY).
Brute-force approach to maximally spread the bvecs,
shell per shell.
For each shell:
For each vector:
1) find the closest neighbor,
2) flip it,
3) if the global system energy is better, keep it flipped
repeat until convergence.
Parameters
----------
points: numpy.array, bvecs normalized to 1.
shell_idx: numpy.array, Shell index for bvecs in points.
verbose: 0 = silent, 1 = summary upon completion, 2 = print iterations.
Return
------
points: numpy.array, bvecs normalized to 1.
shell_idx: numpy.array, Shell index for bvecs in points.
"""
new_points = points.copy()
Ks = compute_ks_from_shell_idx(shell_idx)
maxIter = 100
for shell in range(len(Ks)):
# Extract points from shell
shell_pts = points[shell_idx == shell].copy()
logging.debug('Shell = {}'.format(shell))
# System energy matrix
# TODO: test other energy functions such as electron repulsion
dist = squareform(pdist(shell_pts, 'Euclidean')) + 2 * np.eye(shell_pts.shape[0])
it = 0
converged = False
while (it < maxIter) and not converged:
converged = True
# For each bvec on the shell
for pts_idx in range(len(shell_pts)):
# Find closest neighbor w.r.t. metric of dist
toMove = np.argmin(dist[pts_idx])
# Compute new column of system matrix with flipped toMove point
new_col = cdist(shell_pts, -shell_pts[None, toMove]).squeeze()
old_pts_ener = dist[toMove].sum()
new_pts_ener = new_col.sum()
if new_pts_ener > old_pts_ener:
# Swap sign of point toMove
shell_pts[toMove] *= -1
dist[:, toMove] = new_col
dist[toMove, :] = new_col
converged = False
logging.debug('Swapped {} ({:.2f} --> \
{:.2f})'.format(toMove,
old_pts_ener,
new_pts_ener))
it += 1
new_points[shell_idx == shell] = shell_pts
logging.info('Eddy current swap optimization finished.')
return new_points, shell_idx
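# Example (sketch): spreading an existing two-shell scheme for EDDY by sign
# flips only; the arrays below are illustrative stand-ins for a real scheme.
# points = np.random.randn(60, 3)
# points /= np.linalg.norm(points, axis=1, keepdims=True)
# shell_idx = np.repeat(np.arange(2), 30)
# points, shell_idx = swap_sampling_eddy(points, shell_idx)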
def compute_ks_from_shell_idx(shell_idx):
"""
Recover number of points per shell from point-wise shell index.
Parameters
----------
shell_idx: numpy.array
Shell index of sampling scheme.
Return
------
Ks: list
number of samples for each shell, starting from lowest.
"""
K = len(set(shell_idx))
Ks = []
for idx in range(K):
Ks.append(np.sum(shell_idx == idx))
return Ks
def add_b0s(points, shell_idx, b0_every=10, finish_b0=False, verbose=1):
"""
Add interleaved b0s to sampling scheme.
Parameters
----------
points: numpy.array, bvecs normalized to 1.
shell_idx: numpy.array, Shell index for bvecs in points.
b0_every: integer, final scheme will have a b0 every b0_every samples
finish_b0: boolean, Option to add a b0 as last sample.
verbose: 0 = silent, 1 = summary upon completion, 2 = print iterations.
Return
------
points: numpy.array
bvecs normalized to 1.
shell_idx: numpy.array
Shell index for bvecs in points.
"""
new_points = []
new_shell_idx = []
for idx in range(shell_idx.shape[0]):
if not idx % (b0_every - 1):
# insert b0
new_points.append(np.array([0.0, 0.0, 0.0]))
new_shell_idx.append(-1)
new_points.append(points[idx])
new_shell_idx.append(shell_idx[idx])
if finish_b0 and (new_shell_idx[-1] != -1):
# insert b0
new_points.append(np.array([0.0, 0.0, 0.0]))
new_shell_idx.append(-1)
logging.info('Interleaved {} b0s'.format(len(new_shell_idx) -
shell_idx.shape[0]))
return np.array(new_points), np.array(new_shell_idx)
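# Example (sketch): interleave a b0 every 10 samples and finish with one;
# inserted b0s carry shell index -1 and a [0, 0, 0] bvec.
# points, shell_idx = add_b0s(points, shell_idx, b0_every=10, finish_b0=True)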
def correct_b0s_philips(points, shell_idx, verbose=1):
"""
Replace the [0.0, 0.0, 0.0] value of b0s bvecs
by existing bvecs in the sampling scheme.
This is useful because Recon 1.0 of Philips allocates memory
proportional to (total nb. of diff. bvals) x (total nb. diff. bvecs)
and we can't leave multiple b0s with b-vector [0.0, 0.0, 0.0] and b-value 0
because (b-vector, b-value) pairs have to be unique.
Parameters
----------
points: numpy.array
bvecs normalized to 1
shell_idx: numpy.array
Shell index for bvecs in points.
verbose: 0 = silent, 1 = summary upon completion, 2 = print iterations.
Return
------
points: numpy.array
bvecs normalized to 1
shell_idx: numpy.array
Shell index for bvecs in points
"""
new_points = points.copy()
non_b0_pts = points[np.where(shell_idx != -1)]
# Assume non-collinearity of non-b0s bvecs (i.e. Caruyer sampler type)
new_points[np.where(shell_idx == -1)[0]] = non_b0_pts[:np.sum(shell_idx == -1)]
logging.info('Done adapting b0s for Philips scanner.')
return new_points, shell_idx
def compute_min_duty_cycle_bruteforce(points, shell_idx, bvals, ker_size=10,
Niter=100000, verbose=1, plotting=False,
rand_seed=0):
"""
Optimize the ordering of the non-b0 samples to minimize the gradient duty cycle.
The Philips scanner (and others) estimates the peak power requirement with its
duty-cycle model (an approximation) and increases the TR according to the
hardware needs. This function minimizes that effect by:
1) Randomly permuting the non-b0 samples
2) Finding the peak X, Y, and Z amplitudes with a sliding window
3) Computing the peak power needed as max(peak_x, peak_y, peak_z)
4) Keeping the permutation yielding the lowest peak power
Parameters
----------
points: numpy.array
bvecs normalized to 1
shell_idx: numpy.array
Shell index for bvecs in points.
bvals: list
increasing bvals, b0 last.
ker_size: int
kernel size for the sliding window.
Niter: int
number of bruteforce iterations.
verbose: 0 = silent, 1 = summary upon completion, 2 = print iterations.
plotting: bool
plot the energy at each iteration.
rand_seed: int
seed for the random permutations.
Return
------
points: numpy.array
bvecs normalized to 1.
shell_idx: numpy.array
Shell index for bvecs in points.
"""
logging.debug('Shuffling Data (N_iter = {}, \
ker_size = {})'.format(Niter, ker_size))
if plotting:
store_best_value = []
non_b0s_mask = shell_idx != -1
N_dir = non_b0s_mask.sum()
q_scheme = np.abs(points * np.sqrt(np.array([bvals[idx] for idx in shell_idx]))[:, None])
q_scheme_current = q_scheme.copy()
ordering_best = np.arange(N_dir)
power_best = compute_peak_power(q_scheme_current, ker_size=ker_size)
if plotting:
store_best_value.append((0, power_best))
np.random.seed(rand_seed)
for it in range(Niter):
if not it % np.ceil(Niter/10.):
logging.debug('Iter {} / {} : {}'.format(it, Niter, power_best))
ordering_current = np.random.permutation(N_dir)
q_scheme_current[non_b0s_mask] = q_scheme[non_b0s_mask][ordering_current]
power_current = compute_peak_power(q_scheme_current, ker_size=ker_size)
if power_current < power_best:
ordering_best = ordering_current.copy()
power_best = power_current
if plotting:
store_best_value.append((it+1, power_best))
logging.debug('Iter {} / {} : {}'.format(Niter, Niter, power_best))
logging.info('Duty cycle optimization finished.')
if plotting:
store_best_value = np.array(store_best_value)
import pylab as pl
pl.plot(store_best_value[:, 0], store_best_value[:, 1], '-o')
pl.show()
new_points = points.copy()
new_points[non_b0s_mask] = points[non_b0s_mask][ordering_best]
new_shell_idx = shell_idx.copy()
new_shell_idx[non_b0s_mask] = shell_idx[non_b0s_mask][ordering_best]
return new_points, new_shell_idx
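# Example (sketch): reorder the directions of a two-shell scheme to lower the
# peak gradient demand. Per the docstring, bvals lists the shell b-values in
# increasing order with the b0 value last; the iteration budget is illustrative.
# bvals = [1000.0, 3000.0, 0.0]
# points, shell_idx = compute_min_duty_cycle_bruteforce(points, shell_idx, bvals,
#                                                       ker_size=10, Niter=20000)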
def compute_peak_power(q_scheme, ker_size=10):
"""
Parameters
------
q_scheme: nd.array
Scheme of acquisition.
ker_size: int
Kernel size (default=10).
Return
------
Max peak power from q_scheme.
"""
# Note: np.convolve inverses the filter
ker = np.ones(ker_size)
pow_x = np.convolve(q_scheme[:, 0], ker, 'full')[:-(ker_size-1)]
pow_y = np.convolve(q_scheme[:, 1], ker, 'full')[:-(ker_size-1)]
pow_z = np.convolve(q_scheme[:, 2], ker, 'full')[:-(ker_size-1)]
max_pow_x = np.max(pow_x)
max_pow_y = np.max(pow_y)
max_pow_z = np.max(pow_z)
return np.max([max_pow_x, max_pow_y, max_pow_z])
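# Worked example (sketch): with ker_size=3 the per-axis |q| values are summed
# over a 3-sample sliding window via convolution, and the largest of the three
# per-axis maxima is returned.
# q = np.abs(np.random.randn(20, 3))
# print(compute_peak_power(q, ker_size=3))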
def compute_bvalue_lin_q(bmin=0.0, bmax=3000.0, nb_of_b_inside=2,
exclude_bmin=True, verbose=1):
"""
Compute bvals linearly distributed in q-value in the
interval [bmin, bmax].
Parameters
----------
bmin: float
Minimum b-value, lower b-value bounds.
bmax: float
Maximum b-value, upper b-value bounds.
nb_of_b_inside: int
Number of b-values excluding bmin and bmax.
exclude_bmin: bool
exclude bmin from the interval, useful if bmin = 0.0.
verbose: 0 = silent, 1 = summary upon completion, 2 = print iterations.
Return
------
bvals: list
increasing bvals.
"""
bvals = list(np.linspace(np.sqrt(bmin),
np.sqrt(bmax),
nb_of_b_inside + 2)**2)
if exclude_bmin:
bvals = bvals[1:]
logging.info('bvals linear in q: {}'.format(bvals))
return bvals
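# Worked example: linear-in-q spacing versus the linear-in-b spacing of the
# companion function defined below (bmin excluded in both cases).
# compute_bvalue_lin_q(0.0, 3000.0, 2)  # -> approximately [333.3, 1333.3, 3000.0]
# compute_bvalue_lin_b(0.0, 3000.0, 2)  # -> [1000.0, 2000.0, 3000.0]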
def compute_bvalue_lin_b(bmin=0.0, bmax=3000.0, nb_of_b_inside=2,
exclude_bmin=True, verbose=1):
"""
Compute bvals linearly distributed in b-value in the
interval [bmin, bmax].
Parameters
----------
bmin: float
Minimum b-value, lower b-value bounds.
bmax: float
Maximum b-value, upper b-value bounds.
nb_of_b_inside: int
Number of b-values excluding bmin and bmax.
exclude_bmin: boolean
exclude bmin from the interval, useful if bmin = 0.0.
verbose: 0 = silent, 1 = summary upon completion, 2 = print iterations.
Return
------
bvals: list
increasing bvals.
"""
bvals = list(np.linspace(bmin, bmax, nb_of_b_inside + 2))
import warnings
import numpy as np
import matplotlib.pyplot as plt
from matplotlib.lines import Line2D
from automon.automon.automon_coordinator import AdcdHelper
from automon import AutomonNode
from test_utils.functions_to_monitor import func_entropy, func_variance, func_inner_product, func_rozenbrock
from test_utils.node_stream import NodeStreamFrequency, NodeStreamFirstAndSecondMomentum, NodeStreamAverage
def entropy_automon_draw_constraints(node):
def prep_domain_grid():
X_domain = np.arange(node.domain[0][0], node.domain[0][1], 0.02)
Y_domain = np.arange(node.domain[0][0], node.domain[0][1], 0.02)
import torch
import torch.nn as nn
import numpy as np
import math
class ForwardKinematics:
def __init__(self, args, edges):
self.topology = [-1] * (len(edges) + 1)
self.rotation_map = []
for i, edge in enumerate(edges):
self.topology[edge[1]] = edge[0]
self.rotation_map.append(edge[1])
self.world = args.fk_world
self.pos_repr = args.pos_repr
self.quater = args.rotation == 'quaternion'
def forward_from_raw(self, raw, offset, world=None, quater=None):
if world is None: world = self.world
if quater is None: quater = self.quater
if self.pos_repr == '3d':
position = raw[:, -3:, :]
rotation = raw[:, :-3, :]
elif self.pos_repr == '4d':
raise Exception('Not support')
if quater:
rotation = rotation.reshape((rotation.shape[0], -1, 4, rotation.shape[-1]))
identity = torch.tensor((1, 0, 0, 0), dtype=torch.float, device=raw.device)
else:
rotation = rotation.reshape((rotation.shape[0], -1, 3, rotation.shape[-1]))
identity = torch.zeros((3, ), dtype=torch.float, device=raw.device)
identity = identity.reshape((1, 1, -1, 1))
new_shape = list(rotation.shape)
new_shape[1] += 1
new_shape[2] = 1
rotation_final = identity.repeat(new_shape)
for i, j in enumerate(self.rotation_map):
rotation_final[:, j, :, :] = rotation[:, i, :, :]
return self.forward(rotation_final, position, offset, world=world, quater=quater)
'''
rotation should have shape batch_size * Joint_num * (3/4) * Time
position should have shape batch_size * 3 * Time
offset should have shape batch_size * Joint_num * 3
output have shape batch_size * Time * Joint_num * 3
'''
def forward(self, rotation: torch.Tensor, position: torch.Tensor, offset: torch.Tensor, order='xyz', quater=False, world=True):
if not quater and rotation.shape[-2] != 3: raise Exception('Unexpected shape of rotation')
if quater and rotation.shape[-2] != 4: raise Exception('Unexpected shape of rotation')
rotation = rotation.permute(0, 3, 1, 2)
position = position.permute(0, 2, 1)
result = torch.empty(rotation.shape[:-1] + (3, ), device=position.device)
norm = torch.norm(rotation, dim=-1, keepdim=True)
#norm[norm < 1e-10] = 1
rotation = rotation / norm
if quater:
transform = self.transform_from_quaternion(rotation)
else:
transform = self.transform_from_euler(rotation, order)
offset = offset.reshape((-1, 1, offset.shape[-2], offset.shape[-1], 1))
result[..., 0, :] = position
for i, pi in enumerate(self.topology):
if pi == -1:
assert i == 0
continue
transform[..., i, :, :] = torch.matmul(transform[..., pi, :, :], transform[..., i, :, :])
result[..., i, :] = torch.matmul(transform[..., i, :, :], offset[..., i, :, :]).squeeze()
if world: result[..., i, :] += result[..., pi, :]
return result
def from_local_to_world(self, res: torch.Tensor):
res = res.clone()
for i, pi in enumerate(self.topology):
if pi == 0 or pi == -1:
continue
res[..., i, :] += res[..., pi, :]
return res
@staticmethod
def transform_from_euler(rotation, order):
rotation = rotation / 180 * math.pi
transform = torch.matmul(ForwardKinematics.transform_from_axis(rotation[..., 1], order[1]),
ForwardKinematics.transform_from_axis(rotation[..., 2], order[2]))
transform = torch.matmul(ForwardKinematics.transform_from_axis(rotation[..., 0], order[0]), transform)
return transform
@staticmethod
def transform_from_axis(euler, axis):
transform = torch.empty(euler.shape[0:3] + (3, 3), device=euler.device)
cos = torch.cos(euler)
sin = torch.sin(euler)
cord = ord(axis) - ord('x')
transform[..., cord, :] = transform[..., :, cord] = 0
transform[..., cord, cord] = 1
if axis == 'x':
transform[..., 1, 1] = transform[..., 2, 2] = cos
transform[..., 1, 2] = -sin
transform[..., 2, 1] = sin
if axis == 'y':
transform[..., 0, 0] = transform[..., 2, 2] = cos
transform[..., 0, 2] = sin
transform[..., 2, 0] = -sin
if axis == 'z':
transform[..., 0, 0] = transform[..., 1, 1] = cos
transform[..., 0, 1] = -sin
transform[..., 1, 0] = sin
return transform
@staticmethod
def transform_from_quaternion(quater: torch.Tensor):
qw = quater[..., 0]
qx = quater[..., 1]
qy = quater[..., 2]
qz = quater[..., 3]
x2 = qx + qx
y2 = qy + qy
z2 = qz + qz
xx = qx * x2
yy = qy * y2
wx = qw * x2
xy = qx * y2
yz = qy * z2
wy = qw * y2
xz = qx * z2
zz = qz * z2
wz = qw * z2
m = torch.empty(quater.shape[:-1] + (3, 3), device=quater.device)
m[..., 0, 0] = 1.0 - (yy + zz)
m[..., 0, 1] = xy - wz
m[..., 0, 2] = xz + wy
m[..., 1, 0] = xy + wz
m[..., 1, 1] = 1.0 - (xx + zz)
m[..., 1, 2] = yz - wx
m[..., 2, 0] = xz - wy
m[..., 2, 1] = yz + wx
m[..., 2, 2] = 1.0 - (xx + yy)
return m
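# Example (sketch): calling the forward pass directly with identity rotations.
# An already-constructed instance fk = ForwardKinematics(args, edges) is
# assumed; the tensors below are illustrative placeholders.
# J, T = len(fk.topology), 8
# quats = torch.zeros(1, J, 4, T)
# quats[:, :, 0, :] = 1.0                                    # unit quaternions
# root = torch.zeros(1, 3, T)
# offsets = torch.randn(1, J, 3)
# world_pos = fk.forward(quats, root, offsets, quater=True)  # 1 x T x J x 3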
class InverseKinematics:
def __init__(self, rotations: torch.Tensor, positions: torch.Tensor, offset, parents, constrains):
self.rotations = rotations
self.rotations.requires_grad_(True)
self.position = positions
self.position.requires_grad_(True)
self.parents = parents
self.offset = offset
self.constrains = constrains
self.optimizer = torch.optim.Adam([self.position, self.rotations], lr=1e-3, betas=(0.9, 0.999))
self.crit = nn.MSELoss()
def step(self):
self.optimizer.zero_grad()
glb = self.forward(self.rotations, self.position, self.offset, order='', quater=True, world=True)
loss = self.crit(glb, self.constrains)
loss.backward()
self.optimizer.step()
self.glb = glb
return loss.item()
def tloss(self, time):
return self.crit(self.glb[time, :], self.constrains[time, :])
def all_loss(self):
res = [self.tloss(t).detach().numpy() for t in range(self.constrains.shape[0])]
return np.array(res)
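# Example (sketch): a minimal optimization loop with the IK wrapper. The
# constructor arguments (initial rotations/positions, offsets, parent indices
# and target joint positions) are assumed to come from the surrounding
# pipeline; 500 steps is an illustrative budget.
# ik = InverseKinematics(rotations, positions, offset, parents, constrains)
# for _ in range(500):
#     loss = ik.step()
# print('final MSE:', loss, 'per-frame losses:', ik.all_loss())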
import numpy as np
import matplotlib.pyplot as plt
class ValueLogger(object):
def __init__(self, show_plot=True):
self.plotter = {}
if show_plot:
ValuePlotter.show_plot()
def add_plot(self, name, xlabel, filter_size=None):
self.plotter[name] = ValuePlotter(xlabel=xlabel, ylabel=name, filter_size=filter_size)
def __call__(self, **kwargs):
for key, value in kwargs.items():
if key in self.plotter.keys():
self.plotter[key].add(value)
self.plotter[key].plot()
class ValuePlotter(object):
plots_count = 0
def __init__(self, xlabel, ylabel, filter_size=None):
self.xlabel = xlabel
self.ylabel = ylabel
self.filter_size = filter_size
ValuePlotter.plots_count += 1
self.index = self.plots_count
self.values = []
@classmethod
def show_plot(cls):
plt.ion()
plt.show()
def add(self, value):
self.values.append(value)
def plot(self):
plt.subplot(self.plots_count, 1, self.index)
plt.cla()
plt.xlabel(self.xlabel)
plt.ylabel(self.ylabel)
plt.plot(self.values)
if self.filter_size is not None:
plt.plot(self._filter_values(self.values))
plt.pause(0.001)
def _filter_values(self, values):
padded_values = np.concatenate(
[
np.full(self.filter_size // 2, values[0]),
np.array(values),
np.full(self.filter_size // 2 - 1, values[-1]),
])
# NOTE: the original body was truncated here; the moving-average completion
# below is an assumption consistent with the padding sizes above.
return np.convolve(padded_values,
np.full(self.filter_size, 1.0 / self.filter_size),
mode='valid')
import sys
import numpy as np
import matplotlib.patches as patches
import matplotlib.pyplot as plt
from scipy.interpolate import griddata
'''
usage: python wmap1.py <pmesh> <loc>
<pmesh> is the filename of pmesh file
<loc> is either 'bot' or 'top'
'''
def kart2frac(kart, latmat):
"""
convert cart coords into frac
:param kart: a list of cart coords
:param latmat: [va, vb, vc] in cart
:return: np array [a, b, c]: frac coords
"""
p = np.matmul(np.array(kart), np.linalg.inv(np.array(latmat)))
return p
def xyzarray2frac(x, y, z, latmat):
"""
convert frac into cart
:param x:
:param y:
:param z:
:param latmat: [va, vb, vc] in cart
:return: nx3 mat
"""
length = min([len(x), len(y), len(z)])
abc = np.empty((length, 3))
abc[:] = np.nan
for i in range(length):
kart = kart2frac([x[i], y[i], z[i]], latmat)
abc[i][0] = kart[0]
abc[i][1] = kart[1]
abc[i][2] = kart[2]
return abc
def parse_cif(cif_name='iso.cif'):
"""
parse lattice vectors from 'iso.cif'
:return: la 1x3 np array
lb 1x3 np array
lc 1x3 np array
theta_c_rad angle between c axis and z axis
"""
with open(cif_name) as f_iso:
content = f_iso.readlines()
u = np.zeros(6)
for e in [line.strip().split() for line in content if len(line.strip().split()) == 2]:
if 'cell_length_a' in e[0]:
u[0] = float(e[1])
elif 'cell_length_b' in e[0]:
u[1] = float(e[1])
elif 'cell_length_c' in e[0]:
u[2] = float(e[1])
elif 'cell_angle_alpha' in e[0]:
u[3] = float(e[1])
elif 'cell_angle_beta' in e[0]:
u[4] = float(e[1])
elif 'cell_angle_gamma' in e[0]:
u[5] = float(e[1])
a, b, c, alpha, beta, gamma = u
cosdelta_up = np.cos(np.radians(alpha)) - np.cos(np.radians(beta))*np.cos(np.radians(gamma))
from pytest_check import check
import numpy as np
import fenics
import fenics_adjoint as fa
import ufl
import theano
from fenics_pymc3 import create_fenics_theano_op
from fenics_pymc3 import FenicsVJPOp
from fecr import evaluate_primal, evaluate_pullback
theano.config.optimizer = "fast_compile"
theano.config.compute_test_value = "ignore"
mesh = fa.UnitSquareMesh(3, 2)
V = fenics.FunctionSpace(mesh, "P", 1)
def assemble_fenics(u, kappa0, kappa1):
f = fa.Expression(
"10*exp(-(pow(x[0] - 0.5, 2) + pow(x[1] - 0.5, 2)) / 0.02)", degree=2
)
inner, grad, dx = ufl.inner, ufl.grad, ufl.dx
J_form = 0.5 * inner(kappa0 * grad(u), grad(u)) * dx - kappa1 * f * u * dx
J = fa.assemble(J_form)
return J
templates = (fa.Function(V), fa.Constant(0.0), fa.Constant(0.0))
inputs = (np.ones(V.dim()), np.ones(1), np.ones(1))  # the original line was truncated after the second entry; the third is an assumed placeholder
"""
Test Surrogates Overview
========================
"""
# Author: <NAME> <<EMAIL>>
# License: new BSD
from PIL import Image
import numpy as np
import scripts.surrogates_overview as exo
import scripts.image_classifier as imgclf
import sklearn.datasets
import sklearn.linear_model
SAMPLES = 10
BATCH = 50
SAMPLE_IRIS = False
IRIS_SAMPLES = 50000
def test_bilmey_image():
"""Tests surrogate image bLIMEy."""
# Load the image
doggo_img = Image.open('surrogates_overview/img/doggo.jpg')
doggo_array = np.array(doggo_img)
# Load the classifier
clf = imgclf.ImageClassifier()
explain_classes = [('tennis ball', 852),
('golden retriever', 207),
('Labrador retriever', 208)]
# Configure widgets to select occlusion colour, segmentation granularity
# and explained class
colour_selection = {
i: i for i in ['mean', 'black', 'white', 'randomise-patch', 'green']
}
granularity_selection = {'low': 13, 'medium': 30, 'high': 50}
# Generate explanations
blimey_image_collection = {}
for gran_name, gran_number in granularity_selection.items():
blimey_image_collection[gran_name] = {}
for col_name in colour_selection:
blimey_image_collection[gran_name][col_name] = \
exo.build_image_blimey(
doggo_array,
clf.predict_proba,
explain_classes,
explanation_size=5,
segments_number=gran_number,
occlusion_colour=col_name,
samples_number=SAMPLES,
batch_size=BATCH,
random_seed=42)
exp = []
for gran_ in blimey_image_collection:
for col_ in blimey_image_collection[gran_]:
exp.append(blimey_image_collection[gran_][col_]['surrogates'])
assert len(exp) == len(EXP_IMG)
for e, E in zip(exp, EXP_IMG):
assert sorted(list(e.keys())) == sorted(list(E.keys()))
for key in e.keys():
assert e[key]['name'] == E[key]['name']
assert len(e[key]['explanation']) == len(E[key]['explanation'])
for e_, E_ in zip(e[key]['explanation'], E[key]['explanation']):
assert e_[0] == E_[0]
assert np.allclose(e_[1], E_[1], atol=.001, equal_nan=True)
def test_bilmey_tabular():
"""Tests surrogate tabular bLIMEy."""
# Load the iris data set
iris = sklearn.datasets.load_iris()
iris_X = iris.data # [:, :2] # take the first two features only
iris_y = iris.target
iris_labels = iris.target_names
iris_feature_names = iris.feature_names
label2class = {lab: i for i, lab in enumerate(iris_labels)}
# Fit the classifier
logreg = sklearn.linear_model.LogisticRegression(C=1e5)
logreg.fit(iris_X, iris_y)
# explained class
_dtype = iris_X.dtype
explained_instances = {
'setosa': np.array([5, 3.5, 1.5, 0.25]).astype(_dtype),
'versicolor': np.array([5.5, 2.75, 4.5, 1.25]).astype(_dtype),
'virginica': np.array([7, 3, 5.5, 2.25]).astype(_dtype)
}
petal_length_idx = iris_feature_names.index('petal length (cm)')
petal_length_bins = [1, 2, 3, 4, 5, 6, 7]
petal_width_idx = iris_feature_names.index('petal width (cm)')
petal_width_bins = [0, .5, 1, 1.5, 2, 2.5]
discs_ = []
for i, ix in enumerate(petal_length_bins): # X-axis
for iix in petal_length_bins[i + 1:]:
for j, jy in enumerate(petal_width_bins): # Y-axis
for jjy in petal_width_bins[j + 1:]:
discs_.append({
petal_length_idx: [ix, iix],
petal_width_idx: [jy, jjy]
})
for inst_i in explained_instances:
for cls_i in iris_labels:
for disc_i, disc in enumerate(discs_):
inst = explained_instances[inst_i]
cls = label2class[cls_i]
exp = exo.build_tabular_blimey(
inst, cls, iris_X, iris_y, logreg.predict_proba, disc,
IRIS_SAMPLES, SAMPLE_IRIS, 42)
key = '{}&{}&{}'.format(inst_i, cls, disc_i)
exp_ = EXP_TAB[key]
assert exp['explanation'].shape[0] == exp_.shape[0]
assert np.allclose(
exp['explanation'], exp_, atol=.001, equal_nan=True)
EXP_IMG = [
{207: {'explanation': [(13, -0.24406872165780585),
(11, -0.20456180387430317),
(9, -0.1866779131424261),
(4, 0.15001224157793785),
(3, 0.11589480417160983)],
'name': 'golden retriever'},
208: {'explanation': [(13, -0.08395966359346249),
(0, -0.0644986107387837),
(9, 0.05845584633658977),
(1, 0.04369763085720947),
(11, -0.035958188394941866)],
'name': '<NAME>'},
852: {'explanation': [(13, 0.3463529698715463),
(11, 0.2678050131923326),
(4, -0.10639863421417416),
(6, 0.08345792378117327),
(9, 0.07366945242386444)],
'name': '<NAME>'}},
{207: {'explanation': [(13, -0.0624167912596456),
(7, 0.06083359545295548),
(3, 0.0495953943686462),
(11, -0.04819787147412231),
(2, -0.03858823761391199)],
'name': '<NAME>'},
208: {'explanation': [(13, -0.08408428146916162),
(7, 0.07704235920590158),
(3, 0.06646468388122273),
(11, -0.0638326572126609),
(2, -0.052621478002380796)],
'name': '<NAME>'},
852: {'explanation': [(11, 0.35248212611685886),
(13, 0.2516925608037859),
(2, 0.13682853028454384),
(9, 0.12930134856644754),
(6, 0.1257747954095489)],
'name': '<NAME>'}},
{207: {'explanation': [(3, 0.21351937934930917),
(10, 0.16933456312772083),
(11, -0.13447244552856766),
(8, 0.11058919217055371),
(2, -0.06269239798368743)],
'name': '<NAME>'},
208: {'explanation': [(8, 0.05995551486884414),
(9, -0.05375302972380482),
(11, -0.051997353324246445),
(6, 0.04213181405953071),
(2, -0.039169895361928275)],
'name': '<NAME>'},
852: {'explanation': [(7, 0.31382219776986503),
(11, 0.24126214884275987),
(13, 0.21075924370226598),
(2, 0.11937652039885377),
(8, -0.11911265319329697)],
'name': '<NAME>'}},
{207: {'explanation': [(3, 0.39254403293049134),
(9, 0.19357165018747347),
(6, 0.16592079671652987),
(0, 0.14042059731407297),
(1, 0.09793027079765507)],
'name': '<NAME>'},
208: {'explanation': [(9, -0.19351859273276703),
(1, -0.15262967987262344),
(3, 0.12205127112235375),
(2, 0.11352141032313934),
(6, -0.11164209893429898)],
'name': '<NAME>'},
852: {'explanation': [(7, 0.17213007100844877),
(0, -0.1583030948868859),
(3, -0.13748574615069775),
(5, 0.13273283867075436),
(11, 0.12309551170070354)],
'name': '<NAME>'}},
{207: {'explanation': [(3, 0.4073533182995105),
(10, 0.20711667988142463),
(8, 0.15360813290032324),
(6, 0.1405424759832785),
(1, 0.1332920685413575)],
'name': '<NAME>'},
208: {'explanation': [(9, -0.14747910525112617),
(1, -0.13977061235228924),
(2, 0.10526833898161611),
(6, -0.10416022118399552),
(3, 0.09555992655161764)],
'name': '<NAME>'},
852: {'explanation': [(11, 0.2232260929107954),
(7, 0.21638443149433054),
(5, 0.21100464215582274),
(13, 0.145614853795006),
(1, -0.11416523431311262)],
'name': '<NAME>'}},
{207: {'explanation': [(1, 0.14700178977744183),
(0, 0.10346667279328238),
(2, 0.10346667279328238),
(7, 0.10346667279328238),
(8, 0.10162900633690726)],
'name': '<NAME>'},
208: {'explanation': [(10, -0.10845134816658476),
(8, -0.1026920429226184),
(6, -0.10238154733842847),
(18, 0.10094164937411244),
(16, 0.08646888450232793)],
'name': '<NAME>'},
852: {'explanation': [(18, -0.20542297091894474),
(13, 0.2012751176130666),
(8, -0.19194747162742365),
(20, 0.14686930696710473),
(15, 0.11796990086271067)],
'name': '<NAME>'}},
{207: {'explanation': [(13, 0.12446259821701779),
(17, 0.11859084421095789),
(15, 0.09690553833007137),
(12, -0.08869743701731962),
(4, 0.08124900427893789)],
'name': '<NAME>'},
208: {'explanation': [(10, -0.09478194981909983),
(20, -0.09173392507039077),
(9, 0.08768898801254493),
(17, -0.07553994244536394),
(4, 0.07422905503397653)],
'name': '<NAME>'},
852: {'explanation': [(21, 0.1327882942965061),
(1, 0.1238236573086363),
(18, -0.10911712271717902),
(19, 0.09707191051320978),
(6, 0.08593672504338913)],
'name': '<NAME>'}},
{207: {'explanation': [(6, 0.14931728779865114),
(14, 0.14092073957103526),
(1, 0.11071480021464616),
(4, 0.10655287976934531),
(8, 0.08705404649152573)],
'name': '<NAME>'},
208: {'explanation': [(8, -0.12242580400886727),
(9, 0.12142729544158742),
(14, -0.1148252787068248),
(16, -0.09562322208795092),
(4, 0.09350160975513132)],
'name': '<NAME>'},
852: {'explanation': [(6, 0.04227675072263027),
(9, -0.03107924340879173),
(14, 0.028007115650713045),
(13, 0.02771190348545554),
(19, 0.02640441416071482)],
'name': '<NAME>'}},
{207: {'explanation': [(19, 0.14313680656283245),
(18, 0.12866508562342843),
(8, 0.11809779264185447),
(0, 0.11286255403442104),
(2, 0.11286255403442104)],
'name': '<NAME>'},
208: {'explanation': [(9, 0.2397917428082761),
(14, -0.19435572812170654),
(6, -0.1760894833446507),
(18, -0.12243333818399058),
(15, 0.10986343675377105)],
'name': '<NAME>'},
852: {'explanation': [(14, 0.15378038774613365),
(9, -0.14245940635481966),
(6, 0.10213601012183973),
(20, 0.1009180838986786),
(3, 0.09780065767815548)],
'name': '<NAME>'}},
{207: {'explanation': [(15, 0.06525850448807077),
(9, 0.06286791243851698),
(19, 0.055189970374185854),
(8, 0.05499197604401475),
(13, 0.04748220842936177)],
'name': '<NAME>'},
208: {'explanation': [(6, -0.31549091899770765),
(5, 0.1862302670824446),
(8, -0.17381478451341995),
(10, -0.17353516098662508),
(14, -0.13591542421754205)],
'name': '<NAME>'},
852: {'explanation': [(14, 0.2163853942943355),
(6, 0.17565046338282214),
(1, 0.12446193028474549),
(9, -0.11365789839746396),
(10, 0.09239073691962967)],
'name': '<NAME>'}},
{207: {'explanation': [(19, 0.1141207265647932),
(36, -0.08861425922625768),
(30, 0.07219209872026074),
(9, -0.07150939547859836),
(38, -0.06988288637544438)],
'name': '<NAME>'},
208: {'explanation': [(29, 0.10531073909547647),
(13, 0.08279642208039652),
(34, -0.0817952443980797),
(33, -0.08086848205765082),
(12, 0.08086848205765082)],
'name': '<NAME>'},
852: {'explanation': [(13, -0.1330452414595897),
(4, 0.09942366413042845),
(12, -0.09881995683190645),
(33, 0.09881995683190645),
(19, -0.09596925317560831)],
'name': '<NAME>'}},
{207: {'explanation': [(37, 0.08193926967758253),
(35, 0.06804043021426347),
(15, 0.06396269230810163),
(11, 0.062255657227065296),
(8, 0.05529200233091672)],
'name': '<NAME>'},
208: {'explanation': [(19, 0.05711957286614678),
(27, -0.050230108135410824),
(16, -0.04743034616549999),
(5, -0.046717346734255705),
(9, -0.04419100026638039)],
'name': '<NAME>'},
852: {'explanation': [(3, -0.08390967998497496),
(30, -0.07037680222442452),
(22, 0.07029819368543713),
(8, -0.06861396187180349),
(37, -0.06662511956402824)],
'name': '<NAME>'}},
{207: {'explanation': [(19, 0.048418845359024805),
(9, -0.0423869575883795),
(30, 0.04012650790044438),
(36, -0.03787242980067195),
(10, 0.036557999380695635)],
'name': '<NAME>'},
208: {'explanation': [(10, 0.12120686823129677),
(17, 0.10196564232230493),
(7, 0.09495133975425854),
(25, -0.0759657891182803),
(2, -0.07035244568286837)],
'name': '<NAME>'},
852: {'explanation': [(3, -0.0770578003457272),
(28, 0.0769372258280398),
(6, -0.06044725989272927),
(22, 0.05550155775286349),
(31, -0.05399028046597057)],
'name': '<NAME>'}},
{207: {'explanation': [(14, 0.05371383110181226),
(0, -0.04442539316084218),
(18, 0.042589475382826494),
(19, 0.04227647855354252),
(17, 0.041685661662754295)],
'name': '<NAME>'},
208: {'explanation': [(29, 0.14419601354489464),
(17, 0.11785174500536676),
(36, 0.1000501679652906),
(10, 0.09679790134851017),
(35, 0.08710376081189208)],
'name': '<NAME>'},
852: {'explanation': [(8, -0.02486237985832769),
(3, -0.022559886154747102),
(11, -0.021878686669239856),
(36, 0.021847953817988534),
(19, -0.018317598300716522)],
'name': '<NAME>'}},
{207: {'explanation': [(37, 0.08098729255605368),
(35, 0.06639102704982619),
(15, 0.06033721190370432),
(34, 0.05826267856117829),
(28, 0.05549505160798173)],
'name': '<NAME>'},
208: {'explanation': [(17, 0.13839012042250542),
(10, 0.11312187488346881),
(7, 0.10729071207480922),
(25, -0.09529127965797404),
(11, -0.09279834572979286)],
'name': '<NAME>'},
852: {'explanation': [(3, -0.028385651836694076),
(22, 0.023364702783498722),
(8, -0.023097812578270233),
(30, -0.022931236620034406),
(37, -0.022040170736525342)],
'name': '<NAME>'}}
]
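# Note (assumed structure, inferred from the entries below rather than stated
# anywhere in this file): EXP_TAB appears to map keys of the form
# '<class>&<i>&<j>' (e.g. 'setosa&0&0') to 2-element numpy arrays of expected
# explanation values used as test fixtures. The exact meaning of the two
# integer indices is an assumption and is not documented here.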
EXP_TAB = {
'setosa&0&0': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&1': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&2': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&3': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&4': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&5': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&6': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&7': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&8': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&9': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&10': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&11': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&12': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&13': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&14': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&15': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&16': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&17': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&18': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&19': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&20': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&21': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&22': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&23': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&24': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&25': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&26': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&27': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&28': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&29': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&30': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&31': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&32': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&33': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&34': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&35': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&36': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&37': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&38': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&39': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&40': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&41': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&42': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&43': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&44': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&45': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&46': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&47': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&48': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&49': np.array([0.4656481363306145, 0.007982539480288167]),
'setosa&0&50': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&51': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&52': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&53': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&54': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&55': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&56': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&57': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&58': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&59': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&60': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&61': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&62': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&63': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&0&64': np.array([0.3094460464703627, 0.11400643817329122]),
'setosa&0&65': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&66': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&67': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&68': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&69': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&70': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&71': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&72': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&73': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&74': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&0&75': np.array([0.0, 0.95124502153736]),
'setosa&0&76': np.array([0.0, 0.9708703761803881]),
'setosa&0&77': np.array([0.0, 0.5659706098422994]),
'setosa&0&78': np.array([0.0, 0.3962828716108186]),
'setosa&0&79': np.array([0.0, 0.2538069363248767]),
'setosa&0&80': np.array([0.0, 0.95124502153736]),
'setosa&0&81': np.array([0.0, 0.95124502153736]),
'setosa&0&82': np.array([0.0, 0.95124502153736]),
'setosa&0&83': np.array([0.0, 0.95124502153736]),
'setosa&0&84': np.array([0.0, 0.9708703761803881]),
'setosa&0&85': np.array([0.0, 0.9708703761803881]),
'setosa&0&86': np.array([0.0, 0.9708703761803881]),
'setosa&0&87': np.array([0.0, 0.5659706098422994]),
'setosa&0&88': np.array([0.0, 0.5659706098422994]),
'setosa&0&89': np.array([0.0, 0.3962828716108186]),
'setosa&0&90': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&91': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&92': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&93': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&94': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&95': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&96': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&97': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&98': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&99': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&100': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&101': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&102': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&103': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&104': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&105': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&106': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&107': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&108': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&109': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&110': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&111': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&112': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&113': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&114': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&115': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&116': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&117': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&118': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&119': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&120': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&121': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&122': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&123': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&124': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&125': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&126': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&127': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&128': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&129': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&130': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&131': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&132': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&133': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&134': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&135': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&136': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&137': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&138': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&139': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&140': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&141': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&142': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&143': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&144': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&145': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&146': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&147': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&148': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&149': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&150': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&151': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&152': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&153': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&154': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&155': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&156': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&157': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&158': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&159': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&160': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&161': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&162': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&163': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&164': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&165': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&166': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&167': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&168': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&169': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&170': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&171': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&172': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&173': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&174': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&175': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&176': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&177': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&178': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&179': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&180': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&181': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&182': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&183': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&184': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&185': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&186': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&187': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&188': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&189': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&190': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&191': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&192': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&193': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&194': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&195': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&196': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&197': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&198': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&199': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&200': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&201': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&202': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&203': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&204': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&205': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&206': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&207': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&208': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&209': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&210': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&211': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&212': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&213': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&214': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&215': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&216': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&217': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&218': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&219': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&220': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&221': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&222': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&223': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&224': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&225': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&226': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&227': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&228': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&229': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&230': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&231': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&232': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&233': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&234': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&235': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&236': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&237': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&238': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&239': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&240': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&241': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&242': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&243': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&244': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&245': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&246': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&247': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&248': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&249': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&250': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&251': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&252': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&253': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&254': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&255': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&256': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&257': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&258': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&259': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&260': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&261': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&262': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&263': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&264': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&265': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&266': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&267': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&268': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&269': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&270': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&271': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&272': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&273': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&274': np.array([0.4656481363306145, 0.007982539480288167]),
'setosa&0&275': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&276': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&277': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&278': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&279': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&280': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&281': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&282': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&283': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&284': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&285': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&286': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&287': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&288': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&289': np.array([0.4656481363306145, 0.007982539480288167]),
'setosa&0&290': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&291': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&292': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&293': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&294': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&295': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&296': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&297': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&298': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&299': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&300': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&301': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&302': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&303': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&0&304': np.array([0.3094460464703627, 0.11400643817329122]),
'setosa&0&305': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&306': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&307': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&308': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&309': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&310': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&311': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&312': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&313': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&314': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&1&0': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&1': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&2': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&3': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&4': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&5': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&6': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&7': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&8': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&9': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&10': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&11': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&12': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&13': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&14': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&15': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&16': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&17': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&18': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&19': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&20': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&21': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&22': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&23': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&24': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&25': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&26': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&27': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&28': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&29': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&30': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&31': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&32': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&33': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&34': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&35': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&36': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&37': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&38': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&39': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&40': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&41': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&42': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&43': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&44': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&45': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&46': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&47': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&48': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&49': np.array([0.4079256832347186, 0.038455640985860955]),
'setosa&1&50': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&51': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&52': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&53': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&54': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&55': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&56': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&57': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&58': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&59': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&60': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&61': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&62': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&63': np.array([0.13717260713320106, 0.3627779907901665]),
'setosa&1&64': np.array([0.3093950298647913, 0.1140298206733954]),
'setosa&1&65': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&66': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&67': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&68': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&69': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&70': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&71': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&72': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&73': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&74': np.array([0.13717260713320106, 0.3627779907901665]),
'setosa&1&75': np.array([0.0, -0.4756207622944677]),
'setosa&1&76': np.array([0.0, -0.4854334805210761]),
'setosa&1&77': np.array([0.0, 0.16885577975809635]),
'setosa&1&78': np.array([0.0, 0.395805885538554]),
'setosa&1&79': np.array([0.0, 0.2538072707138344]),
'setosa&1&80': np.array([0.0, -0.4756207622944677]),
'setosa&1&81': np.array([0.0, -0.4756207622944677]),
'setosa&1&82': np.array([0.0, -0.4756207622944677]),
'setosa&1&83': np.array([0.0, -0.4756207622944677]),
'setosa&1&84': np.array([0.0, -0.4854334805210761]),
'setosa&1&85': np.array([0.0, -0.4854334805210761]),
'setosa&1&86': np.array([0.0, -0.4854334805210761]),
'setosa&1&87': np.array([0.0, 0.16885577975809635]),
'setosa&1&88': np.array([0.0, 0.16885577975809635]),
'setosa&1&89': np.array([0.0, 0.395805885538554]),
'setosa&1&90': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&91': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&92': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&93': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&94': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&95': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&96': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&97': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&98': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&99': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&100': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&101': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&102': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&103': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&104': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&105': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&106': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&107': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&108': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&109': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&110': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&111': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&112': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&113': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&114': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&115': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&116': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&117': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&118': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&119': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&120': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&121': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&122': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&123': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&124': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&125': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&126': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&127': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&128': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&129': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&130': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&131': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&132': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&133': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&134': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&135': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&136': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&137': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&138': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&139': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&140': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&141': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&142': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&143': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&144': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&145': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&146': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&147': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&148': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&149': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&150': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&151': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&152': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&153': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&154': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&155': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&156': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&157': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&158': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&159': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&160': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&161': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&162': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&163': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&164': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&165': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&166': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&167': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&168': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&169': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&170': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&171': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&172': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&173': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&174': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&175': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&176': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&177': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&178': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&179': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&180': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&181': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&182': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&183': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&184': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&185': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&186': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&187': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&188': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&189': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&190': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&191': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&192': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&193': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&194': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&195': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&196': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&197': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&198': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&199': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&200': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&201': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&202': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&203': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&204': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&205': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&206': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&207': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&208': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&209': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&210': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&211': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&212': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&213': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&214': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&215': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&216': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&217': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&218': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&219': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&220': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&221': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&222': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&223': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&224': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&225': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&226': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&227': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&228': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&229': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&230': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&231': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&232': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&233': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&234': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&235': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&236': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&237': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&238': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&239': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&240': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&241': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&242': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&243': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&244': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&245': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&246': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&247': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&248': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&249': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&250': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&251': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&252': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&253': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&254': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&255': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&256': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&257': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&258': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&259': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&260': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&261': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&262': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&263': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&264': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&265': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&266': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&267': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&268': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&269': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&270': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&271': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&272': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&273': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&274': np.array([0.4079256832347186, 0.038455640985860955]),
'setosa&1&275': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&276': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&277': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&278': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&279': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&280': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&281': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&282': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&283': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&284': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&285': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&286': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&287': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&288': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&289': np.array([0.4079256832347186, 0.038455640985860955]),
'setosa&1&290': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&291': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&292': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&293': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&294': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&295': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&296': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&297': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&298': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&299': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&300': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&301': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&302': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&303': np.array([0.13717260713320106, 0.3627779907901665]),
'setosa&1&304': np.array([0.3093950298647913, 0.1140298206733954]),
'setosa&1&305': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&306': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&307': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&308': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&309': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&310': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&311': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&312': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&313': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&314': np.array([0.13717260713320106, 0.3627779907901665]),
'setosa&2&0': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&1': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&2': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&3': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&4': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&5': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&6': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&7': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&8': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&9': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&10': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&11': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&12': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&13': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&14': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&15': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&16': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&17': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&18': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&19': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&20': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&21': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&22': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&23': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&24': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&25': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&26': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&27': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&28': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&29': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&30': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&31': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&32': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&33': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&34': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&35': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&36': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&37': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&38': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&39': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&40': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&41': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&42': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&43': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&44': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&45': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&46': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&47': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&48': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&49': np.array([-0.8735738195653328, -0.046438180466149094]),
'setosa&2&50': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&51': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&52': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&53': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&54': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&55': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&56': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&57': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&58': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&59': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&60': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&61': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&62': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&63': np.array([-0.2741128763380603, -0.7260889090887469]),
'setosa&2&64': np.array([-0.6188410763351541, -0.22803625884668638]),
'setosa&2&65': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&66': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&67': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&68': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&69': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&70': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&71': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&72': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&73': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&74': np.array([-0.2741128763380603, -0.7260889090887469]),
'setosa&2&75': np.array([0.0, -0.47562425924289314]),
'setosa&2&76': np.array([0.0, -0.48543689565931186]),
'setosa&2&77': np.array([0.0, -0.7348263896003956]),
'setosa&2&78': np.array([0.0, -0.7920887571493729]),
'setosa&2&79': np.array([0.0, -0.507614207038711]),
'setosa&2&80': np.array([0.0, -0.47562425924289314]),
'setosa&2&81': np.array([0.0, -0.47562425924289314]),
'setosa&2&82': np.array([0.0, -0.47562425924289314]),
'setosa&2&83': np.array([0.0, -0.47562425924289314]),
'setosa&2&84': np.array([0.0, -0.48543689565931186]),
'setosa&2&85': np.array([0.0, -0.48543689565931186]),
'setosa&2&86': np.array([0.0, -0.48543689565931186]),
'setosa&2&87': np.array([0.0, -0.7348263896003956]),
'setosa&2&88': np.array([0.0, -0.7348263896003956]),
'setosa&2&89': np.array([0.0, -0.7920887571493729]),
'setosa&2&90': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&91': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&92': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&93': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&94': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&95': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&96': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&97': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&98': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&99': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&100': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&101': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&102': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&103': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&104': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&105': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&106': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&107': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&108': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&109': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&110': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&111': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&112': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&113': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&114': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&115': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&116': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&117': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&118': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&119': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&120': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&121': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&122': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&123': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&124': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&125': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&126': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&127': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&128': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&129': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&130': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&131': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&132': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&133': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&134': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&135': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&136': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&137': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&138': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&139': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&140': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&141': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&142': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&143': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&144': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&145': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&146': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&147': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&148': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&149': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&150': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&151': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&152': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&153': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&154': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&155': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&156': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&157': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&158': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&159': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&160': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&161': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&162': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&163': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&164': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&165': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&166': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&167': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&168': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&169': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&170': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&171': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&172': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&173': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&174': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&175': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&176': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&177': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&178': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&179': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&180': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&181': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&182': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&183': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&184': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&185': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&186': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&187': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&188': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&189': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&190': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&191': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&192': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&193': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&194': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&195': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&196': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&197': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&198': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&199': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&200': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&201': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&202': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&203': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&204': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&205': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&206': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&207': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&208': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&209': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&210': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&211': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&212': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&213': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&214': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&215': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&216': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&217': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&218': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&219': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&220': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&221': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&222': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&223': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&224': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&225': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&226': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&227': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&228': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&229': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&230': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&231': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&232': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&233': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&234': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&235': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&236': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&237': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&238': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&239': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&240': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&241': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&242': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&243': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&244': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&245': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&246': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&247': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&248': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&249': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&250': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&251': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&252': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&253': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&254': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&255': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&256': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&257': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&258': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&259': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&260': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&261': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&262': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&263': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&264': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&265': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&266': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&267': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&268': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&269': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&270': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&271': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&272': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&273': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&274': np.array([-0.8735738195653328, -0.046438180466149094]),
'setosa&2&275': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&276': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&277': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&278': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&279': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&280': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&281': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&282': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&283': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&284': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&285': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&286': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&287': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&288': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&289': np.array([-0.8735738195653328, -0.046438180466149094]),
'setosa&2&290': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&291': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&292': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&293': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&294': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&295': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&296': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&297': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&298': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&299': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&300': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&301': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&302': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&303': np.array([-0.2741128763380603, -0.7260889090887469]),
'setosa&2&304': np.array([-0.6188410763351541, -0.22803625884668638]),
'setosa&2&305': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&306': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&307': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&308': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&309': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&310': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&311': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&312': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&313': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&314': np.array([-0.2741128763380603, -0.7260889090887469]),
'versicolor&0&0': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&1': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&2': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&3': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&4': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&5': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&6': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&7': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&8': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&9': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&10': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&11': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&12': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&13': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&14': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&15': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&16': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&17': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&18': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&19': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&20': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&21': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&22': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&23': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&24': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&25': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&26': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&27': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&28': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&29': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&30': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&31': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&32': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&33': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&34': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&35': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&36': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&37': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&38': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&39': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&40': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&41': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&42': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&43': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&44': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&45': np.array([0.05031696218434577, -0.929227611211748]),
'versicolor&0&46': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&47': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&48': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&49': np.array([0.4656481363306145, 0.007982539480288167]),
'versicolor&0&50': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&51': np.array([0.6614632074748169, -0.6030419328583525]),
'versicolor&0&52': np.array([0.5519595359123358, -0.6434192906054143]),
'versicolor&0&53': np.array([0.14241819268815753, -0.8424615476000691]),
'versicolor&0&54': np.array([0.667423576348749, -0.6594086777766442]),
'versicolor&0&55': np.array([0.5429872243487625, -0.6697888833280774]),
'versicolor&0&56': np.array([0.1140907502997574, -0.8737800276630269]),
'versicolor&0&57': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&58': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&59': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&60': np.array([0.029402442458921384, -0.9481684282717414]),
'versicolor&0&61': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&62': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&63': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&0&64': np.array([0.3094460464703627, 0.11400643817329122]),
'versicolor&0&65': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&66': np.array([0.42809266524335826, -0.40375108595117376]),
'versicolor&0&67': np.array([0.45547700380103057, -0.6083463409799501]),
'versicolor&0&68': np.array([0.19002455311770447, -0.8848597943731074]),
'versicolor&0&69': np.array([0.436966114193701, -0.4638042290788281]),
'versicolor&0&70': np.array([0.45424510803217066, -0.6425314361631614]),
'versicolor&0&71': np.array([0.1746467870122951, -0.9073062742839755]),
'versicolor&0&72': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&73': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&74': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&0&75': np.array([0.0, -0.95124502153736]),
'versicolor&0&76': np.array([0.0, -0.9708703761803881]),
'versicolor&0&77': np.array([0.0, 0.5659706098422994]),
'versicolor&0&78': np.array([0.0, 0.3962828716108186]),
'versicolor&0&79': np.array([0.0, 0.2538069363248767]),
'versicolor&0&80': np.array([0.0, -0.9708703761803881]),
'versicolor&0&81': np.array([0.0, -0.3631376646911367]),
'versicolor&0&82': np.array([0.0, -0.5804857652839247]),
'versicolor&0&83': np.array([0.0, -0.8943993997517804]),
'versicolor&0&84': np.array([0.0, -0.4231275527222919]),
'versicolor&0&85': np.array([0.0, -0.6164235822373675]),
'versicolor&0&86': np.array([0.0, -0.9166476163222441]),
'versicolor&0&87': np.array([0.0, 0.5659706098422994]),
'versicolor&0&88': np.array([0.0, 0.5659706098422994]),
'versicolor&0&89': np.array([0.0, 0.3962828716108186]),
'versicolor&0&90': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&91': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&92': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&93': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&94': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&95': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&96': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&97': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&98': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&99': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&100': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&101': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&102': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&103': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&104': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&105': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&106': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&107': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&108': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&109': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&110': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&111': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&112': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&113': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&114': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&115': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&116': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&117': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&118': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&119': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&120': np.array([-0.05855179950109871, -0.9211684729232403]),
'versicolor&0&121': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&122': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&123': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&124': np.array([-0.5182062652425321, 0.3958533237517639]),
'versicolor&0&125': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&126': np.array([-0.5107107533700952, 0.0075507123577884866]),
'versicolor&0&127': np.array([-0.1464063320531759, -0.4788055402156298]),
'versicolor&0&128': np.array([-0.061109248092233844, -0.8620287767000373]),
'versicolor&0&129': np.array([-0.4706137753079746, -0.057389625790424635]),
'versicolor&0&130': np.array([-0.06804620923037683, -0.5677904519730453]),
'versicolor&0&131': np.array([-0.020216773196675246, -0.9057119888626176]),
'versicolor&0&132': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&133': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&134': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&135': np.array([-0.19684482070614498, -0.7845939961595055]),
'versicolor&0&136': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&137': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&138': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&139': np.array([-0.8063011502229427, 0.4134300066735808]),
'versicolor&0&140': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&141': np.array([-0.7985789197998611, 0.0026209054759345337]),
'versicolor&0&142': np.array([-0.7182275903095532, -0.11963032135457498]),
'versicolor&0&143': np.array([-0.2798927835773098, -0.6581136857450849]),
'versicolor&0&144': np.array([-0.7920119433269182, -0.0142751249964083]),
'versicolor&0&145': np.array([-0.6943081428778407, -0.14852813120265815]),
'versicolor&0&146': np.array([-0.16106555563262584, -0.777621649099753]),
'versicolor&0&147': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&148': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&149': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&150': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&151': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&152': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&153': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&154': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&155': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&156': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&157': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&158': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&159': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&160': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&161': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&162': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&163': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&164': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&165': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&166': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&167': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&168': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&169': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&170': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&171': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&172': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&173': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&174': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&175': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&176': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&177': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&178': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&179': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&180': np.array([-0.05855179950109871, -0.9211684729232403]),
'versicolor&0&181': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&182': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&183': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&184': np.array([-0.5182062652425321, 0.3958533237517639]),
'versicolor&0&185': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&186': np.array([-0.5107107533700952, 0.0075507123577884866]),
'versicolor&0&187': np.array([-0.1464063320531759, -0.4788055402156298]),
'versicolor&0&188': np.array([-0.061109248092233844, -0.8620287767000373]),
'versicolor&0&189': np.array([-0.4706137753079746, -0.057389625790424635]),
'versicolor&0&190': np.array([-0.06804620923037683, -0.5677904519730453]),
'versicolor&0&191': np.array([-0.020216773196675246, -0.9057119888626176]),
'versicolor&0&192': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&193': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&194': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&195': np.array([-0.19684482070614498, -0.7845939961595055]),
'versicolor&0&196': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&197': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&198': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&199': np.array([-0.8063011502229427, 0.4134300066735808]),
'versicolor&0&200': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&201': np.array([-0.7985789197998611, 0.0026209054759345337]),
'versicolor&0&202': np.array([-0.7182275903095532, -0.11963032135457498]),
'versicolor&0&203': np.array([-0.2798927835773098, -0.6581136857450849]),
'versicolor&0&204': np.array([-0.7920119433269182, -0.0142751249964083]),
'versicolor&0&205': np.array([-0.6943081428778407, -0.14852813120265815]),
'versicolor&0&206': np.array([-0.16106555563262584, -0.777621649099753]),
'versicolor&0&207': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&208': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&209': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&210': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&211': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&212': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&213': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&214': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&215': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&216': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&217': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&218': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&219': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&220': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&221': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&222': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&223': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&224': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&225': np.array([-0.04777085826693217, -0.931704979630315]),
'versicolor&0&226': np.array([-0.016252316132452975, -0.9640854286687816]),
'versicolor&0&227': np.array([-0.44101924439572626, 0.5583264842761904]),
'versicolor&0&228': np.array([-0.5844994389588399, 0.5715208832363579]),
'versicolor&0&229': np.array([-0.46216647196120714, 0.35468591243823655]),
'versicolor&0&230': np.array([-0.016252316132452975, -0.9640854286687816]),
'versicolor&0&231': np.array([-0.3707180757031537, -0.1977196581472426]),
'versicolor&0&232': np.array([-0.1043459833293615, -0.5233314327065356]),
'versicolor&0&233': np.array([-0.049289647556763364, -0.8736084405111605]),
'versicolor&0&234': np.array([-0.34078174031874375, -0.25874482325965437]),
'versicolor&0&235': np.array([-0.050841051273783675, -0.5877587283589205]),
'versicolor&0&236': np.array([-0.0161720977425142, -0.9096817855236822]),
'versicolor&0&237': np.array([-0.44101924439572626, 0.5583264842761904]),
'versicolor&0&238': np.array([-0.44101924439572626, 0.5583264842761904]),
'versicolor&0&239': np.array([-0.5844994389588399, 0.5715208832363579]),
'versicolor&0&240': np.array([-0.11329659732608087, -0.8671819100849522]),
'versicolor&0&241': np.array([-0.040390637135858574, -0.9402832917474078]),
'versicolor&0&242': np.array([-0.5276460255602035, 0.28992233541586077]),
'versicolor&0&243': np.array([-0.6392402874163683, 0.24114611970435948]),
'versicolor&0&244': np.array([-0.6814868825686854, 0.35066801608083215]),
'versicolor&0&245': np.array([-0.040390637135858574, -0.9402832917474078]),
'versicolor&0&246': np.array([-0.6425009695928476, -0.24851992476830956]),
'versicolor&0&247': np.array([-0.5151243662384031, -0.3255567772442641]),
'versicolor&0&248': np.array([-0.16157511199607094, -0.7754323813403634]),
'versicolor&0&249': np.array([-0.6300442788906601, -0.28361140069713875]),
'versicolor&0&250': np.array([-0.4875864856121089, -0.3614122096616301]),
'versicolor&0&251': np.array([-0.08968204532514226, -0.8491191210330045]),
'versicolor&0&252': np.array([-0.5276460255602035, 0.28992233541586077]),
'versicolor&0&253': np.array([-0.5276460255602035, 0.28992233541586077]),
'versicolor&0&254': np.array([-0.6392402874163683, 0.24114611970435948]),
'versicolor&0&255': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&256': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&257': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&258': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&259': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&260': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&261': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&262': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&263': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&264': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&265': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&266': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&267': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&268': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&269': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&270': np.array([0.05031696218434577, -0.929227611211748]),
'versicolor&0&271': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&272': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&273': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&274': np.array([0.4656481363306145, 0.007982539480288167]),
'versicolor&0&275': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&276': np.array([0.6614632074748169, -0.6030419328583525]),
'versicolor&0&277': np.array([0.5519595359123358, -0.6434192906054143]),
'versicolor&0&278': np.array([0.14241819268815753, -0.8424615476000691]),
'versicolor&0&279': np.array([0.667423576348749, -0.6594086777766442]),
'versicolor&0&280': np.array([0.5429872243487625, -0.6697888833280774]),
'versicolor&0&281': np.array([0.1140907502997574, -0.8737800276630269]),
'versicolor&0&282': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&283': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&284': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&285': np.array([0.05031696218434577, -0.929227611211748]),
'versicolor&0&286': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&287': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&288': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&289': np.array([0.4656481363306145, 0.007982539480288167]),
'versicolor&0&290': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&291': np.array([0.6614632074748169, -0.6030419328583525]),
'versicolor&0&292': np.array([0.5519595359123358, -0.6434192906054143]),
'versicolor&0&293': np.array([0.14241819268815753, -0.8424615476000691]),
'versicolor&0&294': np.array([0.667423576348749, -0.6594086777766442]),
'versicolor&0&295': np.array([0.5429872243487625, -0.6697888833280774]),
'versicolor&0&296': np.array([0.1140907502997574, -0.8737800276630269]),
'versicolor&0&297': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&298': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&299': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&300': np.array([0.029402442458921384, -0.9481684282717414]),
'versicolor&0&301': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&302': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&303': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&0&304': np.array([0.3094460464703627, 0.11400643817329122]),
'versicolor&0&305': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&306': np.array([0.42809266524335826, -0.40375108595117376]),
'versicolor&0&307': np.array([0.45547700380103057, -0.6083463409799501]),
'versicolor&0&308': np.array([0.19002455311770447, -0.8848597943731074]),
'versicolor&0&309': np.array([0.436966114193701, -0.4638042290788281]),
'versicolor&0&310': np.array([0.45424510803217066, -0.6425314361631614]),
'versicolor&0&311': np.array([0.1746467870122951, -0.9073062742839755]),
'versicolor&0&312': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&313': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&314': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&1&0': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&1': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&2': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&3': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&4': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&5': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&6': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&7': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&8': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&9': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&10': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&11': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&12': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&13': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&14': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&15': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&16': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&17': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&18': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&19': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&20': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&21': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&22': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&23': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&24': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&25': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&26': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&27': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&28': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&29': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&30': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&31': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&32': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&33': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&34': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&35': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&36': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&37': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&38': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&39': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&40': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&41': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&42': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&43': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&44': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&45': np.array([0.7749499208750119, 0.8147189440804429]),
'versicolor&1&46': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&47': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&48': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&49': np.array([0.4079256832347186, 0.038455640985860955]),
'versicolor&1&50': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&51': np.array([0.18555813792691386, 0.6940923833143309]),
'versicolor&1&52': np.array([0.32639262064172164, 0.6296083447134281]),
'versicolor&1&53': np.array([0.6964303997553315, 0.7444536452136676]),
'versicolor&1&54': np.array([0.18216358701833335, 0.747615101407194]),
'versicolor&1&55': np.array([0.33549445287370383, 0.6526039763053625]),
'versicolor&1&56': np.array([0.7213651642695392, 0.7718874443854203]),
'versicolor&1&57': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&58': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&59': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&60': np.array([0.4933316375690332, 0.5272416708629276]),
'versicolor&1&61': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&62': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&63': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&1&64': np.array([0.3093950298647913, 0.1140298206733954]),
'versicolor&1&65': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&66': np.array([0.1413116283690917, 0.7479856297394165]),
'versicolor&1&67': np.array([0.189773257421942, 0.6552150653012478]),
'versicolor&1&68': np.array([0.40694846236352233, 0.5109051764198169]),
'versicolor&1&69': np.array([0.1390424906594644, 0.7991613016301518]),
'versicolor&1&70': np.array([0.1945777487290197, 0.6743932844312892]),
'versicolor&1&71': np.array([0.415695226122737, 0.5230815102377903]),
'versicolor&1&72': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&73': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&74': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&1&75': np.array([0.0, 0.4756207622944677]),
'versicolor&1&76': np.array([0.0, 0.4854334805210761]),
'versicolor&1&77': np.array([0.0, 0.16885577975809635]),
'versicolor&1&78': np.array([0.0, 0.395805885538554]),
'versicolor&1&79': np.array([0.0, 0.2538072707138344]),
'versicolor&1&80': np.array([0.0, 0.4854334805210761]),
'versicolor&1&81': np.array([0.0, 0.7613919530844643]),
'versicolor&1&82': np.array([0.0, 0.6668230985485095]),
'versicolor&1&83': np.array([0.0, 0.4904755652105692]),
'versicolor&1&84': np.array([0.0, 0.8121046082359693]),
'versicolor&1&85': np.array([0.0, 0.6855766903749089]),
'versicolor&1&86': np.array([0.0, 0.5008471974438506]),
'versicolor&1&87': np.array([0.0, 0.16885577975809635]),
'versicolor&1&88': np.array([0.0, 0.16885577975809635]),
'versicolor&1&89': np.array([0.0, 0.395805885538554]),
'versicolor&1&90': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&91': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&92': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&93': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&94': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&95': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&96': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&97': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&98': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&99': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&100': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&101': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&102': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&103': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&104': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&105': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&106': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&107': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&108': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&109': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&110': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&111': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&112': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&113': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&114': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&115': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&116': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&117': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&118': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&119': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&120': np.array([0.8224435822504677, 0.05315271528828394]),
'versicolor&1&121': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&122': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&123': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&124': np.array([0.8476206690613984, 0.02146454924522743]),
'versicolor&1&125': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&126': np.array([0.69362517791403, 0.2579390890424607]),
'versicolor&1&127': np.array([0.7261791877801502, 0.16248655642013624]),
'versicolor&1&128': np.array([0.8190416077589757, 0.05661509439536992]),
'versicolor&1&129': np.array([0.6654762076749751, 0.2949291633432878]),
'versicolor&1&130': np.array([0.7118161070185614, 0.17683644094125878]),
'versicolor&1&131': np.array([0.8165214253946836, 0.059175619390630096]),
'versicolor&1&132': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&133': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&134': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&135': np.array([0.5188109114552927, 0.03638964581864269]),
'versicolor&1&136': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&137': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&138': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&139': np.array([0.5436097000280874, 0.1461891067488832]),
'versicolor&1&140': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&141': np.array([0.32513442685780247, 0.6124765483184536]),
'versicolor&1&142': np.array([0.1812883360919208, 0.5504982486874137]),
'versicolor&1&143': np.array([0.4788153032824012, 0.08625929936974323]),
'versicolor&1&144': np.array([0.28490718210609345, 0.6650298146522879]),
'versicolor&1&145': np.array([0.1313204067730033, 0.597079642504441]),
'versicolor&1&146': np.array([0.46583127837967303, 0.09875847161509169]),
'versicolor&1&147': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&148': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&149': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&150': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&151': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&152': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&153': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&154': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&155': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&156': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&157': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&158': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&159': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&160': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&161': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&162': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&163': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&164': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&165': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&166': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&167': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&168': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&169': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&170': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&171': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&172': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&173': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&174': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&175': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&176': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&177': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&178': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&179': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&180': np.array([0.8224435822504677, 0.05315271528828394]),
'versicolor&1&181': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&182': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&183': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&184': np.array([0.8476206690613984, 0.02146454924522743]),
'versicolor&1&185': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&186': np.array([0.69362517791403, 0.2579390890424607]),
'versicolor&1&187': np.array([0.7261791877801502, 0.16248655642013624]),
'versicolor&1&188': np.array([0.8190416077589757, 0.05661509439536992]),
'versicolor&1&189': np.array([0.6654762076749751, 0.2949291633432878]),
'versicolor&1&190': np.array([0.7118161070185614, 0.17683644094125878]),
'versicolor&1&191': np.array([0.8165214253946836, 0.059175619390630096]),
'versicolor&1&192': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&193': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&194': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&195': np.array([0.5188109114552927, 0.03638964581864269]),
'versicolor&1&196': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&197': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&198': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&199': np.array([0.5436097000280874, 0.1461891067488832]),
'versicolor&1&200': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&201': np.array([0.32513442685780247, 0.6124765483184536]),
'versicolor&1&202': np.array([0.1812883360919208, 0.5504982486874137]),
'versicolor&1&203': np.array([0.4788153032824012, 0.08625929936974323]),
'versicolor&1&204': np.array([0.28490718210609345, 0.6650298146522879]),
'versicolor&1&205': np.array([0.1313204067730033, 0.597079642504441]),
'versicolor&1&206': np.array([0.46583127837967303, 0.09875847161509169]),
'versicolor&1&207': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&208': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&209': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&210': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&211': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&212': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&213': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&214': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&215': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&216': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&217': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&218': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&219': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&220': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&221': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&222': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&223': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&224': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&225': np.array([0.6253337666017573, 0.21983620140147825]),
'versicolor&1&226': np.array([0.6178968870349187, 0.22747652768125623]),
'versicolor&1&227': np.array([0.7245803616608639, 0.18141483095066183]),
'versicolor&1&228': np.array([0.6762617119303499, 0.19305674697949574]),
'versicolor&1&229': np.array([0.7182033715159247, 0.0970420677941148]),
'versicolor&1&230': np.array([0.6178968870349187, 0.22747652768125623]),
'versicolor&1&231': np.array([0.4976586558055923, 0.5393318265947251]),
'versicolor&1&232': np.array([0.4361093214026388, 0.4279491486345008]),
'versicolor&1&233': np.array([0.613985959011319, 0.23148898930908424]),
'versicolor&1&234': np.array([0.46747697713468217, 0.586607956360002]),
'versicolor&1&235': np.array([0.41044950174869577, 0.45415985894965977]),
'versicolor&1&236': np.array([0.6057447478066579, 0.23993389556303918]),
'versicolor&1&237': np.array([0.7245803616608639, 0.18141483095066183]),
'versicolor&1&238': np.array([0.7245803616608639, 0.18141483095066183]),
'versicolor&1&239': np.array([0.6762617119303499, 0.19305674697949574]),
'versicolor&1&240': np.array([0.056623968925773045, 0.43360725859686644]),
'versicolor&1&241': np.array([0.020169511418752378, 0.47015948158260334]),
'versicolor&1&242': np.array([0.5806365328450954, 0.47262706807712623]),
'versicolor&1&243': np.array([0.4146290154471569, 0.4964318942067898]),
'versicolor&1&244': np.array([0.3351719071445682, 0.20616862401308342]),
'versicolor&1&245': np.array([0.020169511418752378, 0.47015948158260334]),
'versicolor&1&246': np.array([0.24022705822940116, 0.7185371033867092]),
'versicolor&1&247': np.array([0.010447231513465048, 0.6616528865917504]),
'versicolor&1&248': np.array([0.024556360933646205, 0.4723948285969902]),
'versicolor&1&249': np.array([0.21321406009810842, 0.7648907754638917]),
'versicolor&1&250': np.array([-0.027450681014480036, 0.6999336015080245]),
'versicolor&1&251': np.array([-0.0164329511444131, 0.5132208276383963]),
'versicolor&1&252': np.array([0.5806365328450954, 0.47262706807712623]),
'versicolor&1&253': np.array([0.5806365328450954, 0.47262706807712623]),
'versicolor&1&254': np.array([0.4146290154471569, 0.4964318942067898]),
'versicolor&1&255': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&256': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&257': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&258': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&259': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&260': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&261': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&262': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&263': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&264': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&265': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&266': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&267': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&268': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&269': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&270': np.array([0.7749499208750119, 0.8147189440804429]),
'versicolor&1&271': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&272': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&273': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&274': np.array([0.4079256832347186, 0.038455640985860955]),
'versicolor&1&275': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&276': np.array([0.18555813792691386, 0.6940923833143309]),
'versicolor&1&277': np.array([0.32639262064172164, 0.6296083447134281]),
'versicolor&1&278': np.array([0.6964303997553315, 0.7444536452136676]),
'versicolor&1&279': np.array([0.18216358701833335, 0.747615101407194]),
'versicolor&1&280': np.array([0.33549445287370383, 0.6526039763053625]),
'versicolor&1&281': np.array([0.7213651642695392, 0.7718874443854203]),
'versicolor&1&282': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&283': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&284': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&285': np.array([0.7749499208750119, 0.8147189440804429]),
'versicolor&1&286': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&287': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&288': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&289': np.array([0.4079256832347186, 0.038455640985860955]),
'versicolor&1&290': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&291': np.array([0.18555813792691386, 0.6940923833143309]),
'versicolor&1&292': np.array([0.32639262064172164, 0.6296083447134281]),
'versicolor&1&293': np.array([0.6964303997553315, 0.7444536452136676]),
'versicolor&1&294': np.array([0.18216358701833335, 0.747615101407194]),
'versicolor&1&295': np.array([0.33549445287370383, 0.6526039763053625]),
'versicolor&1&296': np.array([0.7213651642695392, 0.7718874443854203]),
'versicolor&1&297': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&298': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&299': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&300': np.array([0.4933316375690332, 0.5272416708629276]),
'versicolor&1&301': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&302': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&303': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&1&304': np.array([0.3093950298647913, 0.1140298206733954]),
'versicolor&1&305': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&306': np.array([0.1413116283690917, 0.7479856297394165]),
'versicolor&1&307': np.array([0.189773257421942, 0.6552150653012478]),
'versicolor&1&308': np.array([0.40694846236352233, 0.5109051764198169]),
'versicolor&1&309': np.array([0.1390424906594644, 0.7991613016301518]),
'versicolor&1&310': np.array([0.1945777487290197, 0.6743932844312892]),
'versicolor&1&311': np.array([0.415695226122737, 0.5230815102377903]),
'versicolor&1&312': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&313': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&314': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&2&0': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&1': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&2': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&3': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&4': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&5': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&6': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&7': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&8': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&9': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&10': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&11': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&12': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&13': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&14': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&15': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&16': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&17': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&18': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&19': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&20': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&21': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&22': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&23': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&24': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&25': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&26': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&27': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&28': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&29': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&30': np.array([0.5188517506916897, 0.036358567813067386]),
'versicolor&2&31': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&32': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&33': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&34': np.array([0.5354807894355184, -0.3418054346754283]),
'versicolor&2&35': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&36': np.array([0.5761361484884252, -0.44637460220261904]),
'versicolor&2&37': np.array([0.7268664040181829, -0.40159406680426807]),
'versicolor&2&38': np.array([0.5917672401610737, -0.061499563231173816]),
'versicolor&2&39': np.array([0.5921993039887428, -0.46498571089163954]),
'versicolor&2&40': np.array([0.7470482158282458, -0.4169281153671854]),
'versicolor&2&41': np.array([0.5967658480721675, -0.06546963852548916]),
'versicolor&2&42': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&43': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&44': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&45': np.array([-0.8252668830593566, 0.11450866713130668]),
'versicolor&2&46': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&47': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&48': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&49': np.array([-0.8735738195653328, -0.046438180466149094]),
'versicolor&2&50': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&51': np.array([-0.8470213454017305, -0.0910504504559782]),
'versicolor&2&52': np.array([-0.8783521565540571, 0.01381094589198601]),
'versicolor&2&53': np.array([-0.8388485924434891, 0.09800790238640067]),
'versicolor&2&54': np.array([-0.8495871633670822, -0.08820642363054954]),
'versicolor&2&55': np.array([-0.8784816772224661, 0.017184907022714958]),
'versicolor&2&56': np.array([-0.835455914569297, 0.10189258327760495]),
'versicolor&2&57': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&58': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&59': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&60': np.array([-0.5227340800279543, 0.4209267574088147]),
'versicolor&2&61': np.array([-0.5140708637198534, 0.4305361238057349]),
'versicolor&2&62': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&63': np.array([-0.2741128763380603, -0.7260889090887469]),
'versicolor&2&64': np.array([-0.6188410763351541, -0.22803625884668638]),
'versicolor&2&65': np.array([-0.5140708637198534, 0.4305361238057349]),
'versicolor&2&66': np.array([-0.56940429361245, -0.3442345437882425]),
'versicolor&2&67': np.array([-0.6452502612229726, -0.04686872432129788]),
'versicolor&2&68': np.array([-0.596973015481227, 0.37395461795328944]),
'versicolor&2&69': np.array([-0.5760086048531655, -0.3353570725513232]),
'versicolor&2&70': np.array([-0.6488228567611906, -0.03186184826812757]),
'versicolor&2&71': np.array([-0.5903420131350324, 0.384224764046184]),
'versicolor&2&72': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&73': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&74': np.array([-0.2741128763380603, -0.7260889090887469]),
'versicolor&2&75': np.array([0.0, 0.47562425924289314]),
'versicolor&2&76': np.array([0.0, 0.4854368956593117]),
'versicolor&2&77': np.array([0.0, -0.7348263896003956]),
'versicolor&2&78': np.array([0.0, -0.7920887571493729]),
'versicolor&2&79': np.array([0.0, -0.507614207038711]),
'versicolor&2&80': np.array([0.0, 0.4854368956593117]),
'versicolor&2&81': np.array([0.0, -0.3982542883933272]),
'versicolor&2&82': np.array([0.0, -0.08633733326458487]),
'versicolor&2&83': np.array([0.0, 0.4039238345412103]),
'versicolor&2&84': np.array([0.0, -0.38897705551367706]),
'versicolor&2&85': np.array([0.0, -0.06915310813754129]),
'versicolor&2&86': np.array([0.0, 0.41580041887839214]),
'versicolor&2&87': np.array([0.0, -0.7348263896003956]),
'versicolor&2&88': np.array([0.0, -0.7348263896003956]),
'versicolor&2&89': np.array([0.0, -0.7920887571493729]),
'versicolor&2&90': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&91': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&92': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&93': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&94': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&95': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&96': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&97': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&98': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&99': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&100': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&101': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&102': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&103': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&104': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&105': np.array([0.5188517506916897, 0.036358567813067386]),
'versicolor&2&106': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&107': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&108': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&109': np.array([0.5354807894355184, -0.3418054346754283]),
'versicolor&2&110': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&111': np.array([0.5761361484884252, -0.44637460220261904]),
'versicolor&2&112': np.array([0.7268664040181829, -0.40159406680426807]),
'versicolor&2&113': np.array([0.5917672401610737, -0.061499563231173816]),
'versicolor&2&114': np.array([0.5921993039887428, -0.46498571089163954]),
'versicolor&2&115': np.array([0.7470482158282458, -0.4169281153671854]),
'versicolor&2&116': np.array([0.5967658480721675, -0.06546963852548916]),
'versicolor&2&117': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&118': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&119': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&120': np.array([-0.7638917827493686, 0.868015757634957]),
'versicolor&2&121': np.array([-0.8001553485824509, 0.9049358162753539]),
'versicolor&2&122': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&123': np.array([-0.14690789675963867, -0.7352367260447958]),
'versicolor&2&124': np.array([-0.32941440381886555, -0.4173178729969913]),
'versicolor&2&125': np.array([-0.8001553485824509, 0.9049358162753539]),
'versicolor&2&126': np.array([-0.18291442454393395, -0.2654898014002494]),
'versicolor&2&127': np.array([-0.5797728557269727, 0.3163189837954924]),
'versicolor&2&128': np.array([-0.7579323596667402, 0.8054136823046655]),
'versicolor&2&129': np.array([-0.1948624323669993, -0.23753953755286383]),
'versicolor&2&130': np.array([-0.6437698977881832, 0.3909540110317858]),
'versicolor&2&131': np.array([-0.7963046521980063, 0.846536369471985]),
'versicolor&2&132': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&133': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&134': np.array([-0.14690789675963867, -0.7352367260447958]),
'versicolor&2&135': np.array([-0.3219660907491514, 0.7482043503408669]),
'versicolor&2&136': np.array([-0.43839553940476644, 0.8642446918440131]),
'versicolor&2&137': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&138': np.array([0.17291299562995102, -0.7651995812779756]),
'versicolor&2&139': np.array([0.2626914501948546, -0.5596191134224637]),
'versicolor&2&140': np.array([-0.43839553940476644, 0.8642446918440131]),
'versicolor&2&141': np.array([0.4734444929420575, -0.6150974537943872]),
'versicolor&2&142': np.array([0.5369392542176313, -0.430867927332838]),
'versicolor&2&143': np.array([-0.19892251970509112, 0.5718543863753405]),
'versicolor&2&144': np.array([0.5071047612208237, -0.6507546896558788]),
'versicolor&2&145': np.array([0.5629877361048359, -0.4485515113017818]),
'versicolor&2&146': np.array([-0.3047657227470458, 0.6788631774846587]),
'versicolor&2&147': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&148': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&149': np.array([0.17291299562995102, -0.7651995812779756]),
'versicolor&2&150': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&151': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&152': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&153': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&154': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&155': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&156': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&157': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&158': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&159': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&160': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&161': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&162': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&163': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&164': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&165': np.array([0.5188517506916897, 0.036358567813067386]),
'versicolor&2&166': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&167': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&168': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&169': np.array([0.5354807894355184, -0.3418054346754283]),
'versicolor&2&170': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&171': np.array([0.5761361484884252, -0.44637460220261904]),
'versicolor&2&172': np.array([0.7268664040181829, -0.40159406680426807]),
'versicolor&2&173': np.array([0.5917672401610737, -0.061499563231173816]),
'versicolor&2&174': np.array([0.5921993039887428, -0.46498571089163954]),
'versicolor&2&175': np.array([0.7470482158282458, -0.4169281153671854]),
'versicolor&2&176': np.array([0.5967658480721675, -0.06546963852548916]),
'versicolor&2&177': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&178': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&179': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&180': np.array([-0.7638917827493686, 0.868015757634957]),
'versicolor&2&181': np.array([-0.8001553485824509, 0.9049358162753539]),
'versicolor&2&182': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&183': np.array([-0.14690789675963867, -0.7352367260447958]),
'versicolor&2&184': np.array([-0.32941440381886555, -0.4173178729969913]),
'versicolor&2&185': np.array([-0.8001553485824509, 0.9049358162753539]),
'versicolor&2&186': np.array([-0.18291442454393395, -0.2654898014002494]),
'versicolor&2&187': np.array([-0.5797728557269727, 0.3163189837954924]),
'versicolor&2&188': np.array([-0.7579323596667402, 0.8054136823046655]),
'versicolor&2&189': np.array([-0.1948624323669993, -0.23753953755286383]),
'versicolor&2&190': np.array([-0.6437698977881832, 0.3909540110317858]),
'versicolor&2&191': np.array([-0.7963046521980063, 0.846536369471985]),
'versicolor&2&192': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&193': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&194': np.array([-0.14690789675963867, -0.7352367260447958]),
'versicolor&2&195': np.array([-0.3219660907491514, 0.7482043503408669]),
'versicolor&2&196': np.array([-0.43839553940476644, 0.8642446918440131]),
'versicolor&2&197': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&198': np.array([0.17291299562995102, -0.7651995812779756]),
'versicolor&2&199': np.array([0.2626914501948546, -0.5596191134224637]),
'versicolor&2&200': np.array([-0.43839553940476644, 0.8642446918440131]),
'versicolor&2&201': np.array([0.4734444929420575, -0.6150974537943872]),
'versicolor&2&202': np.array([0.5369392542176313, -0.430867927332838]),
'versicolor&2&203': np.array([-0.19892251970509112, 0.5718543863753405]),
'versicolor&2&204': np.array([0.5071047612208237, -0.6507546896558788]),
'versicolor&2&205': np.array([0.5629877361048359, -0.4485515113017818]),
'versicolor&2&206': np.array([-0.3047657227470458, 0.6788631774846587]),
'versicolor&2&207': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&208': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&209': np.array([0.17291299562995102, -0.7651995812779756]),
'versicolor&2&210': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&211': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&212': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&213': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&214': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&215': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&216': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&217': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&218': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&219': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&220': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&221': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&222': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&223': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&224': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&225': np.array([-0.5775629083348267, 0.7118687782288384]),
'versicolor&2&226': np.array([-0.6016445709024666, 0.7366089009875252]),
'versicolor&2&227': np.array([-0.28356111726513855, -0.739741315226852]),
'versicolor&2&228': np.array([-0.0917622729715107, -0.7645776302158537]),
'versicolor&2&229': np.array([-0.25603689955471853, -0.451727980232351]),
'versicolor&2&230': np.array([-0.6016445709024666, 0.7366089009875252]),
'versicolor&2&231': np.array([-0.1269405801024398, -0.34161216844748166]),
'versicolor&2&232': np.array([-0.33176333807327857, 0.09538228407203546]),
'versicolor&2&233': np.array([-0.564696311454556, 0.6421194512020755]),
'versicolor&2&234': np.array([-0.12669523681593967, -0.32786313310034665]),
'versicolor&2&235': np.array([-0.35960845047491363, 0.1335988694092619]),
'versicolor&2&236': np.array([-0.589572650064144, 0.6697478899606418]),
'versicolor&2&237': np.array([-0.28356111726513855, -0.739741315226852]),
'versicolor&2&238': np.array([-0.28356111726513855, -0.739741315226852]),
'versicolor&2&239': np.array([-0.0917622729715107, -0.7645776302158537]),
'versicolor&2&240': np.array([0.05667262840030629, 0.4335746514880877]),
'versicolor&2&241': np.array([0.0202211257171063, 0.470123810164804]),
'versicolor&2&242': np.array([-0.052990507284891984, -0.7625494034929868]),
'versicolor&2&243': np.array([0.22461127196921116, -0.7375780139111495]),
'versicolor&2&244': np.array([0.3463149754241171, -0.5568366400939154]),
'versicolor&2&245': np.array([0.0202211257171063, 0.470123810164804]),
'versicolor&2&246': np.array([0.4022739113634462, -0.4700171786183992]),
'versicolor&2&247': np.array([0.5046771347249378, -0.33609610934748635]),
'versicolor&2&248': np.array([0.1370187510624256, 0.30303755274337163]),
'versicolor&2&249': np.array([0.41683021879255133, -0.4812793747667524]),
'versicolor&2&250': np.array([0.5150371666265885, -0.33852139184639396]),
'versicolor&2&251': np.array([0.10611499646955676, 0.33589829339460586]),
'versicolor&2&252': np.array([-0.052990507284891984, -0.7625494034929868]),
'versicolor&2&253': np.array([-0.052990507284891984, -0.7625494034929868]),
'versicolor&2&254': np.array([0.22461127196921116, -0.7375780139111495]),
'versicolor&2&255': np.array([0.5188517506916897, 0.036358567813067386]),
'versicolor&2&256': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&257': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&258': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&259': np.array([0.5354807894355184, -0.3418054346754283]),
'versicolor&2&260': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&261': np.array([0.5761361484884252, -0.44637460220261904]),
'versicolor&2&262': np.array([0.7268664040181829, -0.40159406680426807]),
'versicolor&2&263': np.array([0.5917672401610737, -0.061499563231173816]),
'versicolor&2&264': np.array([0.5921993039887428, -0.46498571089163954]),
'versicolor&2&265': np.array([0.7470482158282458, -0.4169281153671854]),
'versicolor&2&266': np.array([0.5967658480721675, -0.06546963852548916]),
'versicolor&2&267': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&268': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&269': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&270': np.array([-0.8252668830593566, 0.11450866713130668]),
'versicolor&2&271': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&272': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&273': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&274': np.array([-0.8735738195653328, -0.046438180466149094]),
'versicolor&2&275': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&276': np.array([-0.8470213454017305, -0.0910504504559782]),
'versicolor&2&277': np.array([-0.8783521565540571, 0.01381094589198601]),
'versicolor&2&278': np.array([-0.8388485924434891, 0.09800790238640067]),
'versicolor&2&279': np.array([-0.8495871633670822, -0.08820642363054954]),
'versicolor&2&280': np.array([-0.8784816772224661, 0.017184907022714958]),
'versicolor&2&281': np.array([-0.835455914569297, 0.10189258327760495]),
'versicolor&2&282': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&283': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&284': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&285': np.array([-0.8252668830593566, 0.11450866713130668]),
'versicolor&2&286': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&287': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&288': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&289': np.array([-0.8735738195653328, -0.046438180466149094]),
'versicolor&2&290': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&291': np.array([-0.8470213454017305, -0.0910504504559782]),
'versicolor&2&292': np.array([-0.8783521565540571, 0.01381094589198601]),
'versicolor&2&293': np.array([-0.8388485924434891, 0.09800790238640067]),
'versicolor&2&294': np.array([-0.8495871633670822, -0.08820642363054954]),
'versicolor&2&295': np.array([-0.8784816772224661, 0.017184907022714958]),
'versicolor&2&296': np.array([-0.835455914569297, 0.10189258327760495]),
'versicolor&2&297': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&298': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&299': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&300': np.array([-0.5227340800279543, 0.4209267574088147]),
'versicolor&2&301': np.array([-0.5140708637198534, 0.4305361238057349]),
'versicolor&2&302': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&303': np.array([-0.2741128763380603, -0.7260889090887469]),
'versicolor&2&304': np.array([-0.6188410763351541, -0.22803625884668638]),
'versicolor&2&305': np.array([-0.5140708637198534, 0.4305361238057349]),
'versicolor&2&306': np.array([-0.56940429361245, -0.3442345437882425]),
'versicolor&2&307': np.array([-0.6452502612229726, -0.04686872432129788]),
'versicolor&2&308': np.array([-0.596973015481227, 0.37395461795328944]),
'versicolor&2&309': np.array([-0.5760086048531655, -0.3353570725513232]),
'versicolor&2&310': np.array([-0.6488228567611906, -0.03186184826812757]),
'versicolor&2&311': np.array([-0.5903420131350324, 0.384224764046184]),
'versicolor&2&312': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&313': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&314': np.array([-0.2741128763380603, -0.7260889090887469]),
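    # --- 'virginica' keys begin here; same '<name>&<i>&<j>' key pattern and two-element np.array values as the 'versicolor' entries above ---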
'virginica&0&0': np.array([-0.7431524521056113, -0.24432235603856345]),
'virginica&0&1': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&2': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&3': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&4': np.array([-0.9706534384443797, 0.007448195602953232]),
'virginica&0&5': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&6': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&7': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&8': np.array([-0.8486399726113752, -0.13537345771621853]),
'virginica&0&9': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&10': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&11': np.array([-0.7870031444780577, -0.1952404625292782]),
'virginica&0&12': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&13': np.array([-0.9569238464170641, -0.02354905845282574]),
'virginica&0&14': np.array([-0.9677320606992984, -0.012432557482778654]),
'virginica&0&15': np.array([-0.7431524521056113, -0.24432235603856345]),
'virginica&0&16': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&17': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&18': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&19': np.array([-0.9706534384443797, 0.007448195602953232]),
'virginica&0&20': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&21': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&22': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&23': np.array([-0.8486399726113752, -0.13537345771621853]),
'virginica&0&24': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&25': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&26': np.array([-0.7870031444780577, -0.1952404625292782]),
'virginica&0&27': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&28': np.array([-0.9569238464170641, -0.02354905845282574]),
'virginica&0&29': np.array([-0.9677320606992984, -0.012432557482778654]),
'virginica&0&30': np.array([-0.19685199412911655, -0.7845879230594393]),
'virginica&0&31': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&32': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&33': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&34': np.array([-0.7974072911132788, 0.006894018772033604]),
'virginica&0&35': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&36': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&37': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&38': np.array([-0.3355030348883163, -0.6305271339971502]),
'virginica&0&39': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&40': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&41': np.array([-0.2519677855687844, -0.7134447168661863]),
'virginica&0&42': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&43': np.array([-0.7799744386472778, -0.026476616324402506]),
'virginica&0&44': np.array([-0.7942342242967624, -0.0119572163963601]),
'virginica&0&45': np.array([-0.05031696218434577, -0.929227611211748]),
'virginica&0&46': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&47': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&48': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&49': np.array([-0.4656481363306145, 0.007982539480288167]),
'virginica&0&50': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&51': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&52': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&53': np.array([-0.14241819268815753, -0.8424615476000691]),
'virginica&0&54': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&55': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&56': np.array([-0.1140907502997574, -0.8737800276630269]),
'virginica&0&57': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&58': np.array([-0.14198277461566922, -0.4577720226157396]),
'virginica&0&59': np.array([-0.4385442121294165, -0.05333645823279597]),
'virginica&0&60': np.array([0.029402442458921384, -0.9481684282717414]),
'virginica&0&61': np.array([0.009887859354111524, -0.9698143912008228]),
'virginica&0&62': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&63': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&64': np.array([0.3094460464703627, 0.11400643817329122]),
'virginica&0&65': np.array([0.009887859354111524, -0.9698143912008228]),
'virginica&0&66': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&67': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&68': np.array([0.19002455311770447, -0.8848597943731074]),
'virginica&0&69': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&70': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&71': np.array([0.1746467870122951, -0.9073062742839755]),
'virginica&0&72': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&73': np.array([0.11200181312407695, -0.5330612470996793]),
'virginica&0&74': np.array([0.19998284600732558, -0.3489062419702088]),
'virginica&0&75': np.array([0.0, -0.95124502153736]),
'virginica&0&76': np.array([0.0, -0.9708703761803881]),
'virginica&0&77': np.array([0.0, -0.5659706098422994]),
'virginica&0&78': np.array([0.0, -0.3962828716108186]),
'virginica&0&79': np.array([0.0, 0.2538069363248767]),
'virginica&0&80': np.array([0.0, -0.9708703761803881]),
'virginica&0&81': np.array([0.0, -0.5659706098422994]),
'virginica&0&82': np.array([0.0, -0.3962828716108186]),
'virginica&0&83': np.array([0.0, -0.8943993997517804]),
'virginica&0&84': np.array([0.0, -0.5659706098422994]),
'virginica&0&85': np.array([0.0, -0.3962828716108186]),
'virginica&0&86': np.array([0.0, -0.9166476163222441]),
'virginica&0&87': np.array([0.0, -0.3962828716108186]),
'virginica&0&88': np.array([0.0, -0.5466925844560601]),
'virginica&0&89': np.array([0.0, -0.38529908946531777]),
'virginica&0&90': np.array([-0.7431524521056113, -0.24432235603856345]),
'virginica&0&91': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&92': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&93': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&94': np.array([-0.9706534384443797, 0.007448195602953232]),
'virginica&0&95': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&96': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&97': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&98': np.array([-0.8486399726113752, -0.13537345771621853]),
'virginica&0&99': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&100': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&101': np.array([-0.7870031444780577, -0.1952404625292782]),
'virginica&0&102': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&103': np.array([-0.9569238464170641, -0.02354905845282574]),
'virginica&0&104': np.array([-0.9677320606992984, -0.012432557482778654]),
'virginica&0&105': np.array([-0.19685199412911655, -0.7845879230594393]),
'virginica&0&106': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&107': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&108': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&109': np.array([-0.7974072911132788, 0.006894018772033604]),
'virginica&0&110': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&111': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&112': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&113': np.array([-0.3355030348883163, -0.6305271339971502]),
'virginica&0&114': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&115': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&116': np.array([-0.2519677855687844, -0.7134447168661863]),
'virginica&0&117': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&118': np.array([-0.7799744386472778, -0.026476616324402506]),
'virginica&0&119': np.array([-0.7942342242967624, -0.0119572163963601]),
'virginica&0&120': np.array([-0.05031696218434577, -0.929227611211748]),
'virginica&0&121': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&122': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&123': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&124': np.array([-0.4656481363306145, 0.007982539480288167]),
'virginica&0&125': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&126': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&127': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&128': np.array([-0.14241819268815753, -0.8424615476000691]),
'virginica&0&129': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&130': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&131': np.array([-0.1140907502997574, -0.8737800276630269]),
'virginica&0&132': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&133': np.array([-0.14198277461566922, -0.4577720226157396]),
'virginica&0&134': np.array([-0.4385442121294165, -0.05333645823279597]),
'virginica&0&135': np.array([-0.19684482070614498, -0.7845939961595055]),
'virginica&0&136': np.array([-0.07475231751447156, -0.9062785678426409]),
'virginica&0&137': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&138': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&139': np.array([-0.8063011502229427, 0.4134300066735808]),
'virginica&0&140': np.array([-0.07475231751447156, -0.9062785678426409]),
'virginica&0&141': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&142': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&143': np.array([-0.2798927835773098, -0.6581136857450849]),
'virginica&0&144': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&145': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&146': np.array([-0.16106555563262584, -0.777621649099753]),
'virginica&0&147': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&148': np.array([-0.6898990333725056, -0.2534947697713122]),
'virginica&0&149': np.array([-0.769491694075929, -0.22884642137519118]),
'virginica&0&150': np.array([-0.7431524521056113, -0.24432235603856345]),
'virginica&0&151': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&152': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&153': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&154': np.array([-0.9706534384443797, 0.007448195602953232]),
'virginica&0&155': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&156': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&157': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&158': np.array([-0.8486399726113752, -0.13537345771621853]),
'virginica&0&159': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&160': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&161': np.array([-0.7870031444780577, -0.1952404625292782]),
'virginica&0&162': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&163': np.array([-0.9569238464170641, -0.02354905845282574]),
'virginica&0&164': np.array([-0.9677320606992984, -0.012432557482778654]),
'virginica&0&165': np.array([-0.19685199412911655, -0.7845879230594393]),
'virginica&0&166': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&167': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&168': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&169': np.array([-0.7974072911132788, 0.006894018772033604]),
'virginica&0&170': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&171': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&172': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&173': np.array([-0.3355030348883163, -0.6305271339971502]),
'virginica&0&174': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&175': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&176': np.array([-0.2519677855687844, -0.7134447168661863]),
'virginica&0&177': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&178': np.array([-0.7799744386472778, -0.026476616324402506]),
'virginica&0&179': np.array([-0.7942342242967624, -0.0119572163963601]),
'virginica&0&180': np.array([-0.05031696218434577, -0.929227611211748]),
'virginica&0&181': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&182': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&183': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&184': np.array([-0.4656481363306145, 0.007982539480288167]),
'virginica&0&185': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&186': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&187': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&188': np.array([-0.14241819268815753, -0.8424615476000691]),
'virginica&0&189': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&190': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&191': np.array([-0.1140907502997574, -0.8737800276630269]),
'virginica&0&192': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&193': np.array([-0.14198277461566922, -0.4577720226157396]),
'virginica&0&194': np.array([-0.4385442121294165, -0.05333645823279597]),
'virginica&0&195': np.array([-0.19684482070614498, -0.7845939961595055]),
'virginica&0&196': np.array([-0.07475231751447156, -0.9062785678426409]),
'virginica&0&197': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&198': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&199': np.array([-0.8063011502229427, 0.4134300066735808]),
'virginica&0&200': np.array([-0.07475231751447156, -0.9062785678426409]),
'virginica&0&201': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&202': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&203': np.array([-0.2798927835773098, -0.6581136857450849]),
'virginica&0&204': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&205': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&206': np.array([-0.16106555563262584, -0.777621649099753]),
'virginica&0&207': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&208': np.array([-0.6898990333725056, -0.2534947697713122]),
'virginica&0&209': np.array([-0.769491694075929, -0.22884642137519118]),
'virginica&0&210': np.array([-0.7431524521056113, -0.24432235603856345]),
'virginica&0&211': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&212': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&213': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&214': np.array([-0.9706534384443797, 0.007448195602953232]),
'virginica&0&215': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&216': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&217': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&218': np.array([-0.8486399726113752, -0.13537345771621853]),
'virginica&0&219': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&220': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&221': np.array([-0.7870031444780577, -0.1952404625292782]),
'virginica&0&222': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&223': np.array([-0.9569238464170641, -0.02354905845282574]),
'virginica&0&224': np.array([-0.9677320606992984, -0.012432557482778654]),
'virginica&0&225': np.array([-0.05031696218434577, -0.929227611211748]),
'virginica&0&226': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&227': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&228': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&229': np.array([-0.4656481363306145, 0.007982539480288167]),
'virginica&0&230': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&231': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&232': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&233': np.array([-0.14241819268815753, -0.8424615476000691]),
'virginica&0&234': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&235': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&236': np.array([-0.1140907502997574, -0.8737800276630269]),
'virginica&0&237': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&238': np.array([-0.14198277461566922, -0.4577720226157396]),
'virginica&0&239': np.array([-0.4385442121294165, -0.05333645823279597]),
'virginica&0&240': np.array([-0.11329659732608087, -0.8671819100849522]),
'virginica&0&241': np.array([-0.040390637135858574, -0.9402832917474078]),
'virginica&0&242': np.array([-0.5276460255602035, -0.28992233541586077]),
'virginica&0&243': np.array([-0.6392402874163683, -0.24114611970435948]),
'virginica&0&244': np.array([-0.6814868825686854, 0.35066801608083215]),
'virginica&0&245': np.array([-0.040390637135858574, -0.9402832917474078]),
'virginica&0&246': np.array([-0.5276460255602035, -0.28992233541586077]),
'virginica&0&247': np.array([-0.6392402874163683, -0.24114611970435948]),
'virginica&0&248': np.array([-0.16157511199607094, -0.7754323813403634]),
'virginica&0&249': np.array([-0.5276460255602035, -0.28992233541586077]),
'virginica&0&250': np.array([-0.6392402874163683, -0.24114611970435948]),
'virginica&0&251': np.array([-0.08968204532514226, -0.8491191210330045]),
'virginica&0&252': np.array([-0.6392402874163683, -0.24114611970435948]),
'virginica&0&253': np.array([-0.544626974647221, -0.24972982107967573]),
'virginica&0&254': np.array([-0.6426355680762406, -0.20016519137103667]),
'virginica&0&255': np.array([-0.19685199412911655, -0.7845879230594393]),
'virginica&0&256': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&257': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&258': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&259': np.array([-0.7974072911132788, 0.006894018772033604]),
'virginica&0&260': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&261': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&262': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&263': np.array([-0.3355030348883163, -0.6305271339971502]),
'virginica&0&264': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&265': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&266': np.array([-0.2519677855687844, -0.7134447168661863]),
'virginica&0&267': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&268': np.array([-0.7799744386472778, -0.026476616324402506]),
'virginica&0&269': np.array([-0.7942342242967624, -0.0119572163963601]),
'virginica&0&270': np.array([-0.04201361383207032, -0.9372571358382161]),
'virginica&0&271': np.array([-0.014237661899709955, -0.9660323357290304]),
'virginica&0&272': np.array([-0.04813346258022244, -0.5416229439456887]),
'virginica&0&273': np.array([-0.3109532939139045, -0.22759134703604383]),
'virginica&0&274': np.array([-0.4167677904879879, 0.22207334821665425]),
'virginica&0&275': np.array([-0.014237661899709955, -0.9660323357290304]),
'virginica&0&276': np.array([-0.04813346258022244, -0.5416229439456887]),
'virginica&0&277': np.array([-0.3109532939139045, -0.22759134703604383]),
'virginica&0&278': np.array([-0.07857689135903215, -0.8696882596532965]),
'virginica&0&279': np.array([-0.04813346258022244, -0.5416229439456887]),
'virginica&0&280': np.array([-0.3109532939139045, -0.22759134703604383]),
'virginica&0&281': np.array([-0.05160969201296555, -0.9000166344885441]),
'virginica&0&282': np.array([-0.3109532939139045, -0.22759134703604383]),
'virginica&0&283': np.array([-0.0766197045034485, -0.5080325256323984]),
'virginica&0&284': np.array([-0.32767091750230254, -0.19689316772421933]),
'virginica&0&285': np.array([-0.05031696218434577, -0.929227611211748]),
'virginica&0&286': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&287': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&288': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&289': np.array([-0.4656481363306145, 0.007982539480288167]),
'virginica&0&290': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&291': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&292': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&293': np.array([-0.14241819268815753, -0.8424615476000691]),
'virginica&0&294': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&295': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&296': np.array([-0.1140907502997574, -0.8737800276630269]),
'virginica&0&297': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&298': np.array([-0.14198277461566922, -0.4577720226157396]),
'virginica&0&299': np.array([-0.4385442121294165, -0.05333645823279597]),
'virginica&0&300': np.array([0.029402442458921384, -0.9481684282717414]),
'virginica&0&301': np.array([0.009887859354111524, -0.9698143912008228]),
'virginica&0&302': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&303': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&304': np.array([0.3094460464703627, 0.11400643817329122]),
'virginica&0&305': np.array([0.009887859354111524, -0.9698143912008228]),
'virginica&0&306': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&307': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&308': np.array([0.19002455311770447, -0.8848597943731074]),
'virginica&0&309': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&310': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&311': np.array([0.1746467870122951, -0.9073062742839755]),
'virginica&0&312': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&313': np.array([0.11200181312407695, -0.5330612470996793]),
'virginica&0&314': np.array([0.19998284600732558, -0.3489062419702088]),
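    # --- 'virginica' keys with middle index 1 ---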
'virginica&1&0': np.array([0.37157553889555184, 0.1221600832023858]),
'virginica&1&1': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&2': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&3': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&4': np.array([0.4964962439921071, 0.3798215458387346]),
'virginica&1&5': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&6': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&7': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&8': np.array([0.22125635302655813, 0.2925832702358638]),
'virginica&1&9': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&10': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&11': np.array([0.10063786451829529, 0.4085974066833644]),
'virginica&1&12': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&13': np.array([0.8441748651745272, -0.6057436494968107]),
'virginica&1&14': np.array([0.6453274192140858, -0.6334259878992301]),
'virginica&1&15': np.array([0.37157553889555184, 0.1221600832023858]),
'virginica&1&16': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&17': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&18': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&19': np.array([0.4964962439921071, 0.3798215458387346]),
'virginica&1&20': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&21': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&22': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&23': np.array([0.22125635302655813, 0.2925832702358638]),
'virginica&1&24': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&25': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&26': np.array([0.10063786451829529, 0.4085974066833644]),
'virginica&1&27': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&28': np.array([0.8441748651745272, -0.6057436494968107]),
'virginica&1&29': np.array([0.6453274192140858, -0.6334259878992301]),
'virginica&1&30': np.array([-0.32199975656257646, 0.7482293552463756]),
'virginica&1&31': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&32': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&33': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&34': np.array([0.2619265016777598, 0.33491141590339474]),
'virginica&1&35': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&36': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&37': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&38': np.array([-0.2562642052727569, 0.6920266972283227]),
'virginica&1&39': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&40': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&41': np.array([-0.34479806250338163, 0.7789143553916729]),
'virginica&1&42': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&43': np.array([0.6253066100206679, -0.5612970743228719]),
'virginica&1&44': np.array([0.4159041613345079, -0.5802838287107943]),
'virginica&1&45': np.array([-0.7749499208750119, 0.8147189440804429]),
'virginica&1&46': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&47': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&48': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&49': np.array([-0.4079256832347186, 0.038455640985860955]),
'virginica&1&50': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&51': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&52': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&53': np.array([-0.6964303997553315, 0.7444536452136676]),
'virginica&1&54': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&55': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&56': np.array([-0.7213651642695392, 0.7718874443854203]),
'virginica&1&57': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&58': np.array([-0.5538416840542331, 0.2026191723113616]),
'virginica&1&59': np.array([-0.3472412936248763, -0.1219322389673262]),
'virginica&1&60': np.array([0.4933316375690332, 0.5272416708629276]),
'virginica&1&61': np.array([0.5041830043657418, 0.5392782673950876]),
'virginica&1&62': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&63': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&64': np.array([0.3093950298647913, 0.1140298206733954]),
'virginica&1&65': np.array([0.5041830043657418, 0.5392782673950876]),
'virginica&1&66': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&67': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&68': np.array([0.40694846236352233, 0.5109051764198169]),
'virginica&1&69': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&70': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&71': np.array([0.415695226122737, 0.5230815102377903]),
'virginica&1&72': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&73': np.array([0.28313251310829024, -0.10978015869508362]),
'virginica&1&74': np.array([0.20013484983664692, -0.3483612449300506]),
'virginica&1&75': np.array([0.0, 0.4756207622944677]),
'virginica&1&76': np.array([0.0, 0.4854334805210761]),
'virginica&1&77': np.array([0.0, -0.16885577975809632]),
'virginica&1&78': np.array([0.0, -0.39580588553855395]),
'virginica&1&79': np.array([0.0, 0.2538072707138344]),
'virginica&1&80': np.array([0.0, 0.4854334805210761]),
'virginica&1&81': np.array([0.0, -0.16885577975809632]),
'virginica&1&82': np.array([0.0, -0.39580588553855395]),
'virginica&1&83': np.array([0.0, 0.4904755652105692]),
'virginica&1&84': np.array([0.0, -0.16885577975809632]),
'virginica&1&85': np.array([0.0, -0.39580588553855395]),
'virginica&1&86': np.array([0.0, 0.5008471974438506]),
'virginica&1&87': np.array([0.0, -0.39580588553855395]),
'virginica&1&88': np.array([0.0, -0.14423919730424817]),
'virginica&1&89': np.array([0.0, -0.3847817540585927]),
'virginica&1&90': np.array([0.37157553889555184, 0.1221600832023858]),
'virginica&1&91': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&92': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&93': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&94': np.array([0.4964962439921071, 0.3798215458387346]),
'virginica&1&95': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&96': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&97': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&98': np.array([0.22125635302655813, 0.2925832702358638]),
'virginica&1&99': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&100': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&101': np.array([0.10063786451829529, 0.4085974066833644]),
'virginica&1&102': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&103': np.array([0.8441748651745272, -0.6057436494968107]),
'virginica&1&104': np.array([0.6453274192140858, -0.6334259878992301]),
'virginica&1&105': np.array([-0.32199975656257646, 0.7482293552463756]),
'virginica&1&106': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&107': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&108': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&109': np.array([0.2619265016777598, 0.33491141590339474]),
'virginica&1&110': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&111': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&112': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&113': np.array([-0.2562642052727569, 0.6920266972283227]),
'virginica&1&114': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&115': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&116': np.array([-0.34479806250338163, 0.7789143553916729]),
'virginica&1&117': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&118': np.array([0.6253066100206679, -0.5612970743228719]),
'virginica&1&119': np.array([0.4159041613345079, -0.5802838287107943]),
'virginica&1&120': np.array([-0.7749499208750119, 0.8147189440804429]),
'virginica&1&121': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&122': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&123': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&124': np.array([-0.4079256832347186, 0.038455640985860955]),
'virginica&1&125': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&126': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&127': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&128': np.array([-0.6964303997553315, 0.7444536452136676]),
'virginica&1&129': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&130': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&131': np.array([-0.7213651642695392, 0.7718874443854203]),
'virginica&1&132': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&133': np.array([-0.5538416840542331, 0.2026191723113616]),
'virginica&1&134': np.array([-0.3472412936248763, -0.1219322389673262]),
'virginica&1&135': np.array([0.5188109114552927, 0.03638964581864269]),
'virginica&1&136': np.array([0.5131478569192371, 0.04203387599862816]),
'virginica&1&137': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&138': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&139': np.array([0.5436097000280874, 0.1461891067488832]),
'virginica&1&140': np.array([0.5131478569192371, 0.04203387599862816]),
'virginica&1&141': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&142': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&143': np.array([0.4788153032824012, 0.08625929936974323]),
'virginica&1&144': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&145': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&146': np.array([0.46583127837967303, 0.09875847161509169]),
'virginica&1&147': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&148': np.array([0.7419884013108898, -0.4595742931114029]),
'virginica&1&149': np.array([0.6092194175719845, -0.5086479426935605]),
'virginica&1&150': np.array([0.37157553889555184, 0.1221600832023858]),
'virginica&1&151': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&152': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&153': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&154': np.array([0.4964962439921071, 0.3798215458387346]),
'virginica&1&155': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&156': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&157': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&158': np.array([0.22125635302655813, 0.2925832702358638]),
'virginica&1&159': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&160': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&161': np.array([0.10063786451829529, 0.4085974066833644]),
'virginica&1&162': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&163': np.array([0.8441748651745272, -0.6057436494968107]),
'virginica&1&164': np.array([0.6453274192140858, -0.6334259878992301]),
'virginica&1&165': np.array([-0.32199975656257646, 0.7482293552463756]),
'virginica&1&166': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&167': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&168': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&169': np.array([0.2619265016777598, 0.33491141590339474]),
'virginica&1&170': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&171': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&172': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&173': np.array([-0.2562642052727569, 0.6920266972283227]),
'virginica&1&174': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&175': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&176': np.array([-0.34479806250338163, 0.7789143553916729]),
'virginica&1&177': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&178': np.array([0.6253066100206679, -0.5612970743228719]),
'virginica&1&179': np.array([0.4159041613345079, -0.5802838287107943]),
'virginica&1&180': np.array([-0.7749499208750119, 0.8147189440804429]),
'virginica&1&181': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&182': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&183': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&184': np.array([-0.4079256832347186, 0.038455640985860955]),
'virginica&1&185': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&186': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&187': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&188': np.array([-0.6964303997553315, 0.7444536452136676]),
'virginica&1&189': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&190': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&191': np.array([-0.7213651642695392, 0.7718874443854203]),
'virginica&1&192': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&193': np.array([-0.5538416840542331, 0.2026191723113616]),
'virginica&1&194': np.array([-0.3472412936248763, -0.1219322389673262]),
'virginica&1&195': np.array([0.5188109114552927, 0.03638964581864269]),
'virginica&1&196': np.array([0.5131478569192371, 0.04203387599862816]),
'virginica&1&197': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&198': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&199': np.array([0.5436097000280874, 0.1461891067488832]),
'virginica&1&200': np.array([0.5131478569192371, 0.04203387599862816]),
'virginica&1&201': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&202': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&203': np.array([0.4788153032824012, 0.08625929936974323]),
'virginica&1&204': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&205': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&206': np.array([0.46583127837967303, 0.09875847161509169]),
'virginica&1&207': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&208': np.array([0.7419884013108898, -0.4595742931114029]),
'virginica&1&209': np.array([0.6092194175719845, -0.5086479426935605]),
'virginica&1&210': np.array([0.37157553889555184, 0.1221600832023858]),
'virginica&1&211': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&212': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&213': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&214': np.array([0.4964962439921071, 0.3798215458387346]),
'virginica&1&215': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&216': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&217': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&218': np.array([0.22125635302655813, 0.2925832702358638]),
'virginica&1&219': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&220': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&221': np.array([0.10063786451829529, 0.4085974066833644]),
'virginica&1&222': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&223': np.array([0.8441748651745272, -0.6057436494968107]),
'virginica&1&224': np.array([0.6453274192140858, -0.6334259878992301]),
'virginica&1&225': np.array([-0.7749499208750119, 0.8147189440804429]),
'virginica&1&226': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&227': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&228': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&229': np.array([-0.4079256832347186, 0.038455640985860955]),
'virginica&1&230': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&231': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&232': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&233': np.array([-0.6964303997553315, 0.7444536452136676]),
'virginica&1&234': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&235': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&236': np.array([-0.7213651642695392, 0.7718874443854203]),
'virginica&1&237': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&238': np.array([-0.5538416840542331, 0.2026191723113616]),
'virginica&1&239': np.array([-0.3472412936248763, -0.1219322389673262]),
'virginica&1&240': np.array([0.056623968925773045, 0.43360725859686644]),
'virginica&1&241': np.array([0.020169511418752378, 0.47015948158260334]),
'virginica&1&242': np.array([0.5806365328450952, -0.4726270680771261]),
'virginica&1&243': np.array([0.41462901544715686, -0.4964318942067897]),
'virginica&1&244': np.array([0.3351719071445682, 0.20616862401308342]),
'virginica&1&245': np.array([0.020169511418752378, 0.47015948158260334]),
'virginica&1&246': np.array([0.5806365328450952, -0.4726270680771261]),
'virginica&1&247': np.array([0.41462901544715686, -0.4964318942067897]),
'virginica&1&248': np.array([0.024556360933646205, 0.4723948285969902]),
'virginica&1&249': np.array([0.5806365328450952, -0.4726270680771261]),
'virginica&1&250': np.array([0.41462901544715686, -0.4964318942067897]),
'virginica&1&251': np.array([-0.0164329511444131, 0.5132208276383963]),
'virginica&1&252': np.array([0.41462901544715686, -0.4964318942067897]),
'virginica&1&253': np.array([0.581569928198426, -0.46134543884925855]),
'virginica&1&254': np.array([0.42361197252581306, -0.5068181610814407]),
'virginica&1&255': np.array([-0.32199975656257646, 0.7482293552463756]),
'virginica&1&256': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&257': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&258': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&259': np.array([0.2619265016777598, 0.33491141590339474]),
'virginica&1&260': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&261': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&262': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&263': np.array([-0.2562642052727569, 0.6920266972283227]),
'virginica&1&264': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&265': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&266': np.array([-0.34479806250338163, 0.7789143553916729]),
'virginica&1&267': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&268': np.array([0.6253066100206679, -0.5612970743228719]),
'virginica&1&269': np.array([0.4159041613345079, -0.5802838287107943]),
'virginica&1&270': np.array([-0.6288817118959938, 0.6849987400957501]),
'virginica&1&271': np.array([-0.6491819158994796, 0.7060292771859485]),
'virginica&1&272': np.array([-0.36354251586275393, 0.01503732165107865]),
'virginica&1&273': np.array([-0.2224264339516076, -0.2751400010362469]),
'virginica&1&274': np.array([-0.3507937472799825, 0.22709708691079003]),
'virginica&1&275': np.array([-0.6491819158994796, 0.7060292771859485]),
'virginica&1&276': np.array([-0.36354251586275393, 0.01503732165107865]),
'virginica&1&277': np.array([-0.2224264339516076, -0.2751400010362469]),
'virginica&1&278': np.array([-0.6219129029345898, 0.6860569455333333]),
'virginica&1&279': np.array([-0.36354251586275393, 0.01503732165107865]),
'virginica&1&280': np.array([-0.2224264339516076, -0.2751400010362469]),
'virginica&1&281': np.array([-0.6423063482710314, 0.7078274136226649]),
'virginica&1&282': np.array([-0.2224264339516076, -0.2751400010362469]),
'virginica&1&283': np.array([-0.38798262782075055, 0.05152547330256509]),
'virginica&1&284': np.array([-0.23804537254556749, -0.24790919248823104]),
'virginica&1&285': np.array([-0.7749499208750119, 0.8147189440804429]),
'virginica&1&286': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&287': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&288': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&289': np.array([-0.4079256832347186, 0.038455640985860955]),
'virginica&1&290': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&291': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&292': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&293': np.array([-0.6964303997553315, 0.7444536452136676]),
'virginica&1&294': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&295': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&296': np.array([-0.7213651642695392, 0.7718874443854203]),
'virginica&1&297': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&298': np.array([-0.5538416840542331, 0.2026191723113616]),
'virginica&1&299': np.array([-0.3472412936248763, -0.1219322389673262]),
'virginica&1&300': np.array([0.4933316375690332, 0.5272416708629276]),
'virginica&1&301': np.array([0.5041830043657418, 0.5392782673950876]),
'virginica&1&302': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&303': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&304': np.array([0.3093950298647913, 0.1140298206733954]),
'virginica&1&305': np.array([0.5041830043657418, 0.5392782673950876]),
'virginica&1&306': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&307': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&308': np.array([0.40694846236352233, 0.5109051764198169]),
'virginica&1&309': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&310': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&311': np.array([0.415695226122737, 0.5230815102377903]),
'virginica&1&312': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&313': np.array([0.28313251310829024, -0.10978015869508362]),
'virginica&1&314': np.array([0.20013484983664692, -0.3483612449300506]),
'virginica&2&0': np.array([0.37157691321004915, 0.12216227283618836]),
'virginica&2&1': np.array([0.24630541996506908, 0.24630541996506994]),
'virginica&2&2': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&3': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&4': np.array([0.4741571944522723, -0.3872697414416878]),
'virginica&2&5': np.array([0.24630541996506908, 0.24630541996506994]),
'virginica&2&6': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&7': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&8': np.array([0.6273836195848199, -0.15720981251964872]),
'virginica&2&9': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&10': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&11': np.array([0.6863652799597699, -0.21335694415409426]),
'virginica&2&12': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&13': np.array([0.11274898124253621, 0.6292927079496371]),
'virginica&2&14': np.array([0.32240464148521225, 0.645858545382009]),
'virginica&2&15': np.array([0.37157691321004915, 0.12216227283618836]),
'virginica&2&16': np.array([0.24630541996506908, 0.24630541996506994]),
'virginica&2&17': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&18': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&19': np.array([0.4741571944522723, -0.3872697414416878]),
'virginica&2&20': np.array([0.24630541996506908, 0.24630541996506994]),
'virginica&2&21': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&22': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&23': np.array([0.6273836195848199, -0.15720981251964872]),
'virginica&2&24': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&25': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&26': np.array([0.6863652799597699, -0.21335694415409426]),
'virginica&2&27': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&28': np.array([0.11274898124253621, 0.6292927079496371]),
'virginica&2&29': np.array([0.32240464148521225, 0.645858545382009]),
'virginica&2&30': np.array([0.5188517506916897, 0.036358567813067386]),
'virginica&2&31': np.array([0.5131939273945454, 0.04199748266790813]),
'virginica&2&32': np.array([0.06285591932387397, 0.6914253444924359]),
'virginica&2&33': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&34': np.array([0.5354807894355184, -0.3418054346754283]),
'virginica&2&35': np.array([0.5131939273945454, 0.04199748266790813]),
'virginica&2&36': np.array([0.06285591932387397, 0.6914253444924359]),
'virginica&2&37': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&38': np.array([0.5917672401610737, -0.061499563231173816]),
'virginica&2&39': np.array([0.06285591932387397, 0.6914253444924359]),
'virginica&2&40': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&41': np.array([0.5967658480721675, -0.06546963852548916]),
'virginica&2&42': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&43': np.array([0.15466782862660866, 0.5877736906472755]),
'virginica&2&44': np.array([0.37833006296225374, 0.5922410451071548]),
'virginica&2&45': np.array([0.8252668830593566, 0.11450866713130668]),
'virginica&2&46': np.array([0.8211795643076095, 0.11869650771610692]),
'virginica&2&47': np.array([0.644166410268985, 0.30120464260998964]),
'virginica&2&48': np.array([0.7640280271176497, 0.19364537761420375]),
'virginica&2&49': np.array([0.8735738195653328, -0.046438180466149094]),
'virginica&2&50': np.array([0.8211795643076095, 0.11869650771610692]),
'virginica&2&51': np.array([0.644166410268985, 0.30120464260998964]),
'virginica&2&52': np.array([0.7640280271176497, 0.19364537761420375]),
'virginica&2&53': np.array([0.8388485924434891, 0.09800790238640067]),
'virginica&2&54': np.array([0.644166410268985, 0.30120464260998964]),
'virginica&2&55': np.array([0.7640280271176497, 0.19364537761420375]),
'virginica&2&56': np.array([0.835455914569297, 0.10189258327760495]),
'virginica&2&57': np.array([0.7640280271176497, 0.19364537761420375]),
'virginica&2&58': np.array([0.6958244586699014, 0.2551528503043789]),
'virginica&2&59': np.array([0.7857855057542923, 0.17526869720012267]),
'virginica&2&60': np.array([-0.5227340800279543, 0.4209267574088147]),
'virginica&2&61': np.array([-0.5140708637198534, 0.4305361238057349]),
import numpy as np
from sklearn.discriminant_analysis import LinearDiscriminantAnalysis
from sklearn.discriminant_analysis import QuadraticDiscriminantAnalysis
from sklearn.linear_model import LogisticRegression
from sklearn.linear_model import LinearRegression
from matplotlib import pyplot as plt
import seaborn as sns
import pandas as pd
def get_vowel_datsets():
train = np.loadtxt("../datasets/vowel/vowel.train.txt", delimiter=',', skiprows=1, usecols=(i for i in range(1, 12)))
test = np.loadtxt("../datasets/vowel/vowel.test.txt", delimiter=',', skiprows=1, usecols=(i for i in range(1, 12)))
return train, test
def norm(x):
x = (x - x.mean()) / x.std()
return x
# Function for calculating error rate:
def get_error_rate(y_calc, y):
"""Gets the prediction error rate for linear regression
Parameters
----------
y_calc : ndarray
The output of reg.predict
y : DataFrame
        The correct output for comparison
Returns
-------
float
the prediction error rate
"""
y_dummy_calc = pd.get_dummies(y_calc.argmax(axis=1))
y_dummy_calc.columns = y.columns.values
y_dummy_calc.index = y.index
    # Each misclassified sample flips two of the eleven dummy columns, so the
    # mismatch fraction is rescaled by 11/2 to recover the per-sample error rate.
    return np.mean(np.mean((y_dummy_calc != y) * 11 / 2))
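
# A minimal, hedged usage sketch (not part of the original script): it feeds
# get_error_rate a hand-built 11-class case in which two samples swap classes,
# so the expected error rate is 2/11. The score matrix below is illustrative.
def example_error_rate():
    # Eleven samples, one per vowel class (labelled 1..11 as in the data set).
    y_true = pd.get_dummies(pd.Series(np.arange(1, 12)))
    # Perfect predictions would score as the identity matrix.
    scores = np.eye(11)
    # Swap the first two rows so samples 0 and 1 are misclassified as each
    # other; every class still appears in the argmax, which the dummy
    # re-encoding inside get_error_rate relies on.
    scores[[0, 1]] = scores[[1, 0]]
    return get_error_rate(scores, y_true)  # ~0.18, i.e. 2 errors out of 11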
def calc_linear_regression(x_train, y_train, x_test, y_test):
# 1) LINEAR REGRESSION
    # Convert the labels to dummy variables so linear regression can be applied class-by-class
y_dummy = pd.get_dummies(y_train)
y_test_dummy = pd.get_dummies(y_test)
# Fit the model
reg = LinearRegression().fit(x_train, y_dummy)
# Get the error for training and test
print("Linear regression:")
y_test_calc = reg.predict(x_test)
y_calc = reg.predict(x_train)
print("\tThe error rate on train is %2.2f %%" % get_error_rate(y_calc, y_dummy))
print("\tThe error rate on test is %2.2f %%" % get_error_rate(y_test_calc, y_test_dummy))
def calc_lda(x_train, y_train, x_test, y_test):
# 2) LDA
# Fit the model (no need for dummy variables)
model = LinearDiscriminantAnalysis(solver='eigen', shrinkage=None, priors=None,
n_components=None, store_covariance=False, tol=0.0001)
reg = model.fit(x_train, y_train)
print("Linear discriminant analysis (LDA):")
print("\tThe error rate on train is %2.5f %%" % (1 - reg.score(x_train, y_train)))
print("\tThe error rate on test is %2.5f %%" % (1 - reg.score(x_test, y_test)))
def calc_qda(x_train, y_train, x_test, y_test):
# 3) QDA
# Fit the model (no need for dummy variables)
model = QuadraticDiscriminantAnalysis()
reg = model.fit(x_train, y_train)
print("Quadratic discriminant analysis (QDA):")
print("\tThe error rate on train is %2.5f %%" % (1 - reg.score(x_train, y_train)))
print("\tThe error rate on test is %2.5f %%" % (1 - reg.score(x_test, y_test)))
def calc_logistic(x_train, y_train, x_test, y_test):
    # 4) Logistic regression
# Fit the model (no need for dummy variables)
model = LogisticRegression(solver='newton-cg', penalty='none')
reg = model.fit(x_train, y_train)
print("Logistic regression:")
print("\tThe error rate on train is %2.2f %%" % (1 - reg.score(x_train, y_train)))
print("\tThe error rate on test is %2.2f %%" % (1 - reg.score(x_test, y_test)))
def discriminant_formula(x, sigma_inv, det, mean, pi):
    # Gaussian (QDA) discriminant score for a single class:
    # delta_k(x) = -0.5*log|Sigma_k| - 0.5*(x - mu_k)^T Sigma_k^{-1} (x - mu_k) + log(pi_k)
    delta = float(-0.5 * np.log(det) - 0.5 * (x-mean) @ sigma_inv @ (x-mean) + np.log(pi))
    return delta
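
# Hedged illustration (not part of the original script) of how
# discriminant_formula can score one sample against per-class Gaussian
# parameters estimated from training data. All names below are assumptions
# made for this sketch, not part of the original code.
def example_discriminant_scores(x_train, y_train, sample):
    classes = np.unique(y_train)
    scores = []
    for k in classes:
        xk = x_train[y_train == k]
        mean_k = xk.mean(axis=0)                # class mean
        sigma_k = np.cov(xk, rowvar=False)      # class covariance
        sigma_k_inv = np.linalg.inv(sigma_k)
        det_k = np.linalg.det(sigma_k)
        pi_k = xk.shape[0] / x_train.shape[0]   # class prior
        scores.append(discriminant_formula(sample, sigma_k_inv, det_k, mean_k, pi_k))
    # The predicted class maximises the discriminant score.
    return classes[int(np.argmax(scores))]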
def get_graph_error(x, y, sigma_k_inv, det_k, reg):
out = np.empty([x.shape[0], 11])
for j in range(x.shape[0]): # for each x
        out_row = np.empty(11)
"""
Test Surrogates Overview
========================
"""
# Author: <NAME> <<EMAIL>>
# License: new BSD
from PIL import Image
import numpy as np
import scripts.surrogates_overview as exo
import scripts.image_classifier as imgclf
import sklearn.datasets
import sklearn.linear_model
SAMPLES = 10
BATCH = 50
SAMPLE_IRIS = False
IRIS_SAMPLES = 50000
def test_bilmey_image():
"""Tests surrogate image bLIMEy."""
# Load the image
doggo_img = Image.open('surrogates_overview/img/doggo.jpg')
doggo_array = np.array(doggo_img)
# Load the classifier
clf = imgclf.ImageClassifier()
explain_classes = [('tennis ball', 852),
('golden retriever', 207),
('Labrador retriever', 208)]
    # Occlusion colour and segmentation granularity options to iterate over
    # for each explained class
colour_selection = {
i: i for i in ['mean', 'black', 'white', 'randomise-patch', 'green']
}
granularity_selection = {'low': 13, 'medium': 30, 'high': 50}
# Generate explanations
blimey_image_collection = {}
for gran_name, gran_number in granularity_selection.items():
blimey_image_collection[gran_name] = {}
for col_name in colour_selection:
blimey_image_collection[gran_name][col_name] = \
exo.build_image_blimey(
doggo_array,
clf.predict_proba,
explain_classes,
explanation_size=5,
segments_number=gran_number,
occlusion_colour=col_name,
samples_number=SAMPLES,
batch_size=BATCH,
random_seed=42)
exp = []
for gran_ in blimey_image_collection:
for col_ in blimey_image_collection[gran_]:
exp.append(blimey_image_collection[gran_][col_]['surrogates'])
assert len(exp) == len(EXP_IMG)
for e, E in zip(exp, EXP_IMG):
assert sorted(list(e.keys())) == sorted(list(E.keys()))
for key in e.keys():
assert e[key]['name'] == E[key]['name']
assert len(e[key]['explanation']) == len(E[key]['explanation'])
for e_, E_ in zip(e[key]['explanation'], E[key]['explanation']):
assert e_[0] == E_[0]
assert np.allclose(e_[1], E_[1], atol=.001, equal_nan=True)
def test_bilmey_tabular():
"""Tests surrogate tabular bLIMEy."""
# Load the iris data set
iris = sklearn.datasets.load_iris()
iris_X = iris.data # [:, :2] # take the first two features only
iris_y = iris.target
iris_labels = iris.target_names
iris_feature_names = iris.feature_names
label2class = {lab: i for i, lab in enumerate(iris_labels)}
# Fit the classifier
logreg = sklearn.linear_model.LogisticRegression(C=1e5)
logreg.fit(iris_X, iris_y)
    # Instances to be explained (one per iris class)
_dtype = iris_X.dtype
explained_instances = {
'setosa': np.array([5, 3.5, 1.5, 0.25]).astype(_dtype),
'versicolor': np.array([5.5, 2.75, 4.5, 1.25]).astype(_dtype),
'virginica': np.array([7, 3, 5.5, 2.25]).astype(_dtype)
}
petal_length_idx = iris_feature_names.index('petal length (cm)')
petal_length_bins = [1, 2, 3, 4, 5, 6, 7]
petal_width_idx = iris_feature_names.index('petal width (cm)')
petal_width_bins = [0, .5, 1, 1.5, 2, 2.5]
discs_ = []
for i, ix in enumerate(petal_length_bins): # X-axis
for iix in petal_length_bins[i + 1:]:
for j, jy in enumerate(petal_width_bins): # Y-axis
for jjy in petal_width_bins[j + 1:]:
discs_.append({
petal_length_idx: [ix, iix],
petal_width_idx: [jy, jjy]
})
for inst_i in explained_instances:
for cls_i in iris_labels:
for disc_i, disc in enumerate(discs_):
inst = explained_instances[inst_i]
cls = label2class[cls_i]
exp = exo.build_tabular_blimey(
inst, cls, iris_X, iris_y, logreg.predict_proba, disc,
IRIS_SAMPLES, SAMPLE_IRIS, 42)
key = '{}&{}&{}'.format(inst_i, cls, disc_i)
exp_ = EXP_TAB[key]
assert exp['explanation'].shape[0] == exp_.shape[0]
assert np.allclose(
exp['explanation'], exp_, atol=.001, equal_nan=True)
EXP_IMG = [
{207: {'explanation': [(13, -0.24406872165780585),
(11, -0.20456180387430317),
(9, -0.1866779131424261),
(4, 0.15001224157793785),
(3, 0.11589480417160983)],
'name': 'golden retriever'},
208: {'explanation': [(13, -0.08395966359346249),
(0, -0.0644986107387837),
(9, 0.05845584633658977),
(1, 0.04369763085720947),
(11, -0.035958188394941866)],
'name': '<NAME>'},
852: {'explanation': [(13, 0.3463529698715463),
(11, 0.2678050131923326),
(4, -0.10639863421417416),
(6, 0.08345792378117327),
(9, 0.07366945242386444)],
'name': '<NAME>'}},
{207: {'explanation': [(13, -0.0624167912596456),
(7, 0.06083359545295548),
(3, 0.0495953943686462),
(11, -0.04819787147412231),
(2, -0.03858823761391199)],
'name': '<NAME>'},
208: {'explanation': [(13, -0.08408428146916162),
(7, 0.07704235920590158),
(3, 0.06646468388122273),
(11, -0.0638326572126609),
(2, -0.052621478002380796)],
'name': '<NAME>'},
852: {'explanation': [(11, 0.35248212611685886),
(13, 0.2516925608037859),
(2, 0.13682853028454384),
(9, 0.12930134856644754),
(6, 0.1257747954095489)],
'name': '<NAME>'}},
{207: {'explanation': [(3, 0.21351937934930917),
(10, 0.16933456312772083),
(11, -0.13447244552856766),
(8, 0.11058919217055371),
(2, -0.06269239798368743)],
'name': '<NAME>'},
208: {'explanation': [(8, 0.05995551486884414),
(9, -0.05375302972380482),
(11, -0.051997353324246445),
(6, 0.04213181405953071),
(2, -0.039169895361928275)],
'name': '<NAME>'},
852: {'explanation': [(7, 0.31382219776986503),
(11, 0.24126214884275987),
(13, 0.21075924370226598),
(2, 0.11937652039885377),
(8, -0.11911265319329697)],
'name': '<NAME>'}},
{207: {'explanation': [(3, 0.39254403293049134),
(9, 0.19357165018747347),
(6, 0.16592079671652987),
(0, 0.14042059731407297),
(1, 0.09793027079765507)],
'name': '<NAME>'},
208: {'explanation': [(9, -0.19351859273276703),
(1, -0.15262967987262344),
(3, 0.12205127112235375),
(2, 0.11352141032313934),
(6, -0.11164209893429898)],
'name': '<NAME>'},
852: {'explanation': [(7, 0.17213007100844877),
(0, -0.1583030948868859),
(3, -0.13748574615069775),
(5, 0.13273283867075436),
(11, 0.12309551170070354)],
'name': '<NAME>'}},
{207: {'explanation': [(3, 0.4073533182995105),
(10, 0.20711667988142463),
(8, 0.15360813290032324),
(6, 0.1405424759832785),
(1, 0.1332920685413575)],
'name': '<NAME>'},
208: {'explanation': [(9, -0.14747910525112617),
(1, -0.13977061235228924),
(2, 0.10526833898161611),
(6, -0.10416022118399552),
(3, 0.09555992655161764)],
'name': '<NAME>'},
852: {'explanation': [(11, 0.2232260929107954),
(7, 0.21638443149433054),
(5, 0.21100464215582274),
(13, 0.145614853795006),
(1, -0.11416523431311262)],
'name': '<NAME>'}},
{207: {'explanation': [(1, 0.14700178977744183),
(0, 0.10346667279328238),
(2, 0.10346667279328238),
(7, 0.10346667279328238),
(8, 0.10162900633690726)],
'name': '<NAME>'},
208: {'explanation': [(10, -0.10845134816658476),
(8, -0.1026920429226184),
(6, -0.10238154733842847),
(18, 0.10094164937411244),
(16, 0.08646888450232793)],
'name': '<NAME>'},
852: {'explanation': [(18, -0.20542297091894474),
(13, 0.2012751176130666),
(8, -0.19194747162742365),
(20, 0.14686930696710473),
(15, 0.11796990086271067)],
'name': '<NAME>'}},
{207: {'explanation': [(13, 0.12446259821701779),
(17, 0.11859084421095789),
(15, 0.09690553833007137),
(12, -0.08869743701731962),
(4, 0.08124900427893789)],
'name': '<NAME>'},
208: {'explanation': [(10, -0.09478194981909983),
(20, -0.09173392507039077),
(9, 0.08768898801254493),
(17, -0.07553994244536394),
(4, 0.07422905503397653)],
'name': '<NAME>'},
852: {'explanation': [(21, 0.1327882942965061),
(1, 0.1238236573086363),
(18, -0.10911712271717902),
(19, 0.09707191051320978),
(6, 0.08593672504338913)],
'name': '<NAME>'}},
{207: {'explanation': [(6, 0.14931728779865114),
(14, 0.14092073957103526),
(1, 0.11071480021464616),
(4, 0.10655287976934531),
(8, 0.08705404649152573)],
'name': '<NAME>'},
208: {'explanation': [(8, -0.12242580400886727),
(9, 0.12142729544158742),
(14, -0.1148252787068248),
(16, -0.09562322208795092),
(4, 0.09350160975513132)],
'name': '<NAME>'},
852: {'explanation': [(6, 0.04227675072263027),
(9, -0.03107924340879173),
(14, 0.028007115650713045),
(13, 0.02771190348545554),
(19, 0.02640441416071482)],
'name': '<NAME>'}},
{207: {'explanation': [(19, 0.14313680656283245),
(18, 0.12866508562342843),
(8, 0.11809779264185447),
(0, 0.11286255403442104),
(2, 0.11286255403442104)],
'name': '<NAME>'},
208: {'explanation': [(9, 0.2397917428082761),
(14, -0.19435572812170654),
(6, -0.1760894833446507),
(18, -0.12243333818399058),
(15, 0.10986343675377105)],
'name': '<NAME>'},
852: {'explanation': [(14, 0.15378038774613365),
(9, -0.14245940635481966),
(6, 0.10213601012183973),
(20, 0.1009180838986786),
(3, 0.09780065767815548)],
'name': '<NAME>'}},
{207: {'explanation': [(15, 0.06525850448807077),
(9, 0.06286791243851698),
(19, 0.055189970374185854),
(8, 0.05499197604401475),
(13, 0.04748220842936177)],
'name': '<NAME>'},
208: {'explanation': [(6, -0.31549091899770765),
(5, 0.1862302670824446),
(8, -0.17381478451341995),
(10, -0.17353516098662508),
(14, -0.13591542421754205)],
'name': '<NAME>'},
852: {'explanation': [(14, 0.2163853942943355),
(6, 0.17565046338282214),
(1, 0.12446193028474549),
(9, -0.11365789839746396),
(10, 0.09239073691962967)],
'name': '<NAME>'}},
{207: {'explanation': [(19, 0.1141207265647932),
(36, -0.08861425922625768),
(30, 0.07219209872026074),
(9, -0.07150939547859836),
(38, -0.06988288637544438)],
'name': '<NAME>'},
208: {'explanation': [(29, 0.10531073909547647),
(13, 0.08279642208039652),
(34, -0.0817952443980797),
(33, -0.08086848205765082),
(12, 0.08086848205765082)],
'name': '<NAME>'},
852: {'explanation': [(13, -0.1330452414595897),
(4, 0.09942366413042845),
(12, -0.09881995683190645),
(33, 0.09881995683190645),
(19, -0.09596925317560831)],
'name': '<NAME>'}},
{207: {'explanation': [(37, 0.08193926967758253),
(35, 0.06804043021426347),
(15, 0.06396269230810163),
(11, 0.062255657227065296),
(8, 0.05529200233091672)],
'name': '<NAME>'},
208: {'explanation': [(19, 0.05711957286614678),
(27, -0.050230108135410824),
(16, -0.04743034616549999),
(5, -0.046717346734255705),
(9, -0.04419100026638039)],
'name': '<NAME>'},
852: {'explanation': [(3, -0.08390967998497496),
(30, -0.07037680222442452),
(22, 0.07029819368543713),
(8, -0.06861396187180349),
(37, -0.06662511956402824)],
'name': '<NAME>'}},
{207: {'explanation': [(19, 0.048418845359024805),
(9, -0.0423869575883795),
(30, 0.04012650790044438),
(36, -0.03787242980067195),
(10, 0.036557999380695635)],
'name': '<NAME>'},
208: {'explanation': [(10, 0.12120686823129677),
(17, 0.10196564232230493),
(7, 0.09495133975425854),
(25, -0.0759657891182803),
(2, -0.07035244568286837)],
'name': '<NAME>'},
852: {'explanation': [(3, -0.0770578003457272),
(28, 0.0769372258280398),
(6, -0.06044725989272927),
(22, 0.05550155775286349),
(31, -0.05399028046597057)],
'name': '<NAME>'}},
{207: {'explanation': [(14, 0.05371383110181226),
(0, -0.04442539316084218),
(18, 0.042589475382826494),
(19, 0.04227647855354252),
(17, 0.041685661662754295)],
'name': '<NAME>'},
208: {'explanation': [(29, 0.14419601354489464),
(17, 0.11785174500536676),
(36, 0.1000501679652906),
(10, 0.09679790134851017),
(35, 0.08710376081189208)],
'name': '<NAME>'},
852: {'explanation': [(8, -0.02486237985832769),
(3, -0.022559886154747102),
(11, -0.021878686669239856),
(36, 0.021847953817988534),
(19, -0.018317598300716522)],
'name': '<NAME>'}},
{207: {'explanation': [(37, 0.08098729255605368),
(35, 0.06639102704982619),
(15, 0.06033721190370432),
(34, 0.05826267856117829),
(28, 0.05549505160798173)],
'name': '<NAME>'},
208: {'explanation': [(17, 0.13839012042250542),
(10, 0.11312187488346881),
(7, 0.10729071207480922),
(25, -0.09529127965797404),
(11, -0.09279834572979286)],
'name': '<NAME>'},
852: {'explanation': [(3, -0.028385651836694076),
(22, 0.023364702783498722),
(8, -0.023097812578270233),
(30, -0.022931236620034406),
(37, -0.022040170736525342)],
'name': '<NAME>'}}
]
EXP_TAB = {
'setosa&0&0': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&1': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&2': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&3': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&4': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&5': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&6': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&7': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&8': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&9': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&10': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&11': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&12': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&13': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&14': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&15': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&16': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&17': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&18': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&19': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&20': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&21': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&22': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&23': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&24': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&25': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&26': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&27': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&28': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&29': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&30': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&31': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&32': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&33': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&34': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&35': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&36': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&37': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&38': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&39': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&40': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&41': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&42': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&43': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&44': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&45': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&46': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&47': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&48': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&49': np.array([0.4656481363306145, 0.007982539480288167]),
'setosa&0&50': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&51': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&52': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&53': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&54': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&55': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&56': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&57': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&58': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&59': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&60': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&61': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&62': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&63': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&0&64': np.array([0.3094460464703627, 0.11400643817329122]),
'setosa&0&65': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&66': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&67': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&68': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&69': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&70': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&71': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&72': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&73': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&74': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&0&75': np.array([0.0, 0.95124502153736]),
'setosa&0&76': np.array([0.0, 0.9708703761803881]),
'setosa&0&77': np.array([0.0, 0.5659706098422994]),
'setosa&0&78': np.array([0.0, 0.3962828716108186]),
'setosa&0&79': np.array([0.0, 0.2538069363248767]),
'setosa&0&80': np.array([0.0, 0.95124502153736]),
'setosa&0&81': np.array([0.0, 0.95124502153736]),
'setosa&0&82': np.array([0.0, 0.95124502153736]),
'setosa&0&83': np.array([0.0, 0.95124502153736]),
'setosa&0&84': np.array([0.0, 0.9708703761803881]),
'setosa&0&85': np.array([0.0, 0.9708703761803881]),
'setosa&0&86': np.array([0.0, 0.9708703761803881]),
'setosa&0&87': np.array([0.0, 0.5659706098422994]),
'setosa&0&88': np.array([0.0, 0.5659706098422994]),
'setosa&0&89': np.array([0.0, 0.3962828716108186]),
'setosa&0&90': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&91': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&92': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&93': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&94': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&95': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&96': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&97': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&98': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&99': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&100': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&101': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&102': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&103': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&104': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&105': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&106': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&107': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&108': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&109': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&110': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&111': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&112': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&113': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&114': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&115': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&116': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&117': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&118': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&119': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&120': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&121': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&122': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&123': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&124': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&125': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&126': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&127': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&128': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&129': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&130': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&131': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&132': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&133': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&134': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&135': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&136': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&137': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&138': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&139': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&140': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&141': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&142': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&143': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&144': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&145': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&146': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&147': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&148': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&149': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&150': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&151': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&152': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&153': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&154': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&155': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&156': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&157': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&158': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&159': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&160': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&161': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&162': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&163': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&164': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&165': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&166': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&167': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&168': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&169': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&170': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&171': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&172': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&173': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&174': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&175': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&176': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&177': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&178': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&179': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&180': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&181': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&182': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&183': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&184': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&185': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&186': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&187': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&188': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&189': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&190': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&191': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&192': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&193': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&194': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&195': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&196': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&197': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&198': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&199': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&200': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&201': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&202': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&203': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&204': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&205': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&206': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&207': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&208': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&209': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&210': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&211': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&212': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&213': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&214': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&215': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&216': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&217': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&218': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&219': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&220': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&221': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&222': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&223': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&224': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&225': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&226': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&227': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&228': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&229': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&230': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&231': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&232': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&233': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&234': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&235': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&236': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&237': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&238': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&239': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&240': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&241': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&242': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&243': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&244': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&245': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&246': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&247': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&248': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&249': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&250': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&251': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&252': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&253': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&254': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&255': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&256': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&257': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&258': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&259': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&260': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&261': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&262': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&263': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&264': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&265': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&266': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&267': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&268': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&269': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&270': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&271': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&272': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&273': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&274': np.array([0.4656481363306145, 0.007982539480288167]),
'setosa&0&275': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&276': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&277': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&278': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&279': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&280': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&281': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&282': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&283': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&284': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&285': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&286': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&287': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&288': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&289': np.array([0.4656481363306145, 0.007982539480288167]),
'setosa&0&290': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&291': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&292': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&293': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&294': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&295': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&296': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&297': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&298': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&299': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&300': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&301': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&302': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&303': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&0&304': np.array([0.3094460464703627, 0.11400643817329122]),
'setosa&0&305': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&306': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&307': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&308': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&309': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&310': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&311': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&312': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&313': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&314': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&1&0': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&1': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&2': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&3': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&4': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&5': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&6': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&7': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&8': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&9': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&10': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&11': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&12': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&13': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&14': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&15': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&16': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&17': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&18': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&19': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&20': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&21': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&22': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&23': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&24': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&25': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&26': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&27': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&28': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&29': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&30': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&31': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&32': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&33': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&34': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&35': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&36': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&37': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&38': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&39': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&40': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&41': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&42': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&43': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&44': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&45': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&46': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&47': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&48': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&49': np.array([0.4079256832347186, 0.038455640985860955]),
'setosa&1&50': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&51': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&52': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&53': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&54': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&55': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&56': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&57': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&58': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&59': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&60': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&61': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&62': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&63': np.array([0.13717260713320106, 0.3627779907901665]),
'setosa&1&64': np.array([0.3093950298647913, 0.1140298206733954]),
'setosa&1&65': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&66': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&67': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&68': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&69': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&70': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&71': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&72': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&73': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&74': np.array([0.13717260713320106, 0.3627779907901665]),
'setosa&1&75': np.array([0.0, -0.4756207622944677]),
'setosa&1&76': np.array([0.0, -0.4854334805210761]),
'setosa&1&77': np.array([0.0, 0.16885577975809635]),
'setosa&1&78': np.array([0.0, 0.395805885538554]),
'setosa&1&79': np.array([0.0, 0.2538072707138344]),
'setosa&1&80': np.array([0.0, -0.4756207622944677]),
'setosa&1&81': np.array([0.0, -0.4756207622944677]),
'setosa&1&82': np.array([0.0, -0.4756207622944677]),
'setosa&1&83': np.array([0.0, -0.4756207622944677]),
'setosa&1&84': np.array([0.0, -0.4854334805210761]),
'setosa&1&85': np.array([0.0, -0.4854334805210761]),
'setosa&1&86': np.array([0.0, -0.4854334805210761]),
'setosa&1&87': np.array([0.0, 0.16885577975809635]),
'setosa&1&88': np.array([0.0, 0.16885577975809635]),
'setosa&1&89': np.array([0.0, 0.395805885538554]),
'setosa&1&90': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&91': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&92': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&93': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&94': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&95': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&96': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&97': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&98': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&99': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&100': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&101': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&102': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&103': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&104': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&105': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&106': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&107': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&108': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&109': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&110': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&111': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&112': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&113': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&114': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&115': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&116': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&117': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&118': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&119': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&120': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&121': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&122': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&123': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&124': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&125': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&126': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&127': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&128': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&129': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&130': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&131': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&132': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&133': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&134': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&135': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&136': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&137': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&138': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&139': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&140': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&141': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&142': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&143': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&144': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&145': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&146': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&147': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&148': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&149': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&150': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&151': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&152': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&153': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&154': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&155': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&156': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&157': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&158': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&159': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&160': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&161': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&162': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&163': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&164': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&165': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&166': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&167': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&168': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&169': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&170': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&171': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&172': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&173': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&174': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&175': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&176': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&177': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&178': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&179': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&180': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&181': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&182': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&183': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&184': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&185': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&186': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&187': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&188': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&189': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&190': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&191': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&192': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&193': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&194': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&195': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&196': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&197': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&198': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&199': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&200': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&201': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&202': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&203': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&204': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&205': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&206': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&207': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&208': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&209': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&210': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&211': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&212': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&213': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&214': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&215': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&216': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&217': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&218': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&219': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&220': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&221': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&222': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&223': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&224': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&225': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&226': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&227': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&228': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&229': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&230': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&231': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&232': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&233': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&234': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&235': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&236': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&237': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&238': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&239': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&240': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&241': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&242': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&243': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&244': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&245': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&246': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&247': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&248': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&249': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&250': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&251': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&252': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&253': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&254': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&255': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&256': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&257': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&258': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&259': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&260': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&261': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&262': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&263': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&264': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&265': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&266': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&267': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&268': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&269': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&270': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&271': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&272': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&273': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&274': np.array([0.4079256832347186, 0.038455640985860955]),
'setosa&1&275': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&276': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&277': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&278': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&279': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&280': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&281': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&282': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&283': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&284': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&285': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&286': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&287': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&288': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&289': np.array([0.4079256832347186, 0.038455640985860955]),
'setosa&1&290': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&291': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&292': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&293': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&294': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&295': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&296': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&297': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&298': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&299': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&300': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&301': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&302': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&303': np.array([0.13717260713320106, 0.3627779907901665]),
'setosa&1&304': np.array([0.3093950298647913, 0.1140298206733954]),
'setosa&1&305': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&306': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&307': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&308': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&309': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&310': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&311': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&312': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&313': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&314': np.array([0.13717260713320106, 0.3627779907901665]),
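# --- entries with keys of the form 'setosa&2&<i>' follow ---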
'setosa&2&0': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&1': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&2': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&3': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&4': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&5': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&6': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&7': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&8': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&9': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&10': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&11': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&12': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&13': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&14': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&15': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&16': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&17': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&18': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&19': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&20': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&21': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&22': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&23': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&24': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&25': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&26': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&27': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&28': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&29': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&30': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&31': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&32': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&33': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&34': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&35': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&36': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&37': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&38': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&39': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&40': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&41': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&42': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&43': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&44': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&45': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&46': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&47': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&48': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&49': np.array([-0.8735738195653328, -0.046438180466149094]),
'setosa&2&50': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&51': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&52': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&53': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&54': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&55': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&56': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&57': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&58': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&59': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&60': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&61': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&62': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&63': np.array([-0.2741128763380603, -0.7260889090887469]),
'setosa&2&64': np.array([-0.6188410763351541, -0.22803625884668638]),
'setosa&2&65': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&66': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&67': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&68': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&69': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&70': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&71': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&72': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&73': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&74': np.array([-0.2741128763380603, -0.7260889090887469]),
'setosa&2&75': np.array([0.0, -0.47562425924289314]),
'setosa&2&76': np.array([0.0, -0.48543689565931186]),
'setosa&2&77': np.array([0.0, -0.7348263896003956]),
'setosa&2&78': np.array([0.0, -0.7920887571493729]),
'setosa&2&79': np.array([0.0, -0.507614207038711]),
'setosa&2&80': np.array([0.0, -0.47562425924289314]),
'setosa&2&81': np.array([0.0, -0.47562425924289314]),
'setosa&2&82': np.array([0.0, -0.47562425924289314]),
'setosa&2&83': np.array([0.0, -0.47562425924289314]),
'setosa&2&84': np.array([0.0, -0.48543689565931186]),
'setosa&2&85': np.array([0.0, -0.48543689565931186]),
'setosa&2&86': np.array([0.0, -0.48543689565931186]),
'setosa&2&87': np.array([0.0, -0.7348263896003956]),
'setosa&2&88': np.array([0.0, -0.7348263896003956]),
'setosa&2&89': np.array([0.0, -0.7920887571493729]),
'setosa&2&90': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&91': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&92': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&93': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&94': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&95': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&96': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&97': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&98': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&99': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&100': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&101': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&102': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&103': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&104': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&105': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&106': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&107': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&108': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&109': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&110': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&111': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&112': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&113': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&114': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&115': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&116': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&117': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&118': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&119': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&120': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&121': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&122': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&123': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&124': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&125': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&126': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&127': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&128': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&129': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&130': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&131': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&132': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&133': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&134': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&135': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&136': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&137': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&138': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&139': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&140': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&141': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&142': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&143': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&144': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&145': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&146': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&147': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&148': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&149': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&150': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&151': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&152': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&153': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&154': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&155': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&156': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&157': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&158': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&159': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&160': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&161': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&162': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&163': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&164': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&165': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&166': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&167': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&168': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&169': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&170': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&171': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&172': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&173': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&174': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&175': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&176': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&177': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&178': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&179': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&180': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&181': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&182': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&183': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&184': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&185': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&186': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&187': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&188': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&189': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&190': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&191': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&192': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&193': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&194': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&195': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&196': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&197': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&198': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&199': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&200': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&201': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&202': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&203': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&204': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&205': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&206': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&207': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&208': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&209': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&210': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&211': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&212': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&213': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&214': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&215': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&216': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&217': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&218': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&219': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&220': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&221': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&222': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&223': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&224': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&225': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&226': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&227': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&228': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&229': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&230': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&231': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&232': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&233': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&234': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&235': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&236': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&237': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&238': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&239': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&240': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&241': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&242': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&243': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&244': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&245': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&246': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&247': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&248': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&249': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&250': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&251': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&252': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&253': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&254': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&255': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&256': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&257': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&258': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&259': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&260': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&261': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&262': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&263': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&264': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&265': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&266': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&267': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&268': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&269': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&270': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&271': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&272': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&273': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&274': np.array([-0.8735738195653328, -0.046438180466149094]),
'setosa&2&275': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&276': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&277': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&278': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&279': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&280': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&281': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&282': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&283': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&284': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&285': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&286': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&287': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&288': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&289': np.array([-0.8735738195653328, -0.046438180466149094]),
'setosa&2&290': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&291': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&292': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&293': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&294': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&295': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&296': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&297': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&298': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&299': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&300': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&301': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&302': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&303': np.array([-0.2741128763380603, -0.7260889090887469]),
'setosa&2&304': np.array([-0.6188410763351541, -0.22803625884668638]),
'setosa&2&305': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&306': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&307': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&308': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&309': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&310': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&311': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&312': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&313': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&314': np.array([-0.2741128763380603, -0.7260889090887469]),
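# --- entries with keys of the form 'versicolor&0&<i>' follow ---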
'versicolor&0&0': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&1': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&2': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&3': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&4': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&5': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&6': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&7': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&8': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&9': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&10': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&11': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&12': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&13': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&14': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&15': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&16': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&17': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&18': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&19': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&20': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&21': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&22': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&23': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&24': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&25': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&26': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&27': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&28': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&29': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&30': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&31': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&32': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&33': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&34': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&35': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&36': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&37': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&38': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&39': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&40': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&41': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&42': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&43': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&44': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&45': np.array([0.05031696218434577, -0.929227611211748]),
'versicolor&0&46': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&47': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&48': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&49': np.array([0.4656481363306145, 0.007982539480288167]),
'versicolor&0&50': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&51': np.array([0.6614632074748169, -0.6030419328583525]),
'versicolor&0&52': np.array([0.5519595359123358, -0.6434192906054143]),
'versicolor&0&53': np.array([0.14241819268815753, -0.8424615476000691]),
'versicolor&0&54': np.array([0.667423576348749, -0.6594086777766442]),
'versicolor&0&55': np.array([0.5429872243487625, -0.6697888833280774]),
'versicolor&0&56': np.array([0.1140907502997574, -0.8737800276630269]),
'versicolor&0&57': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&58': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&59': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&60': np.array([0.029402442458921384, -0.9481684282717414]),
'versicolor&0&61': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&62': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&63': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&0&64': np.array([0.3094460464703627, 0.11400643817329122]),
'versicolor&0&65': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&66': np.array([0.42809266524335826, -0.40375108595117376]),
'versicolor&0&67': np.array([0.45547700380103057, -0.6083463409799501]),
'versicolor&0&68': np.array([0.19002455311770447, -0.8848597943731074]),
'versicolor&0&69': np.array([0.436966114193701, -0.4638042290788281]),
'versicolor&0&70': np.array([0.45424510803217066, -0.6425314361631614]),
'versicolor&0&71': np.array([0.1746467870122951, -0.9073062742839755]),
'versicolor&0&72': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&73': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&74': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&0&75': np.array([0.0, -0.95124502153736]),
'versicolor&0&76': np.array([0.0, -0.9708703761803881]),
'versicolor&0&77': np.array([0.0, 0.5659706098422994]),
'versicolor&0&78': np.array([0.0, 0.3962828716108186]),
'versicolor&0&79': np.array([0.0, 0.2538069363248767]),
'versicolor&0&80': np.array([0.0, -0.9708703761803881]),
'versicolor&0&81': np.array([0.0, -0.3631376646911367]),
'versicolor&0&82': np.array([0.0, -0.5804857652839247]),
'versicolor&0&83': np.array([0.0, -0.8943993997517804]),
'versicolor&0&84': np.array([0.0, -0.4231275527222919]),
'versicolor&0&85': np.array([0.0, -0.6164235822373675]),
'versicolor&0&86': np.array([0.0, -0.9166476163222441]),
'versicolor&0&87': np.array([0.0, 0.5659706098422994]),
'versicolor&0&88': np.array([0.0, 0.5659706098422994]),
'versicolor&0&89': np.array([0.0, 0.3962828716108186]),
'versicolor&0&90': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&91': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&92': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&93': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&94': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&95': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&96': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&97': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&98': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&99': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&100': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&101': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&102': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&103': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&104': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&105': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&106': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&107': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&108': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&109': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&110': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&111': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&112': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&113': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&114': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&115': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&116': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&117': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&118': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&119': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&120': np.array([-0.05855179950109871, -0.9211684729232403]),
'versicolor&0&121': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&122': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&123': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&124': np.array([-0.5182062652425321, 0.3958533237517639]),
'versicolor&0&125': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&126': np.array([-0.5107107533700952, 0.0075507123577884866]),
'versicolor&0&127': np.array([-0.1464063320531759, -0.4788055402156298]),
'versicolor&0&128': np.array([-0.061109248092233844, -0.8620287767000373]),
'versicolor&0&129': np.array([-0.4706137753079746, -0.057389625790424635]),
'versicolor&0&130': np.array([-0.06804620923037683, -0.5677904519730453]),
'versicolor&0&131': np.array([-0.020216773196675246, -0.9057119888626176]),
'versicolor&0&132': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&133': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&134': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&135': np.array([-0.19684482070614498, -0.7845939961595055]),
'versicolor&0&136': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&137': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&138': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&139': np.array([-0.8063011502229427, 0.4134300066735808]),
'versicolor&0&140': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&141': np.array([-0.7985789197998611, 0.0026209054759345337]),
'versicolor&0&142': np.array([-0.7182275903095532, -0.11963032135457498]),
'versicolor&0&143': np.array([-0.2798927835773098, -0.6581136857450849]),
'versicolor&0&144': np.array([-0.7920119433269182, -0.0142751249964083]),
'versicolor&0&145': np.array([-0.6943081428778407, -0.14852813120265815]),
'versicolor&0&146': np.array([-0.16106555563262584, -0.777621649099753]),
'versicolor&0&147': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&148': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&149': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&150': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&151': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&152': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&153': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&154': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&155': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&156': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&157': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&158': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&159': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&160': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&161': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&162': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&163': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&164': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&165': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&166': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&167': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&168': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&169': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&170': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&171': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&172': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&173': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&174': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&175': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&176': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&177': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&178': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&179': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&180': np.array([-0.05855179950109871, -0.9211684729232403]),
'versicolor&0&181': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&182': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&183': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&184': np.array([-0.5182062652425321, 0.3958533237517639]),
'versicolor&0&185': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&186': np.array([-0.5107107533700952, 0.0075507123577884866]),
'versicolor&0&187': np.array([-0.1464063320531759, -0.4788055402156298]),
'versicolor&0&188': np.array([-0.061109248092233844, -0.8620287767000373]),
'versicolor&0&189': np.array([-0.4706137753079746, -0.057389625790424635]),
'versicolor&0&190': np.array([-0.06804620923037683, -0.5677904519730453]),
'versicolor&0&191': np.array([-0.020216773196675246, -0.9057119888626176]),
'versicolor&0&192': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&193': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&194': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&195': np.array([-0.19684482070614498, -0.7845939961595055]),
'versicolor&0&196': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&197': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&198': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&199': np.array([-0.8063011502229427, 0.4134300066735808]),
'versicolor&0&200': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&201': np.array([-0.7985789197998611, 0.0026209054759345337]),
'versicolor&0&202': np.array([-0.7182275903095532, -0.11963032135457498]),
'versicolor&0&203': np.array([-0.2798927835773098, -0.6581136857450849]),
'versicolor&0&204': np.array([-0.7920119433269182, -0.0142751249964083]),
'versicolor&0&205': np.array([-0.6943081428778407, -0.14852813120265815]),
'versicolor&0&206': np.array([-0.16106555563262584, -0.777621649099753]),
'versicolor&0&207': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&208': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&209': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&210': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&211': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&212': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&213': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&214': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&215': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&216': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&217': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&218': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&219': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&220': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&221': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&222': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&223': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&224': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&225': np.array([-0.04777085826693217, -0.931704979630315]),
'versicolor&0&226': np.array([-0.016252316132452975, -0.9640854286687816]),
'versicolor&0&227': np.array([-0.44101924439572626, 0.5583264842761904]),
'versicolor&0&228': np.array([-0.5844994389588399, 0.5715208832363579]),
'versicolor&0&229': np.array([-0.46216647196120714, 0.35468591243823655]),
'versicolor&0&230': np.array([-0.016252316132452975, -0.9640854286687816]),
'versicolor&0&231': np.array([-0.3707180757031537, -0.1977196581472426]),
'versicolor&0&232': np.array([-0.1043459833293615, -0.5233314327065356]),
'versicolor&0&233': np.array([-0.049289647556763364, -0.8736084405111605]),
'versicolor&0&234': np.array([-0.34078174031874375, -0.25874482325965437]),
'versicolor&0&235': np.array([-0.050841051273783675, -0.5877587283589205]),
'versicolor&0&236': np.array([-0.0161720977425142, -0.9096817855236822]),
'versicolor&0&237': np.array([-0.44101924439572626, 0.5583264842761904]),
'versicolor&0&238': np.array([-0.44101924439572626, 0.5583264842761904]),
'versicolor&0&239': np.array([-0.5844994389588399, 0.5715208832363579]),
'versicolor&0&240': np.array([-0.11329659732608087, -0.8671819100849522]),
'versicolor&0&241': np.array([-0.040390637135858574, -0.9402832917474078]),
'versicolor&0&242': np.array([-0.5276460255602035, 0.28992233541586077]),
'versicolor&0&243': np.array([-0.6392402874163683, 0.24114611970435948]),
'versicolor&0&244': np.array([-0.6814868825686854, 0.35066801608083215]),
'versicolor&0&245': np.array([-0.040390637135858574, -0.9402832917474078]),
'versicolor&0&246': np.array([-0.6425009695928476, -0.24851992476830956]),
'versicolor&0&247': np.array([-0.5151243662384031, -0.3255567772442641]),
'versicolor&0&248': np.array([-0.16157511199607094, -0.7754323813403634]),
'versicolor&0&249': np.array([-0.6300442788906601, -0.28361140069713875]),
'versicolor&0&250': np.array([-0.4875864856121089, -0.3614122096616301]),
'versicolor&0&251': np.array([-0.08968204532514226, -0.8491191210330045]),
'versicolor&0&252': np.array([-0.5276460255602035, 0.28992233541586077]),
'versicolor&0&253': np.array([-0.5276460255602035, 0.28992233541586077]),
'versicolor&0&254': np.array([-0.6392402874163683, 0.24114611970435948]),
'versicolor&0&255': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&256': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&257': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&258': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&259': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&260': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&261': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&262': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&263': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&264': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&265': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&266': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&267': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&268': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&269': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&270': np.array([0.05031696218434577, -0.929227611211748]),
'versicolor&0&271': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&272': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&273': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&274': np.array([0.4656481363306145, 0.007982539480288167]),
'versicolor&0&275': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&276': np.array([0.6614632074748169, -0.6030419328583525]),
'versicolor&0&277': np.array([0.5519595359123358, -0.6434192906054143]),
'versicolor&0&278': np.array([0.14241819268815753, -0.8424615476000691]),
'versicolor&0&279': np.array([0.667423576348749, -0.6594086777766442]),
'versicolor&0&280': np.array([0.5429872243487625, -0.6697888833280774]),
'versicolor&0&281': np.array([0.1140907502997574, -0.8737800276630269]),
'versicolor&0&282': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&283': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&284': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&285': np.array([0.05031696218434577, -0.929227611211748]),
'versicolor&0&286': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&287': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&288': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&289': np.array([0.4656481363306145, 0.007982539480288167]),
'versicolor&0&290': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&291': np.array([0.6614632074748169, -0.6030419328583525]),
'versicolor&0&292': np.array([0.5519595359123358, -0.6434192906054143]),
'versicolor&0&293': np.array([0.14241819268815753, -0.8424615476000691]),
'versicolor&0&294': np.array([0.667423576348749, -0.6594086777766442]),
'versicolor&0&295': np.array([0.5429872243487625, -0.6697888833280774]),
'versicolor&0&296': np.array([0.1140907502997574, -0.8737800276630269]),
'versicolor&0&297': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&298': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&299': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&300': np.array([0.029402442458921384, -0.9481684282717414]),
'versicolor&0&301': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&302': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&303': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&0&304': np.array([0.3094460464703627, 0.11400643817329122]),
'versicolor&0&305': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&306': np.array([0.42809266524335826, -0.40375108595117376]),
'versicolor&0&307': np.array([0.45547700380103057, -0.6083463409799501]),
'versicolor&0&308': np.array([0.19002455311770447, -0.8848597943731074]),
'versicolor&0&309': np.array([0.436966114193701, -0.4638042290788281]),
'versicolor&0&310': np.array([0.45424510803217066, -0.6425314361631614]),
'versicolor&0&311': np.array([0.1746467870122951, -0.9073062742839755]),
'versicolor&0&312': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&313': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&314': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&1&0': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&1': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&2': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&3': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&4': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&5': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&6': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&7': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&8': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&9': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&10': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&11': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&12': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&13': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&14': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&15': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&16': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&17': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&18': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&19': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&20': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&21': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&22': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&23': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&24': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&25': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&26': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&27': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&28': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&29': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&30': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&31': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&32': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&33': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&34': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&35': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&36': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&37': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&38': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&39': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&40': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&41': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&42': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&43': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&44': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&45': np.array([0.7749499208750119, 0.8147189440804429]),
'versicolor&1&46': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&47': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&48': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&49': np.array([0.4079256832347186, 0.038455640985860955]),
'versicolor&1&50': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&51': np.array([0.18555813792691386, 0.6940923833143309]),
'versicolor&1&52': np.array([0.32639262064172164, 0.6296083447134281]),
'versicolor&1&53': np.array([0.6964303997553315, 0.7444536452136676]),
'versicolor&1&54': np.array([0.18216358701833335, 0.747615101407194]),
'versicolor&1&55': np.array([0.33549445287370383, 0.6526039763053625]),
'versicolor&1&56': np.array([0.7213651642695392, 0.7718874443854203]),
'versicolor&1&57': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&58': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&59': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&60': np.array([0.4933316375690332, 0.5272416708629276]),
'versicolor&1&61': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&62': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&63': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&1&64': np.array([0.3093950298647913, 0.1140298206733954]),
'versicolor&1&65': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&66': np.array([0.1413116283690917, 0.7479856297394165]),
'versicolor&1&67': np.array([0.189773257421942, 0.6552150653012478]),
'versicolor&1&68': np.array([0.40694846236352233, 0.5109051764198169]),
'versicolor&1&69': np.array([0.1390424906594644, 0.7991613016301518]),
'versicolor&1&70': np.array([0.1945777487290197, 0.6743932844312892]),
'versicolor&1&71': np.array([0.415695226122737, 0.5230815102377903]),
'versicolor&1&72': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&73': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&74': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&1&75': np.array([0.0, 0.4756207622944677]),
'versicolor&1&76': np.array([0.0, 0.4854334805210761]),
'versicolor&1&77': np.array([0.0, 0.16885577975809635]),
'versicolor&1&78': np.array([0.0, 0.395805885538554]),
'versicolor&1&79': np.array([0.0, 0.2538072707138344]),
'versicolor&1&80': np.array([0.0, 0.4854334805210761]),
'versicolor&1&81': np.array([0.0, 0.7613919530844643]),
'versicolor&1&82': np.array([0.0, 0.6668230985485095]),
'versicolor&1&83': np.array([0.0, 0.4904755652105692]),
'versicolor&1&84': np.array([0.0, 0.8121046082359693]),
'versicolor&1&85': np.array([0.0, 0.6855766903749089]),
'versicolor&1&86': np.array([0.0, 0.5008471974438506]),
'versicolor&1&87': np.array([0.0, 0.16885577975809635]),
'versicolor&1&88': np.array([0.0, 0.16885577975809635]),
'versicolor&1&89': np.array([0.0, 0.395805885538554]),
'versicolor&1&90': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&91': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&92': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&93': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&94': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&95': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&96': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&97': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&98': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&99': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&100': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&101': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&102': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&103': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&104': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&105': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&106': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&107': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&108': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&109': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&110': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&111': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&112': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&113': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&114': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&115': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&116': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&117': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&118': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&119': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&120': np.array([0.8224435822504677, 0.05315271528828394]),
'versicolor&1&121': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&122': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&123': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&124': np.array([0.8476206690613984, 0.02146454924522743]),
'versicolor&1&125': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&126': np.array([0.69362517791403, 0.2579390890424607]),
'versicolor&1&127': np.array([0.7261791877801502, 0.16248655642013624]),
'versicolor&1&128': np.array([0.8190416077589757, 0.05661509439536992]),
'versicolor&1&129': np.array([0.6654762076749751, 0.2949291633432878]),
'versicolor&1&130': np.array([0.7118161070185614, 0.17683644094125878]),
'versicolor&1&131': np.array([0.8165214253946836, 0.059175619390630096]),
'versicolor&1&132': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&133': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&134': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&135': np.array([0.5188109114552927, 0.03638964581864269]),
'versicolor&1&136': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&137': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&138': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&139': np.array([0.5436097000280874, 0.1461891067488832]),
'versicolor&1&140': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&141': np.array([0.32513442685780247, 0.6124765483184536]),
'versicolor&1&142': np.array([0.1812883360919208, 0.5504982486874137]),
'versicolor&1&143': np.array([0.4788153032824012, 0.08625929936974323]),
'versicolor&1&144': np.array([0.28490718210609345, 0.6650298146522879]),
'versicolor&1&145': np.array([0.1313204067730033, 0.597079642504441]),
'versicolor&1&146': np.array([0.46583127837967303, 0.09875847161509169]),
'versicolor&1&147': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&148': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&149': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&150': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&151': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&152': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&153': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&154': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&155': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&156': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&157': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&158': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&159': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&160': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&161': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&162': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&163': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&164': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&165': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&166': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&167': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&168': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&169': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&170': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&171': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&172': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&173': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&174': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&175': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&176': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&177': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&178': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&179': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&180': np.array([0.8224435822504677, 0.05315271528828394]),
'versicolor&1&181': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&182': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&183': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&184': np.array([0.8476206690613984, 0.02146454924522743]),
'versicolor&1&185': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&186': np.array([0.69362517791403, 0.2579390890424607]),
'versicolor&1&187': np.array([0.7261791877801502, 0.16248655642013624]),
'versicolor&1&188': np.array([0.8190416077589757, 0.05661509439536992]),
'versicolor&1&189': np.array([0.6654762076749751, 0.2949291633432878]),
'versicolor&1&190': np.array([0.7118161070185614, 0.17683644094125878]),
'versicolor&1&191': np.array([0.8165214253946836, 0.059175619390630096]),
'versicolor&1&192': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&193': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&194': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&195': np.array([0.5188109114552927, 0.03638964581864269]),
'versicolor&1&196': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&197': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&198': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&199': np.array([0.5436097000280874, 0.1461891067488832]),
'versicolor&1&200': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&201': np.array([0.32513442685780247, 0.6124765483184536]),
'versicolor&1&202': np.array([0.1812883360919208, 0.5504982486874137]),
'versicolor&1&203': np.array([0.4788153032824012, 0.08625929936974323]),
'versicolor&1&204': np.array([0.28490718210609345, 0.6650298146522879]),
'versicolor&1&205': np.array([0.1313204067730033, 0.597079642504441]),
'versicolor&1&206': np.array([0.46583127837967303, 0.09875847161509169]),
'versicolor&1&207': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&208': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&209': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&210': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&211': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&212': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&213': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&214': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&215': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&216': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&217': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&218': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&219': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&220': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&221': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&222': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&223': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&224': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&225': np.array([0.6253337666017573, 0.21983620140147825]),
'versicolor&1&226': np.array([0.6178968870349187, 0.22747652768125623]),
'versicolor&1&227': np.array([0.7245803616608639, 0.18141483095066183]),
'versicolor&1&228': np.array([0.6762617119303499, 0.19305674697949574]),
'versicolor&1&229': np.array([0.7182033715159247, 0.0970420677941148]),
'versicolor&1&230': np.array([0.6178968870349187, 0.22747652768125623]),
'versicolor&1&231': np.array([0.4976586558055923, 0.5393318265947251]),
'versicolor&1&232': np.array([0.4361093214026388, 0.4279491486345008]),
'versicolor&1&233': np.array([0.613985959011319, 0.23148898930908424]),
'versicolor&1&234': np.array([0.46747697713468217, 0.586607956360002]),
'versicolor&1&235': np.array([0.41044950174869577, 0.45415985894965977]),
'versicolor&1&236': np.array([0.6057447478066579, 0.23993389556303918]),
'versicolor&1&237': np.array([0.7245803616608639, 0.18141483095066183]),
'versicolor&1&238': np.array([0.7245803616608639, 0.18141483095066183]),
'versicolor&1&239': np.array([0.6762617119303499, 0.19305674697949574]),
'versicolor&1&240': np.array([0.056623968925773045, 0.43360725859686644]),
'versicolor&1&241': np.array([0.020169511418752378, 0.47015948158260334]),
'versicolor&1&242': np.array([0.5806365328450954, 0.47262706807712623]),
'versicolor&1&243': np.array([0.4146290154471569, 0.4964318942067898]),
'versicolor&1&244': np.array([0.3351719071445682, 0.20616862401308342]),
'versicolor&1&245': np.array([0.020169511418752378, 0.47015948158260334]),
'versicolor&1&246': np.array([0.24022705822940116, 0.7185371033867092]),
'versicolor&1&247': np.array([0.010447231513465048, 0.6616528865917504]),
'versicolor&1&248': np.array([0.024556360933646205, 0.4723948285969902]),
'versicolor&1&249': np.array([0.21321406009810842, 0.7648907754638917]),
'versicolor&1&250': np.array([-0.027450681014480036, 0.6999336015080245]),
'versicolor&1&251': np.array([-0.0164329511444131, 0.5132208276383963]),
'versicolor&1&252': np.array([0.5806365328450954, 0.47262706807712623]),
'versicolor&1&253': np.array([0.5806365328450954, 0.47262706807712623]),
'versicolor&1&254': np.array([0.4146290154471569, 0.4964318942067898]),
'versicolor&1&255': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&256': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&257': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&258': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&259': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&260': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&261': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&262': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&263': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&264': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&265': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&266': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&267': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&268': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&269': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&270': np.array([0.7749499208750119, 0.8147189440804429]),
'versicolor&1&271': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&272': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&273': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&274': np.array([0.4079256832347186, 0.038455640985860955]),
'versicolor&1&275': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&276': np.array([0.18555813792691386, 0.6940923833143309]),
'versicolor&1&277': np.array([0.32639262064172164, 0.6296083447134281]),
'versicolor&1&278': np.array([0.6964303997553315, 0.7444536452136676]),
'versicolor&1&279': np.array([0.18216358701833335, 0.747615101407194]),
'versicolor&1&280': np.array([0.33549445287370383, 0.6526039763053625]),
'versicolor&1&281': np.array([0.7213651642695392, 0.7718874443854203]),
'versicolor&1&282': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&283': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&284': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&285': np.array([0.7749499208750119, 0.8147189440804429]),
'versicolor&1&286': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&287': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&288': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&289': np.array([0.4079256832347186, 0.038455640985860955]),
'versicolor&1&290': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&291': np.array([0.18555813792691386, 0.6940923833143309]),
'versicolor&1&292': np.array([0.32639262064172164, 0.6296083447134281]),
'versicolor&1&293': np.array([0.6964303997553315, 0.7444536452136676]),
'versicolor&1&294': np.array([0.18216358701833335, 0.747615101407194]),
'versicolor&1&295': np.array([0.33549445287370383, 0.6526039763053625]),
'versicolor&1&296': np.array([0.7213651642695392, 0.7718874443854203]),
'versicolor&1&297': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&298': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&299': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&300': np.array([0.4933316375690332, 0.5272416708629276]),
'versicolor&1&301': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&302': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&303': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&1&304': np.array([0.3093950298647913, 0.1140298206733954]),
'versicolor&1&305': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&306': np.array([0.1413116283690917, 0.7479856297394165]),
'versicolor&1&307': np.array([0.189773257421942, 0.6552150653012478]),
'versicolor&1&308': np.array([0.40694846236352233, 0.5109051764198169]),
'versicolor&1&309': np.array([0.1390424906594644, 0.7991613016301518]),
'versicolor&1&310': np.array([0.1945777487290197, 0.6743932844312892]),
'versicolor&1&311': np.array([0.415695226122737, 0.5230815102377903]),
'versicolor&1&312': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&313': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&314': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&2&0': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&1': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&2': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&3': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&4': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&5': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&6': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&7': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&8': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&9': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&10': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&11': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&12': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&13': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&14': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&15': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&16': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&17': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&18': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&19': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&20': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&21': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&22': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&23': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&24': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&25': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&26': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&27': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&28': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&29': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&30': np.array([0.5188517506916897, 0.036358567813067386]),
'versicolor&2&31': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&32': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&33': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&34': np.array([0.5354807894355184, -0.3418054346754283]),
'versicolor&2&35': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&36': np.array([0.5761361484884252, -0.44637460220261904]),
'versicolor&2&37': np.array([0.7268664040181829, -0.40159406680426807]),
'versicolor&2&38': np.array([0.5917672401610737, -0.061499563231173816]),
'versicolor&2&39': np.array([0.5921993039887428, -0.46498571089163954]),
'versicolor&2&40': np.array([0.7470482158282458, -0.4169281153671854]),
'versicolor&2&41': np.array([0.5967658480721675, -0.06546963852548916]),
'versicolor&2&42': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&43': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&44': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&45': np.array([-0.8252668830593566, 0.11450866713130668]),
'versicolor&2&46': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&47': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&48': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&49': np.array([-0.8735738195653328, -0.046438180466149094]),
'versicolor&2&50': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&51': np.array([-0.8470213454017305, -0.0910504504559782]),
'versicolor&2&52': np.array([-0.8783521565540571, 0.01381094589198601]),
'versicolor&2&53': np.array([-0.8388485924434891, 0.09800790238640067]),
'versicolor&2&54': np.array([-0.8495871633670822, -0.08820642363054954]),
'versicolor&2&55': np.array([-0.8784816772224661, 0.017184907022714958]),
'versicolor&2&56': np.array([-0.835455914569297, 0.10189258327760495]),
'versicolor&2&57': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&58': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&59': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&60': np.array([-0.5227340800279543, 0.4209267574088147]),
'versicolor&2&61': np.array([-0.5140708637198534, 0.4305361238057349]),
'versicolor&2&62': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&63': np.array([-0.2741128763380603, -0.7260889090887469]),
'versicolor&2&64': np.array([-0.6188410763351541, -0.22803625884668638]),
'versicolor&2&65': np.array([-0.5140708637198534, 0.4305361238057349]),
'versicolor&2&66': np.array([-0.56940429361245, -0.3442345437882425]),
'versicolor&2&67': np.array([-0.6452502612229726, -0.04686872432129788]),
'versicolor&2&68': np.array([-0.596973015481227, 0.37395461795328944]),
'versicolor&2&69': np.array([-0.5760086048531655, -0.3353570725513232]),
'versicolor&2&70': np.array([-0.6488228567611906, -0.03186184826812757]),
'versicolor&2&71': np.array([-0.5903420131350324, 0.384224764046184]),
'versicolor&2&72': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&73': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&74': np.array([-0.2741128763380603, -0.7260889090887469]),
'versicolor&2&75': np.array([0.0, 0.47562425924289314]),
'versicolor&2&76': np.array([0.0, 0.4854368956593117]),
'versicolor&2&77': np.array([0.0, -0.7348263896003956]),
'versicolor&2&78': np.array([0.0, -0.7920887571493729]),
'versicolor&2&79': np.array([0.0, -0.507614207038711]),
'versicolor&2&80': np.array([0.0, 0.4854368956593117]),
'versicolor&2&81': np.array([0.0, -0.3982542883933272]),
'versicolor&2&82': np.array([0.0, -0.08633733326458487]),
'versicolor&2&83': np.array([0.0, 0.4039238345412103]),
'versicolor&2&84': np.array([0.0, -0.38897705551367706]),
'versicolor&2&85': np.array([0.0, -0.06915310813754129]),
'versicolor&2&86': np.array([0.0, 0.41580041887839214]),
'versicolor&2&87': np.array([0.0, -0.7348263896003956]),
'versicolor&2&88': np.array([0.0, -0.7348263896003956]),
'versicolor&2&89': np.array([0.0, -0.7920887571493729]),
'versicolor&2&90': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&91': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&92': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&93': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&94': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&95': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&96': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&97': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&98': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&99': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&100': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&101': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&102': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&103': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&104': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&105': np.array([0.5188517506916897, 0.036358567813067386]),
'versicolor&2&106': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&107': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&108': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&109': np.array([0.5354807894355184, -0.3418054346754283]),
'versicolor&2&110': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&111': np.array([0.5761361484884252, -0.44637460220261904]),
'versicolor&2&112': np.array([0.7268664040181829, -0.40159406680426807]),
'versicolor&2&113': np.array([0.5917672401610737, -0.061499563231173816]),
'versicolor&2&114': np.array([0.5921993039887428, -0.46498571089163954]),
'versicolor&2&115': np.array([0.7470482158282458, -0.4169281153671854]),
'versicolor&2&116': np.array([0.5967658480721675, -0.06546963852548916]),
'versicolor&2&117': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&118': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&119': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&120': np.array([-0.7638917827493686, 0.868015757634957]),
'versicolor&2&121': np.array([-0.8001553485824509, 0.9049358162753539]),
'versicolor&2&122': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&123': np.array([-0.14690789675963867, -0.7352367260447958]),
'versicolor&2&124': np.array([-0.32941440381886555, -0.4173178729969913]),
'versicolor&2&125': np.array([-0.8001553485824509, 0.9049358162753539]),
'versicolor&2&126': np.array([-0.18291442454393395, -0.2654898014002494]),
'versicolor&2&127': np.array([-0.5797728557269727, 0.3163189837954924]),
'versicolor&2&128': np.array([-0.7579323596667402, 0.8054136823046655]),
'versicolor&2&129': np.array([-0.1948624323669993, -0.23753953755286383]),
'versicolor&2&130': np.array([-0.6437698977881832, 0.3909540110317858]),
'versicolor&2&131': np.array([-0.7963046521980063, 0.846536369471985]),
'versicolor&2&132': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&133': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&134': np.array([-0.14690789675963867, -0.7352367260447958]),
'versicolor&2&135': np.array([-0.3219660907491514, 0.7482043503408669]),
'versicolor&2&136': np.array([-0.43839553940476644, 0.8642446918440131]),
'versicolor&2&137': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&138': np.array([0.17291299562995102, -0.7651995812779756]),
'versicolor&2&139': np.array([0.2626914501948546, -0.5596191134224637]),
'versicolor&2&140': np.array([-0.43839553940476644, 0.8642446918440131]),
'versicolor&2&141': np.array([0.4734444929420575, -0.6150974537943872]),
'versicolor&2&142': np.array([0.5369392542176313, -0.430867927332838]),
'versicolor&2&143': np.array([-0.19892251970509112, 0.5718543863753405]),
'versicolor&2&144': np.array([0.5071047612208237, -0.6507546896558788]),
'versicolor&2&145': np.array([0.5629877361048359, -0.4485515113017818]),
'versicolor&2&146': np.array([-0.3047657227470458, 0.6788631774846587]),
'versicolor&2&147': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&148': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&149': np.array([0.17291299562995102, -0.7651995812779756]),
'versicolor&2&150': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&151': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&152': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&153': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&154': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&155': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&156': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&157': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&158': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&159': | np.array([0.7292373173099087, -0.6975400952780954]) | numpy.array |
import os
import sys
import unittest
import numpy as np
import NumpyTestCase
import cPickle
try:
import pathLocate
except ImportError:
from unittests import pathLocate
unittest_dir = pathLocate.getUnitTestDirectory()
sys.path.append(pathLocate.getRootDirectory())
from TrackGenerator import trackSize
class TestRmaxModel(NumpyTestCase.NumpyTestCase):
def setUp(self):
np.random.seed(10)
self.dparray = np.arange(10, 51, 5)
self.latarray = | np.arange(-23, -5, 2) | numpy.arange |
#https://docs.pymc.io/notebooks/api_quickstart.html
#%matplotlib inline
import numpy as np
import theano.tensor as tt
import pymc3 as pm
import seaborn as sns
import matplotlib.pyplot as plt
from time import time
#sns.set_context('notebook')
plt.style.use('seaborn-darkgrid')
print('Running on PyMC3 v{}'.format(pm.__version__))
np.random.seed(0)
N = 100
x = np.random.randn(100)
mu_prior = 1.1
sigma_prior = 1.2
Sigma_prior = sigma_prior**2
sigma_x = 1.3
Sigma_x = sigma_x**2
with pm.Model() as model:
mu = pm.Normal('mu', mu=mu_prior, sd=sigma_prior)
obs = pm.Normal('obs', mu=mu, sd=sigma_x, observed=x)
time_start = time()
mcmc_samples = pm.sample(1000, tune=500) # mcmc
print('time spent MCMC {:0.3f}'.format(time() - time_start))
time_start = time()
vi_post = pm.fit() # variational inference
print('time spent VI {:0.3f}'.format(time() - time_start))
vi_samples = vi_post.sample(1000)
mu_clamped = -0.5
logp = model.logp({'mu': mu_clamped})
import scipy.stats
# Computed the log joint manually
log_prior = scipy.stats.norm(mu_prior, sigma_prior).logpdf(mu_clamped)
log_lik = np.sum(scipy.stats.norm(mu_clamped, sigma_x).logpdf(x))
log_joint = log_prior + log_lik
assert np.isclose(logp, log_joint)
# Standard MCMC diagnostics
pm.traceplot(mcmc_samples)
pm.plot_posterior(mcmc_samples);
Rhat = pm.gelman_rubin(mcmc_samples)
print(Rhat)
# Estimate posterior over mu when unclamped
# Bayes rule for Gaussians MLAPA sec 5.6.2
Sigma_post = 1/( 1/Sigma_prior + N/Sigma_x )
xbar = np.mean(x)
mu_post = Sigma_post * (1/Sigma_x * N * xbar + 1/Sigma_prior * mu_prior)
vals = mcmc_samples.get_values('mu')
mu_post_mcmc = | np.mean(vals) | numpy.mean |
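# --- Added check (illustrative, not part of the original script) ---
# Compare the sampled posteriors with the analytic conjugate-Gaussian posterior
# computed above. Assumes the names defined in this script (vals, vi_samples,
# mu_post, Sigma_post) and that the VI trace exposes get_values like the MCMC
# trace; the tolerances are illustrative.
mu_post_vi = np.mean(vi_samples.get_values('mu'))
sd_post_mcmc = np.std(vals)
print('analytic posterior: mean {:0.3f}, sd {:0.3f}'.format(mu_post, np.sqrt(Sigma_post)))
print('MCMC posterior:     mean {:0.3f}, sd {:0.3f}'.format(np.mean(vals), sd_post_mcmc))
print('VI posterior:       mean {:0.3f}'.format(mu_post_vi))
assert np.isclose(np.mean(vals), mu_post, atol=0.1)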
'''
@File :dataloader.py
@Author:Morton
@Date :2020/6/18 16:04
@Desc :The basic loading function to extract raw content and mention graph information from raw data "user_info.xxx.gz".
'''
# -*- coding:utf-8 -*-
import os
import re
import csv
import kdtree
import gensim
import numpy as np
import pandas as pd
import networkx as nx
from haversine import haversine
from collections import defaultdict, OrderedDict
from sklearn.neighbors import NearestNeighbors
class DataLoader:
def __init__(self, data_home, bucket_size=50, encoding='utf-8', celebrity_threshold=10, one_hot_labels=False,
mindf=10, maxdf=0.2, norm='l2', idf=True, btf=True, tokenizer=None, subtf=False, stops=None,
token_pattern=r'(?u)(?<![#@])\b\w\w+\b', vocab=None):
self.data_home = data_home
self.bucket_size = bucket_size
self.encoding = encoding
self.celebrity_threshold = celebrity_threshold
self.one_hot_labels = one_hot_labels
self.mindf = mindf
self.maxdf = maxdf
self.norm = norm
self.idf = idf
self.btf = btf
self.tokenizer = tokenizer
self.subtf = subtf
self.stops = stops if stops else 'english'
self.token_pattern = r'(?u)(?<![#@|,.-_+^……$%&*(); :`,。?、:;;《》{}“”~#¥])\b\w\w+\b'
self.vocab = vocab
def load_data(self):
print('loading the dataset from: {}'.format(self.data_home))
train_file = os.path.join(self.data_home, 'user_info.train.gz')
dev_file = os.path.join(self.data_home, 'user_info.dev.gz')
test_file = os.path.join(self.data_home, 'user_info.test.gz')
df_train = pd.read_csv(train_file, delimiter='\t', encoding=self.encoding, names=['user', 'lat', 'lon', 'text'],
quoting=csv.QUOTE_NONE, error_bad_lines=False)
df_dev = pd.read_csv(dev_file, delimiter='\t', encoding=self.encoding, names=['user', 'lat', 'lon', 'text'],
quoting=csv.QUOTE_NONE, error_bad_lines=False)
df_test = pd.read_csv(test_file, delimiter='\t', encoding=self.encoding, names=['user', 'lat', 'lon', 'text'],
quoting=csv.QUOTE_NONE, error_bad_lines=False)
df_train.dropna(inplace=True)
df_dev.dropna(inplace=True)
df_test.dropna(inplace=True)
df_train['user'] = df_train['user'].apply(lambda x: str(x).lower())
df_train.drop_duplicates(['user'], inplace=True, keep='last')
df_train.set_index(['user'], drop=True, append=False, inplace=True)
df_train.sort_index(inplace=True)
df_dev['user'] = df_dev['user'].apply(lambda x: str(x).lower())
df_dev.drop_duplicates(['user'], inplace=True, keep='last')
df_dev.set_index(['user'], drop=True, append=False, inplace=True)
df_dev.sort_index(inplace=True)
df_test['user'] = df_test['user'].apply(lambda x: str(x).lower())
df_test.drop_duplicates(['user'], inplace=True, keep='last')
df_test.set_index(['user'], drop=True, append=False, inplace=True)
df_test.sort_index(inplace=True)
self.df_train = df_train
self.df_dev = df_dev
self.df_test = df_test
def get_graph(self):
g = nx.Graph()
nodes = set(self.df_train.index.tolist() + self.df_dev.index.tolist() + self.df_test.index.tolist())
assert len(nodes) == len(self.df_train) + len(self.df_dev) + len(self.df_test), 'duplicate target node'
nodes_list = self.df_train.index.tolist() + self.df_dev.index.tolist() + self.df_test.index.tolist()
node_id = {node: id for id, node in enumerate(nodes_list)}
g.add_nodes_from(node_id.values())
for node in nodes:
g.add_edge(node_id[node], node_id[node])
pattern = '(?<=^|(?<=[^a-zA-Z0-9-_\\.]))@([A-Za-z]+[A-Za-z0-9_]+)'
pattern = re.compile(pattern)
print('start adding the train graph')
externalNum = 0
for i in range(len(self.df_train)):
user = self.df_train.index[i]
user_id = node_id[user]
mentions = [m.lower() for m in pattern.findall(self.df_train.text[i])]
idmentions = set()
for m in mentions:
if m in node_id:
idmentions.add(node_id[m])
else:
id = len(node_id)
node_id[m] = id
idmentions.add(id)
externalNum += 1
if len(idmentions) > 0:
g.add_nodes_from(idmentions)
for id in idmentions:
g.add_edge(user_id, id)
print('start adding the dev graph')
externalNum = 0
for i in range(len(self.df_dev)):
user = self.df_dev.index[i]
user_id = node_id[user]
mentions = [m.lower() for m in pattern.findall(self.df_dev.text[i])]
idmentions = set()
for m in mentions:
if m in node_id:
idmentions.add(node_id[m])
else:
id = len(node_id)
node_id[m] = id
idmentions.add(id)
externalNum += 1
if len(idmentions) > 0:
g.add_nodes_from(idmentions)
for id in idmentions:
g.add_edge(id, user_id)
print('start adding the test graph')
externalNum = 0
for i in range(len(self.df_test)):
user = self.df_test.index[i]
user_id = node_id[user]
mentions = [m.lower() for m in pattern.findall(self.df_test.text[i])]
idmentions = set()
for m in mentions:
if m in node_id:
idmentions.add(node_id[m])
else:
id = len(node_id)
node_id[m] = id
idmentions.add(id)
externalNum += 1
if len(idmentions) > 0:
g.add_nodes_from(idmentions)
for id in idmentions:
g.add_edge(id, user_id)
print('#nodes: %d, #edges: %d' % (nx.number_of_nodes(g), nx.number_of_edges(g)))
celebrities = []
for i in range(len(nodes_list), len(node_id)):
deg = len(g[i])
if deg == 1 or deg > self.celebrity_threshold:
celebrities.append(i)
print('removing %d celebrity nodes with degree higher than %d' % (len(celebrities), self.celebrity_threshold))
g.remove_nodes_from(celebrities)
print('projecting the graph')
projected_g = self.efficient_collaboration_weighted_projected_graph2(g, range(len(nodes_list)))
print('#nodes: %d, #edges: %d' % (nx.number_of_nodes(projected_g), nx.number_of_edges(projected_g)))
self.graph = projected_g
def efficient_collaboration_weighted_projected_graph2(self, B, nodes):
# B: the whole graph including known nodes and mentioned nodes --large graph
# nodes: the node_id of known nodes --small graph node
nodes = set(nodes)
G = nx.Graph()
G.add_nodes_from(nodes)
all_nodes = set(B.nodes())
for m in all_nodes:
nbrs = B[m]
target_nbrs = [t for t in nbrs if t in nodes]
# add edge between known nodesA(m) and known nodesB(n)
if m in nodes:
for n in target_nbrs:
if m < n:
if not G.has_edge(m, n):
                            # Morton added to exclude the long edges
G.add_edge(m, n)
            # add an edge between every pair of known nodes n1 and n2 that are
            # both linked to m (one-mode projection of the mention graph)
for n1 in target_nbrs:
for n2 in target_nbrs:
if n1 < n2:
if not G.has_edge(n1, n2):
G.add_edge(n1, n2)
return G
def get_raw_content_and_save(self, save_file_path):
        # Morton added to save the raw content data into a file.
if os.path.exists(save_file_path):
print("content already saved.")
return None
data = list(self.df_train.text.values) + list(self.df_dev.text.values) + list(self.df_test.text.values)
file = open(save_file_path, 'w', encoding='utf-8')
for i in range(len(data)):
file.write(str(data[i]) + '\n')
file.close()
print("content saved in {}".format(save_file_path))
def load_doc2vec_feature(self, doc2vec_model_file):
"""
        doc2vec_model_file: the file that includes all doc2vec features of the raw content.
"""
# load model
model = gensim.models.doc2vec.Doc2Vec.load(doc2vec_model_file)
# train data features
feature_list = list()
index_l = 0
index_r = len(self.df_train.text)
for i in range(index_l, index_r):
feature_list.append(model.docvecs[i])
self.X_train = np.array(feature_list)
# dev data features
feature_list = list()
index_l = len(self.df_train.text)
index_r = len(self.df_train.text) + len(self.df_dev.text)
for i in range(index_l, index_r):
feature_list.append(model.docvecs[i])
self.X_dev = np.array(feature_list)
# test data features
feature_list = list()
index_l = len(self.df_train.text) + len(self.df_dev.text)
index_r = len(self.df_train.text) + len(self.df_dev.text) + len(self.df_test.text)
for i in range(index_l, index_r):
feature_list.append(model.docvecs[i])
self.X_test = | np.array(feature_list) | numpy.array |
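# --- Added usage sketch (illustrative, not part of the original module) ---
# Shows the intended call order of DataLoader; the paths below are placeholder
# assumptions, not files that ship with the project.
def _example_dataloader_pipeline():
    loader = DataLoader(data_home='./data/geo', celebrity_threshold=10)
    loader.load_data()                              # read user_info.{train,dev,test}.gz
    loader.get_graph()                              # build and project the @-mention graph
    loader.get_raw_content_and_save('./raw_content.txt')
    loader.load_doc2vec_feature('./doc2vec.model')  # fills X_train / X_dev / X_test
    return loader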
import numpy as np
import pytest
from skypy.utils.photometry import HAS_SPECLITE
def test_magnitude_functions():
from skypy.utils.photometry import (luminosity_in_band,
luminosity_from_absolute_magnitude,
absolute_magnitude_from_luminosity)
# convert between absolute luminosity and magnitude
assert np.isclose(luminosity_from_absolute_magnitude(-22), 630957344.5)
assert np.isclose(absolute_magnitude_from_luminosity(630957344.5), -22)
# convert with standard luminosities
for ref, mag in luminosity_in_band.items():
assert np.isclose(luminosity_from_absolute_magnitude(mag, ref), 1.0)
assert np.isclose(absolute_magnitude_from_luminosity(1.0, ref), mag)
# error when unknown reference is used
with pytest.raises(KeyError):
luminosity_from_absolute_magnitude(0., 'unknown')
with pytest.raises(KeyError):
absolute_magnitude_from_luminosity(1., 'unknown')
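# --- Added worked example (illustrative, not part of the original tests) ---
# The conversion exercised above is M = -2.5 * log10(L), so an absolute
# magnitude of -22 corresponds to L = 10**(0.4 * 22) ~ 6.31e8 in reference units.
def _example_magnitude_arithmetic():
    return 10 ** (0.4 * 22)  # ~630957344.5, matching the assertion above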
@pytest.mark.skipif(not HAS_SPECLITE, reason='test requires speclite')
def test_mag_ab_standard_source():
from astropy import units
from speclite.filters import FilterResponse
from skypy.utils.photometry import mag_ab
# create a filter
filt_lam = np.logspace(0, 4, 1000)*units.AA
filt_tx = np.exp(-((filt_lam - 1000*units.AA)/(100*units.AA))**2)
filt_tx[[0, -1]] = 0
FilterResponse(wavelength=filt_lam, response=filt_tx,
meta=dict(group_name='test', band_name='filt'))
# test that the AB standard source has zero magnitude
lam = filt_lam # same grid to prevent interpolation issues
flam = 0.10885464149979998*units.Unit('erg s-1 cm-2 AA')/lam**2
m = mag_ab(lam, flam, 'test-filt')
assert np.isclose(m, 0)
@pytest.mark.skipif(not HAS_SPECLITE, reason='test requires speclite')
def test_mag_ab_redshift_dependence():
from astropy import units
from speclite.filters import FilterResponse
from skypy.utils.photometry import mag_ab
# make a wide tophat bandpass
filt_lam = [1.0e-10, 1.1e-10, 1.0e0, 0.9e10, 1.0e10]
filt_tx = [0., 1., 1., 1., 0.]
FilterResponse(wavelength=filt_lam, response=filt_tx,
meta=dict(group_name='test', band_name='filt'))
# create a narrow gaussian source
lam = | np.logspace(-11, 11, 1000) | numpy.logspace |
import multiprocessing
import os
import tempfile
import numpy as np
from collections import OrderedDict
import cloudpickle
import time
from rllab.sampler.utils import rollout
from rllab.misc import logger
from curriculum.envs.base import FixedStateGenerator
class FunctionWrapper(object):
"""Wrap a function for use with parallelized map.
"""
def __init__(self, func, *args, **kwargs):
"""Construct the function oject.
Args:
func: a top level function, or a picklable callable object.
            *args and **kwargs: Any additional required environment data.
"""
self.func = func
self.args = args
self.kwargs = kwargs
def __call__(self, obj):
if obj is None:
return self.func(*self.args, **self.kwargs)
else:
return self.func(obj, *self.args, **self.kwargs)
def __getstate__(self):
""" Here we overwrite the default pickle protocol to use cloudpickle. """
return dict(
func=cloudpickle.dumps(self.func),
args=cloudpickle.dumps(self.args),
kwargs=cloudpickle.dumps(self.kwargs)
)
def __setstate__(self, d):
self.func = cloudpickle.loads(d['func'])
self.args = cloudpickle.loads(d['args'])
self.kwargs = cloudpickle.loads(d['kwargs'])
def disable_cuda_initializer(*args, **kwargs):
import os
os.environ['THEANO_FLAGS'] = 'device=cpu'
os.environ['CUDA_VISIBLE_DEVICES'] = ''
def parallel_map(func, iterable_object, num_processes=-1):
"""Parallelized map function based on python process
Args:
func: Pickleable callable object that takes one parameter.
iterable_object: An iterable of elements to map the function on.
        num_processes: Number of processes to use. When num_processes is 1,
no new process will be created.
Returns:
        The list resulting from calling func on each element of the original iterable.
"""
if num_processes == 1:
return [func(x) for x in iterable_object]
if num_processes == -1:
from rllab.sampler.stateful_pool import singleton_pool
num_processes = singleton_pool.n_parallel
process_pool = multiprocessing.Pool(
num_processes,
initializer=disable_cuda_initializer
)
results = process_pool.map(func, iterable_object)
process_pool.close()
process_pool.join()
return results
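# --- Added usage sketch (illustrative, not part of the original module) ---
# Shows how FunctionWrapper and parallel_map are meant to be combined: the extra
# keyword argument is bound once, and the iterable supplies the first positional
# argument. The function below is purely illustrative.
def _example_parallel_map():
    def _scale(x, factor=1.0):
        return x * factor
    wrapped = FunctionWrapper(_scale, factor=2.0)
    return parallel_map(wrapped, [1, 2, 3], num_processes=1)  # -> [2.0, 4.0, 6.0]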
def compute_rewards_from_paths(all_paths, key='rewards', as_goal=True, env=None, terminal_eps=0.1):
all_rewards = []
all_states = []
for paths in all_paths:
for path in paths:
if key == 'competence':
#goal = tuple(path['env_infos']['goal'][0])
goal_np_array = np.array(tuple(path['env_infos']['goal'][0]))
start_state = np.array(tuple(env.transform_to_goal_space(path['observations'][0])))
end_state = np.array(tuple(env.transform_to_goal_space(path['observations'][-1])))
final_dist = np.linalg.norm(goal_np_array - end_state)
initial_dist = np.linalg.norm(start_state - goal_np_array)
if final_dist > initial_dist:
competence = -1
elif final_dist < terminal_eps:
competence = 0
else:
competence = -final_dist / initial_dist
reward = competence
else:
reward = evaluate_path(path, key=key)
if as_goal:
state = tuple(path['env_infos']['goal'][0])
else:
state = tuple(env.transform_to_start_space(path['observations'][0]))
all_states.append(state)
all_rewards.append(reward)
return [all_states, all_rewards]
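# --- Added note (illustrative, not part of the original module) ---
# compute_rewards_from_paths expects a list of lists of rollout dicts;
# evaluate_path itself is defined elsewhere in the project. A minimal fake
# path showing the fields accessed above:
def _example_fake_paths():
    return [[{
        'observations': np.zeros((5, 4)),
        'rewards': np.zeros(5),
        'env_infos': {'goal': np.array([[1.0, 1.0]] * 5)},
    }]]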
def label_states_from_paths(all_paths, min_reward=0, max_reward=1, key='rewards', as_goal=True,
old_rewards=None, improvement_threshold=0, n_traj=1, env=None, return_mean_rewards = False,
order_of_states = None):
state_dict = {}
for paths in all_paths:
for path in paths:
reward = evaluate_path(path, key=key)
if as_goal:
state = tuple(path['env_infos']['goal'][0])
else:
env_infos_first_time_step = {key: value[0] for key, value in path['env_infos'].items()}
state = tuple(env.transform_to_start_space(path['observations'][0], env_infos_first_time_step))
if state in state_dict:
state_dict[state].append(reward)
else:
state_dict[state] = [reward]
states = []
unlabeled_state = []
mean_rewards = []
if order_of_states is None:
for state, rewards in state_dict.items():
if len(rewards) >= n_traj:
states.append(list(state))
mean_rewards.append(np.mean(rewards))
# case where you want states returned in a specific order (useful for TSCL)
else:
updated = []
for state in order_of_states:
states.append(state)
if state not in state_dict or len(state_dict[tuple(state)]) < n_traj:
mean_rewards.append(0)
updated.append(False)
else:
mean_rewards.append(np.mean(state_dict[tuple(state)]))
updated.append(True)
    # Reshape the mean rewards into a column vector.
mean_rewards = np.array(mean_rewards).reshape(-1, 1)
labels = compute_labels(mean_rewards, old_rewards=old_rewards, min_reward=min_reward, max_reward=max_reward,
improvement_threshold=improvement_threshold)
states = | np.array(states) | numpy.array |
'''Module for all things Radio Frequency Interference Flagging'''
import numpy as np
from scipy.signal import medfilt
def medmin(d):
"""Calculate the median minus median statistic of array.
Args:
d (array): 2D data array
Returns:
(array): array with the statistic applied.
"""
#return np.median(np.min(chisq,axis=0))
mn = np.min(d,axis=0)
return 2*np.median(mn) - np.min(mn)
def medminfilt(d, K=8):
"""Filter an array on scales of K indexes with medmin.
Args:
d (array): 2D data array.
K (int, optional): integer representing box size to apply statistic.
Returns:
array: filtered array. Same shape as input array.
"""
d_sm = np.empty_like(d)
for i in xrange(d.shape[0]):
for j in xrange(d.shape[1]):
i0,j0 = max(0,i-K), max(0,j-K)
i1,j1 = min(d.shape[0], i+K), min(d.shape[1], j+K)
d_sm[i,j] = medmin(d[i0:i1,j0:j1])
return d_sm
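# --- Added example (illustrative, not part of the original module) ---
# medminfilt on a toy waterfall: a flat background plus one strong outlier;
# the filtered array tracks the background level rather than the spike.
def _example_medminfilt():
    d = np.ones((32, 32)) + 0.01 * np.random.randn(32, 32)
    d[10, 20] = 50.0  # inject an RFI-like spike
    return medminfilt(d, K=4)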
#def omni_chisq_to_flags(chisq, K=8, sigma=6, sigl=2):
# '''Returns a mask of RFI given omnical's chisq statistic'''
# if False:
# w_sm = np.empty_like(chisq)
# sig = np.empty_like(chisq)
# #get smooth component of chisq
# for i in xrange(chisq.shape[0]):
# for j in xrange(chisq.shape[1]):
# i0,j0 = max(0,i-K), max(0,j-K)
# i1,j1 = min(chisq.shape[0], i+K), min(chisq.shape[1], j+K)
# #w_sm[i,j] = np.median(chisq[i0:i1,j0:j1])
# w_sm[i,j] = medmin(chisq[i0:i1,j0:j1])
# else: w_sm = medfilt(chisq, 2*K+1)
# #the residual from smooth component
# w_rs = chisq - w_sm
# w_sq = np.abs(w_rs)**2
# #get the standard deviation of the media.
# if False:
# for i in xrange(chisq.shape[0]):
# for j in xrange(chisq.shape[1]):
# i0,j0 = max(0,i-K), max(0,j-K)
# i1,j1 = min(chisq.shape[0], i+K), min(chisq.shape[1], j+K)
# #sig[i,j] = np.sqrt(np.median(w_sq[i0:i1,j0:j1]))
# sig[i,j] = np.sqrt(medmin(w_sq[i0:i1,j0:j1]))
# else: sig = np.sqrt(medfilt(w_sq, 2*K+1))
# #Number of sigma above the residual unsmooth part is.
# f1 = w_rs / sig
# return watershed_flag(f1, sig_init=sigma, sig_adj=sigl)
def watershed_flag(d, f=None, sig_init=6, sig_adj=2):
'''Generates a mask for flags using a watershed algorithm.
Returns a watershed flagging of an array that is in units of standard
deviation (i.e. how many sigma the datapoint is from the center).
Args:
d (array): 2D array to perform watershed on.
d should be in units of standard deviations.
f (array, optional): input flags. Same size as d.
sig_init (int): number of sigma to flag above, initially.
sig_adj (int): number of sigma to flag above for points
near flagged points.
Returns:
bool array: Array of mask values for d.
'''
#mask off any points above 'sig' sigma and nan's.
f1 = np.ma.array(d, mask=np.where(d > sig_init,1,0))
f1.mask |= np.isnan(f1)
    if f is not None: f1.mask |= f
# Loop over flagged points and examine adjacent points to see if they exceed sig_adj
#Start the watershed
prevx,prevy = 0,0
x,y = np.where(f1.mask)
while x.size != prevx and y.size != prevy:
for dx,dy in [(1,0),(-1,0),(0,1),(0,-1)]:
prevx,prevy = x.size, y.size
xp, yp = (x+dx).clip(0,f1.shape[0]-1), (y+dy).clip(0,f1.shape[1]-1)
i = np.where(f1[xp,yp] > sig_adj)[0] # if sigma > 'sigl'
f1.mask[xp[i],yp[i]] = 1
x,y = np.where(f1.mask)
return f1.mask
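# --- Added usage sketch (illustrative, not part of the original module) ---
# watershed_flag expects data already scaled to "number of sigma" units; here a
# crude global robust estimate is used for the scaling, which is an assumption.
def _example_watershed_flag(data):
    med = np.median(data)
    sig = np.sqrt(np.median(np.abs(data - med) ** 2))
    d_sigma = (data - med) / sig
    return watershed_flag(d_sigma, sig_init=6, sig_adj=2)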
def toss_times_freqs(mask, sig_t=6, sig_f=6):
"""XXX what does this function do? Needs test."""
f1ch = np.average(f1.mask, axis=0); f1ch.shape = (1,-1)
#The cut off value is a made up number here...sig = 'sig' if none flagged.
f1.mask = np.logical_or(f1.mask, np.where(f1 > sig_init*(1-f1ch), 1, 0))
f1t = np.average(f1.mask, axis=1) # band-avg flag vs t
ts = np.where(f1t > 2*np.median(f1t))
# Geometry Module
import numpy as np
from shapely.geometry import Polygon
from shapely.geometry.point import Point
def width(mag, maglim=20, seeing=1):
"""
Gives the approximate size of a star on a detector, based on its magnitude,
the highest visible magnitude and the seeing.
Parameters
----------
mag : float
magnitude of the star.
maglim : float
highest magnitude visible by the detector.
seeing : float
seeing, FWHM of the point spread function of the atmosphere.
Returns
-------
out : the size of the point on the detector, in arcsec.
"""
if mag >= maglim:
w = 0
else:
w = 0.58 * seeing * np.sqrt(maglim - mag)
return w # arcsec
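# Minimal usage sketch for width, using the default limiting magnitude and a
# seeing of 1 arcsec:
def _demo_width():
    for m in (10.0, 15.0, 19.0, 21.0):
        print(m, width(m, maglim=20, seeing=1.0), "arcsec")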
def order_shape(n: int, x, y, mag, config, maglim, seeing):
"""
Generates shapes representing the visible n'th order of the point spread
function of a star on the detector in the case of slitless spectroscopy.
Parameters
----------
n : int
spectrum order
x, y : floats
position of the star on the detector (along the 0x and 0y axes).
mag : float
apparent magnitude of the star.
config : Configuration object
made from a configuration file.
maglim : float
highest visible magnitude on the detector.
seeing : float
seeing, FWHM of the point spread function of the telescope.
Returns
-------
out : shapely Polygon object
"""
try:
disperserate = False
lmin, lmax = config.lambda_min, config.lambda_max
gpm = config.grooves_per_mm
d2ccd = config.distance2ccd
p2m, p2a = config.pixel2mm, config.pixel2arcsec
except BaseException:
disperserate = True
lmin, lmax = config.lambda_min, config.lambda_max
dr, p2a = config.dispersion_ratio, config.pixel2arcsec
w = width(mag, maglim, seeing)/p2a
if n == 0:
order = Point(x, y).buffer(w)
elif isinstance(n, int):
if disperserate:
hstart, hstop = lmin/dr, lmax/dr
else:
hstart = n*np.tan(np.arcsin(lmin*gpm))*d2ccd/p2m
hstop = n*np.tan(np.arcsin(lmax*gpm))*d2ccd/p2m
xmin, xmax = x + hstart, x + hstop
ymin, ymax = y - w/abs(n), y + w/abs(n)
order = Polygon([[xmin, ymin], [xmin, ymax],
[xmax, ymax], [xmax, ymin]])
return order
def rotate_around(matrix, centre, angle):
"""
Rotates a collection of points (2xn matrix) by an angle around a centre.
Parameters
----------
matrix : 2xn array_like
concatenation of n points on a 2D plane.
centre : 2x1 array_like
centre around which the points will rotate.
angle : float
rotation angle, in radian.
Returns
-------
out : 2xn numpy array.
the same concatenation of points but rotated by the given angle around
the centre.
"""
RotMat = np.array(((np.cos(angle), -np.sin(angle)),
(np.sin(angle), np.cos(angle))))
n = matrix.shape[-1]
X0, Y0 = centre
CentreMat = np.repeat([[X0], [Y0]], n, axis=1)
return np.dot(RotMat, matrix - CentreMat)
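# Minimal usage sketch for rotate_around: rotate two points by 90 degrees
# about the centre (1, 1). As written, the returned coordinates are expressed
# relative to the rotation centre.
def _demo_rotate_around():
    pts = np.array([[2.0, 1.0],   # x coordinates of the two points
                    [1.0, 2.0]])  # y coordinates of the two points
    print(rotate_around(pts, (1.0, 1.0), np.pi / 2))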
#
import numpy as np
import netCDF4
import scipy.ndimage as ndimage
import datetime as dt
import cartopy
import cartopy.crs as ccrs
import cartopy.feature as cpf
from cartopy.io.shapereader import Reader
from cartopy.io.shapereader import natural_earth
from cartopy.mpl.gridliner import LONGITUDE_FORMATTER, LATITUDE_FORMATTER
import matplotlib.pyplot as plt
import matplotlib.ticker as mticker
import matplotlib.patches as mpatches
from matplotlib import colors
from mpl_toolkits.axes_grid1.inset_locator import inset_axes
from sinop_funciones import mapa_base
from sinop_funciones import get_index_time
from sinop_funciones import get_index_lat
from sinop_funciones import extract_var
def extraer_variable(file, fecha, nomvar, llat, llon):
"""
Extract variables in space (X, Y) and time for the variable
requested in nomvar
"""
l_lat = llat
l_lon = np.array(llon) % 360
i_lat, i_lon, lat, lon = get_index_lat(fecha, file, llat, llon)
tiempos = get_index_time(file, fecha)
# Create an auxiliary variable
ndays = 8
res = np.empty([ndays, len(lat), len(lon)])
res[:] = np.nan
fdates = []
if nomvar == 'precip':
# Read the variable
ppinit = file.variables['apcpsfc'][:, i_lat[0]:i_lat[1]+1,
i_lon[0]:i_lon[1]+1]
i1 = np.min(np.where(np.array([a.hour for a in tiempos])==12))
# first time step that starts at 12Z
d0 = tiempos[i1] # --> Initial day at 12UTC (=9 Local Time)
for dia in np.arange(0, ndays):
from types import GeneratorType
import typing as tp
import csv
import json
from collections import namedtuple
from functools import partial
import numpy as np
from numpy.ma import MaskedArray
from static_frame.core.util import DEFAULT_SORT_KIND
from static_frame.core.util import NULL_SLICE
from static_frame.core.util import KEY_MULTIPLE_TYPES
from static_frame.core.util import GetItemKeyType
from static_frame.core.util import GetItemKeyTypeCompound
from static_frame.core.util import CallableOrMapping
from static_frame.core.util import KeyOrKeys
from static_frame.core.util import FilePathOrFileLike
from static_frame.core.util import DtypeSpecifier
from static_frame.core.util import DtypesSpecifier
from static_frame.core.util import IndexSpecifier
from static_frame.core.util import IndexInitializer
from static_frame.core.util import FrameInitializer
from static_frame.core.util import immutable_filter
from static_frame.core.util import column_2d_filter
from static_frame.core.util import column_1d_filter
from static_frame.core.util import name_filter
from static_frame.core.util import _gen_skip_middle
from static_frame.core.util import iterable_to_array
from static_frame.core.util import _dict_to_sorted_items
from static_frame.core.util import _array_to_duplicated
from static_frame.core.util import array_set_ufunc_many
from static_frame.core.util import array2d_to_tuples
from static_frame.core.util import _read_url
from static_frame.core.util import write_optional_file
from static_frame.core.util import GetItem
from static_frame.core.util import InterfaceSelection2D
from static_frame.core.util import InterfaceAsType
from static_frame.core.util import IndexCorrespondence
from static_frame.core.util import ufunc_unique
from static_frame.core.util import STATIC_ATTR
from static_frame.core.util import concat_resolved
from static_frame.core.util import DepthLevelSpecifier
from static_frame.core.util import _array_to_groups_and_locations
from static_frame.core.operator_delegate import MetaOperatorDelegate
from static_frame.core.iter_node import IterNodeApplyType
from static_frame.core.iter_node import IterNodeType
from static_frame.core.iter_node import IterNode
from static_frame.core.display import DisplayConfig
from static_frame.core.display import DisplayActive
from static_frame.core.display import Display
from static_frame.core.display import DisplayFormats
from static_frame.core.display import DisplayHeader
from static_frame.core.type_blocks import TypeBlocks
from static_frame.core.series import Series
from static_frame.core.index_base import IndexBase
from static_frame.core.index import Index
from static_frame.core.index import IndexGO
from static_frame.core.index import _requires_reindex
from static_frame.core.index import _is_index_initializer
from static_frame.core.index import immutable_index_filter
from static_frame.core.index_hierarchy import IndexHierarchy
from static_frame.core.index_hierarchy import IndexHierarchyGO
from static_frame.core.doc_str import doc_inject
def dtypes_mappable(dtypes: DtypesSpecifier):
'''
Determine if the dtypes argument can be used by name lookup, rather than index.
'''
return isinstance(dtypes, (dict, Series))
@doc_inject(selector='container_init', class_name='Frame')
class Frame(metaclass=MetaOperatorDelegate):
'''
A two-dimensional ordered, labelled collection, immutable and of fixed size.
Args:
data: An iterable of row iterables, a 2D numpy array, or dictionary mapping column names to column values.
{index}
{columns}
{own_data}
{own_index}
{own_columns}
'''
__slots__ = (
'_blocks',
'_columns',
'_index',
'_name'
)
_COLUMN_CONSTRUCTOR = Index
@classmethod
def from_concat(cls,
frames: tp.Iterable[tp.Union['Frame', Series]],
*,
axis: int = 0,
union: bool = True,
index: IndexInitializer = None,
columns: IndexInitializer = None,
name: tp.Hashable = None,
consolidate_blocks: bool = False
):
'''
Concatenate multiple Frames into a new Frame. If index or columns are provided and appropriately sized, the resulting Frame will have those indices. If the axis along concatenation (index for axis 0, columns for axis 1) is unique after concatenation, it will be preserved.
Args:
frames: Iterable of Frames.
axis: Integer specifying 0 to concatenate vertically, 1 to concatenate horizontally.
union: If True, the union of the aligned indices is used; if False, the intersection is used.
index: Optionally specify a new index.
columns: Optionally specify new columns.
Returns:
:py:class:`static_frame.Frame`
'''
# when doing axis 1 concat (growing horizontally) Series need to be presented as rows (axis 0)
# axis_series = (0 if axis is 1 else 1)
frames = [f if isinstance(f, Frame) else f.to_frame(axis) for f in frames]
# switch if we have reduced the columns argument to an array
from_array_columns = False
from_array_index = False
own_columns = False
own_index = False
if axis == 1: # stacks columns (extends rows)
# index can be the same, columns must be redefined if not unique
if columns is None:
# returns immutable array
columns = concat_resolved([frame._columns.values for frame in frames])
from_array_columns = True
# avoid sort for performance; always want rows if ndim is 2
if len(ufunc_unique(columns, axis=0)) != len(columns):
raise RuntimeError('Column names after horizontal concatenation are not unique; supply a columns argument.')
if index is None:
index = array_set_ufunc_many(
(frame._index.values for frame in frames),
union=union)
index.flags.writeable = False
from_array_index = True
def blocks():
for frame in frames:
if len(frame.index) != len(index) or (frame.index != index).any():
frame = frame.reindex(index=index)
for block in frame._blocks._blocks:
yield block
elif axis == 0: # stacks rows (extends columns)
if index is None:
# returns immutable array
index = concat_resolved([frame._index.values for frame in frames])
from_array_index = True
# avoid sort for performance; always want rows if ndim is 2
if len(ufunc_unique(index, axis=0)) != len(index):
raise RuntimeError('Index names after vertical concatenation are not unique; supply an index argument.')
if columns is None:
columns = array_set_ufunc_many(
(frame._columns.values for frame in frames),
union=union)
# import ipdb; ipdb.set_trace()
columns.flags.writeable = False
from_array_columns = True
def blocks():
aligned_frames = []
previous_frame = None
block_compatible = True
reblock_compatible = True
for frame in frames:
if len(frame.columns) != len(columns) or (frame.columns != columns).any():
frame = frame.reindex(columns=columns)
aligned_frames.append(frame)
# column size is all the same by this point
if previous_frame is not None:
if block_compatible:
block_compatible &= frame._blocks.block_compatible(
previous_frame._blocks)
if reblock_compatible:
reblock_compatible &= frame._blocks.reblock_compatible(
previous_frame._blocks)
previous_frame = frame
if block_compatible or reblock_compatible:
if not block_compatible and reblock_compatible:
type_blocks = [f._blocks.consolidate() for f in aligned_frames]
else:
type_blocks = [f._blocks for f in aligned_frames]
# all TypeBlocks have the same number of blocks by here
for block_idx in range(len(type_blocks[0]._blocks)):
block_parts = []
for frame_idx in range(len(type_blocks)):
b = column_2d_filter(
type_blocks[frame_idx]._blocks[block_idx])
block_parts.append(b)
# returns immutable array
yield concat_resolved(block_parts)
else:
# must just combine .values; returns immutable array
yield concat_resolved([frame.values for frame in frames])
else:
raise NotImplementedError('no support for axis', axis)
if from_array_columns:
if columns.ndim == 2: # we have a hierarchical index
column_cls = (IndexHierarchy
if cls._COLUMN_CONSTRUCTOR.STATIC else IndexHierarchyGO)
columns = column_cls.from_labels(columns)
own_columns = True
if from_array_index:
if index.ndim == 2: # we have a hierarchical index
index = IndexHierarchy.from_labels(index)
own_index = True
if consolidate_blocks:
block_gen = lambda: TypeBlocks.consolidate_blocks(blocks())
else:
block_gen = blocks
return cls(TypeBlocks.from_blocks(block_gen()),
index=index,
columns=columns,
name=name,
own_data=True,
own_columns=own_columns,
own_index=own_index)
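# Minimal usage sketch for from_concat (hypothetical labels and values):
#
#   f1 = Frame.from_records([(1, 'a'), (2, 'b')], columns=('x', 'y'), index=('p', 'q'))
#   f2 = Frame.from_records([(3, 'c'), (4, 'd')], columns=('x', 'y'), index=('r', 's'))
#   f = Frame.from_concat((f1, f2), axis=0)  # stack rows; index labels must stay unique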
@classmethod
def from_records(cls,
records: tp.Iterable[tp.Any],
*,
index: tp.Optional[IndexInitializer] = None,
columns: tp.Optional[IndexInitializer] = None,
dtypes: DtypesSpecifier = None,
name: tp.Hashable = None,
consolidate_blocks: bool = False
) -> 'Frame':
'''Frame constructor from an iterable of rows.
Args:
records: Iterable of row values, provided either as arrays, tuples, lists, or namedtuples.
index: Optionally provide an iterable of index labels, equal in length to the number of records.
columns: Optionally provide an iterable of column labels, equal in length to the length of each row.
dtypes: Optionally provide an iterable of dtypes, equal in length to the length of each row, or mapping by column name. If a dtype is given as None, NumPy's default type determination will be used.
Returns:
:py:class:`static_frame.Frame`
'''
derive_columns = False
if columns is None:
derive_columns = True
# leave columns list in outer scope for blocks() to populate
columns = []
# if records is np; we can just pass it to constructor, as is alrady a consolidate type
if isinstance(records, np.ndarray):
if dtypes is not None:
raise NotImplementedError('handling of dtypes when using NP records is no yet implemented')
return cls(records, index=index, columns=columns)
dtypes_is_map = dtypes_mappable(dtypes)
def get_col_dtype(col_idx):
if dtypes_is_map:
return dtypes.get(columns[col_idx], None)
return dtypes[col_idx]
def blocks():
if not hasattr(records, '__len__'):
rows = list(records)
else:
rows = records
row_reference = rows[0]
row_count = len(rows)
col_count = len(row_reference)
# if dtypes is not None and len(dtypes) != col_count:
# raise RuntimeError('length of dtypes does not match rows')
column_getter = None
if isinstance(row_reference, dict):
col_idx_iter = (k for k, _ in _dict_to_sorted_items(row_reference))
if derive_columns: # just pass the key back
column_getter = lambda key: key
elif isinstance(row_reference, Series):
raise RuntimeError('Frame.from_records() does not support Series. Use Frame.from_concat() instead.')
else:
# all other iterables
col_idx_iter = range(col_count)
if hasattr(row_reference, '_fields') and derive_columns:
column_getter = row_reference._fields.__getitem__
# derive types from first rows
for col_idx, col_key in enumerate(col_idx_iter):
if column_getter: # append as side effect of generator!
columns.append(column_getter(col_key))
# for each column, try to get a column_type, or None
if dtypes is None:
field_ref = row_reference[col_key]
# string and datetime64 types require a size in the dtype specification, so we cannot use np.fromiter, as we do not know the size of all columns
column_type = (type(field_ref)
if not isinstance(field_ref, (str, np.datetime64))
else None)
column_type_explicit = False
else: # column_type returned here can be None.
column_type = get_col_dtype(col_idx)
column_type_explicit = True
values = None
if column_type is not None:
try:
values = np.fromiter(
(row[col_key] for row in rows),
count=row_count,
dtype=column_type)
except ValueError:
# the column_type may not be compatible, so must fall back on using np.array to determine the type, i.e., ValueError: cannot convert float NaN to integer
if not column_type_explicit:
# reset to None if not explicit and fromiter failed
column_type = None
if values is None:
# let array constructor determine type if column_type is None
values = np.array([row[col_key] for row in rows],
dtype=column_type)
values.flags.writeable = False
yield values
if consolidate_blocks:
block_gen = lambda: TypeBlocks.consolidate_blocks(blocks())
else:
block_gen = blocks
return cls(TypeBlocks.from_blocks(block_gen()),
index=index,
columns=columns,
name=name,
own_data=True)
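# Minimal usage sketch for from_records (hypothetical rows; a dtype of None
# defers to NumPy's default type discovery):
#
#   records = [(1, 'a', True), (2, 'b', False)]
#   f = Frame.from_records(records,
#           columns=('num', 'char', 'flag'),
#           dtypes=(np.int64, None, bool))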
@classmethod
def from_json(cls,
json_data: str,
*,
name: tp.Hashable = None,
dtypes: DtypesSpecifier = None
) -> 'Frame':
'''Frame constructor from an in-memory JSON document.
Args:
json_data: a string of JSON, encoding a table as an array of JSON objects.
Returns:
:py:class:`static_frame.Frame`
'''
data = json.loads(json_data)
return cls.from_records(data, name=name, dtypes=dtypes)
@classmethod
def from_json_url(cls,
url: str,
*,
name: tp.Hashable = None,
dtypes: DtypesSpecifier = None
) -> 'Frame':
'''Frame constructor from a JSON document provided via a URL.
Args:
url: URL to the JSON resource.
Returns:
:py:class:`static_frame.Frame`
'''
return cls.from_json(_read_url(url), name=name, dtypes=dtypes)
@classmethod
def from_items(cls,
pairs: tp.Iterable[tp.Tuple[tp.Hashable, tp.Iterable[tp.Any]]],
*,
index: IndexInitializer = None,
fill_value: object = np.nan,
name: tp.Hashable = None,
dtypes: DtypesSpecifier = None,
consolidate_blocks: bool = False):
'''Frame constructor from an iterator or generator of pairs, where the first value is the column name and the second value an iterable of column values.
Args:
pairs: Iterable of pairs of column name, column values.
index: Iterable of values to create an Index.
fill_value: If pairs include Series, they will be reindexed with the provided index; reindexing will use this fill value.
consolidate_blocks: If True, same-typed adjacent columns will be consolidated into a contiguous array.
Returns:
:py:class:`static_frame.Frame`
'''
columns = []
# if an index initializer is passed, and we expect to get Series, we need to create the index in advance of iterating blocks
own_index = False
if _is_index_initializer(index):
index = Index(index)
own_index = True
dtypes_is_map = dtypes_mappable(dtypes)
def get_col_dtype(col_idx):
if dtypes_is_map:
return dtypes.get(columns[col_idx], None)
return dtypes[col_idx]
def blocks():
for col_idx, (k, v) in enumerate(pairs):
columns.append(k) # side effect of generator!
if dtypes:
column_type = get_col_dtype(col_idx)
else:
column_type = None
if isinstance(v, np.ndarray):
# NOTE: we rely on TypeBlocks constructor to check that these are same sized
if column_type is not None:
yield v.astype(column_type)
else:
yield v
elif isinstance(v, Series):
if index is None:
raise RuntimeError('can only consume Series in Frame.from_items if an Index is provided.')
if column_type is not None:
v = v.astype(column_type)
if _requires_reindex(v.index, index):
yield v.reindex(index, fill_value=fill_value).values
else:
yield v.values
elif isinstance(v, Frame):
raise NotImplementedError('Frames are not supported in from_items constructor.')
else:
values = np.array(v, dtype=column_type)
values.flags.writeable = False
yield values
if consolidate_blocks:
block_gen = lambda: TypeBlocks.consolidate_blocks(blocks())
else:
block_gen = blocks
return cls(TypeBlocks.from_blocks(block_gen()),
index=index,
columns=columns,
name=name,
own_data=True,
own_index=own_index)
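# Minimal usage sketch for from_items (hypothetical column labels and values):
#
#   f = Frame.from_items(
#           (('a', (1, 2, 3)), ('b', (4, 5, 6))),
#           index=('x', 'y', 'z'))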
@classmethod
def from_dict(cls,
dict: tp.Dict[tp.Hashable, tp.Iterable[tp.Any]],
*,
index: IndexInitializer = None,
fill_value: object = np.nan,
name: tp.Hashable = None,
dtypes: DtypesSpecifier = None,
consolidate_blocks: bool = False):
'''
Create a Frame from a dictionary, or any object that has an items() method.
'''
return cls.from_items(dict.items(),
index=index,
fill_value=fill_value,
name=name,
dtypes=dtypes,
consolidate_blocks=consolidate_blocks)
@classmethod
def from_structured_array(cls,
array: np.ndarray,
*,
name: tp.Hashable = None,
index_column: tp.Optional[IndexSpecifier] = None,
dtypes: DtypesSpecifier = None,
consolidate_blocks: bool = False) -> 'Frame':
'''
Convert a NumPy structured array into a Frame.
Args:
array: Structured NumPy array.
index_column: Optionally provide the name or position offset of the column to use as the index.
Returns:
:py:class:`static_frame.Frame`
'''
names = array.dtype.names
if isinstance(index_column, int):
index_name = names[index_column]
else:
index_name = index_column
# assign in generator; requires reading through gen first
index_array = None
# cannot use names if we remove an index; there might be a more efficient way, as we know the size
columns = []
columns_with_index = []
dtypes_is_map = dtypes_mappable(dtypes)
def get_col_dtype(col_idx):
if dtypes_is_map:
return dtypes.get(columns_with_index[col_idx], None)
return dtypes[col_idx]
def blocks():
for col_idx, name in enumerate(names):
columns_with_index.append(name)
if name == index_name:
nonlocal index_array
index_array = array[name]
continue
columns.append(name)
# this is not expected to make a copy
if dtypes:
dtype = get_col_dtype(col_idx)
if dtype is not None:
yield array[name].astype(dtype)
else:
yield array[name]
else:
yield array[name]
if consolidate_blocks:
block_gen = lambda: TypeBlocks.consolidate_blocks(blocks())
else:
block_gen = blocks
return cls(TypeBlocks.from_blocks(block_gen()),
columns=columns,
index=index_array,
name=name,
own_data=True)
#---------------------------------------------------------------------------
# iloc/loc pairs constructors: these are not yet documented
@classmethod
def from_element_iloc_items(cls,
items,
*,
index,
columns,
dtype,
name: tp.Hashable = None
) -> 'Frame':
'''
Given an iterable of pairs of iloc coordinates and values, populate a Frame as defined by the given index and columns. Dtype must be specified.
Returns:
:py:class:`static_frame.Frame`
'''
index = Index(index)
columns = cls._COLUMN_CONSTRUCTOR(columns)
tb = TypeBlocks.from_element_items(items,
shape=(len(index), len(columns)),
dtype=dtype)
return cls(tb,
index=index,
columns=columns,
name=name,
own_data=True,
own_index=True,
own_columns=True)
@classmethod
def from_element_loc_items(cls,
items,
*,
index,
columns,
dtype=None,
name: tp.Hashable = None
) -> 'Frame':
'''
Returns:
:py:class:`static_frame.Frame`
'''
index = Index(index)
columns = cls._COLUMN_CONSTRUCTOR(columns)
items = (((index.loc_to_iloc(k[0]), columns.loc_to_iloc(k[1])), v)
for k, v in items)
dtype = dtype if dtype is not None else object
tb = TypeBlocks.from_element_items(items,
shape=(len(index), len(columns)),
dtype=dtype)
return cls(tb,
index=index,
columns=columns,
name=name,
own_data=True,
own_index=True,
own_columns=True)
#---------------------------------------------------------------------------
# file, data format loaders
@classmethod
def from_csv(cls,
fp: FilePathOrFileLike,
*,
delimiter: str = ',',
index_column: tp.Optional[tp.Union[int, str]] = None,
skip_header: int = 0,
skip_footer: int = 0,
header_is_columns: bool = True,
quote_char: str = '"',
dtypes: DtypesSpecifier = None,
encoding: tp.Optional[str] = None
) -> 'Frame':
'''
Create a Frame from a file path or a file-like object defining a delimited (CSV, TSV) data file.
Args:
fp: A file path or a file-like object.
delimiter: The character used to separate row elements.
index_column: Optionally specify a column, by position or name, to become the index.
skip_header: Number of leading lines to skip.
skip_footer: Number of trailing lines to skip.
header_is_columns: If True, column names are read from the first line after the first skip_header lines.
dtypes: set to None by default to permit discovery
Returns:
:py:class:`static_frame.Frame`
'''
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.loadtxt.html
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.genfromtxt.html
delimiter_native = '\t'
if delimiter != delimiter_native:
# this is necessary if there are quoted cells that include the delimiter
def to_tsv():
if isinstance(fp, str):
with open(fp, 'r') as f:
for row in csv.reader(f, delimiter=delimiter, quotechar=quote_char):
yield delimiter_native.join(row)
else:
# handling file like object works for stringio but not for bytesio
for row in csv.reader(fp, delimiter=delimiter, quotechar=quote_char):
yield delimiter_native.join(row)
file_like = to_tsv()
else:
file_like = fp
array = np.genfromtxt(file_like,
delimiter=delimiter_native,
skip_header=skip_header,
skip_footer=skip_footer,
names=header_is_columns,
dtype=None,
encoding=encoding,
invalid_raise=False,
missing_values={''},
)
# can own this array so set it as immutable
array.flags.writeable = False
return cls.from_structured_array(array,
index_column=index_column,
dtypes=dtypes
)
@classmethod
def from_tsv(cls, fp, **kwargs) -> 'Frame':
'''
Specialized version of :py:meth:`Frame.from_csv` for TSV files.
Returns:
:py:class:`static_frame.Frame`
'''
return cls.from_csv(fp, delimiter='\t', **kwargs)
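# Minimal usage sketch for from_csv/from_tsv (hypothetical file names):
#
#   f = Frame.from_csv('data.csv', index_column='id')
#   f = Frame.from_tsv('data.tsv', index_column=0)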
@classmethod
@doc_inject()
def from_pandas(cls,
value,
*,
own_data: bool = False) -> 'Frame':
'''Given a Pandas DataFrame, return a Frame.
Args:
value: Pandas DataFrame.
{own_data}
Returns:
:py:class:`static_frame.Frame`
'''
# create generator of contiguous typed data
# calling .values will force type unification across all columns
def blocks():
#import ipdb; ipdb.set_trace()
pairs = value.dtypes.items()
column_start, dtype_current = next(pairs)
column_last = column_start
for column, dtype in pairs:
if dtype != dtype_current:
# use loc to select before calling .values
array = value.loc[NULL_SLICE,
slice(column_start, column_last)].values
if own_data:
array.flags.writeable = False
yield array
column_start = column
dtype_current = dtype
column_last = column
# always have left over
array = value.loc[NULL_SLICE, slice(column_start, None)].values
if own_data:
array.flags.writeable = False
yield array
blocks = TypeBlocks.from_blocks(blocks())
# avoid getting a Series if a column
if 'name' not in value.columns and hasattr(value, 'name'):
name = value.name
else:
name = None
is_go = not cls._COLUMN_CONSTRUCTOR.STATIC
return cls(blocks,
index=IndexBase.from_pandas(value.index),
columns=IndexBase.from_pandas(value.columns, is_go=is_go),
name=name,
own_data=True,
own_index=True,
own_columns=True)
#---------------------------------------------------------------------------
def __init__(self,
data: FrameInitializer = None,
*,
index: IndexInitializer = None,
columns: IndexInitializer = None,
name: tp.Hashable = None,
own_data: bool = False,
own_index: bool = False,
own_columns: bool = False
) -> None:
'''
Args:
own_data: if True, assume that the data being passed in can be owned entirely by this Frame; that is, a copy does not need to be made.
own_index: if True, the index is taken as is and is not passed to an Index initializer.
'''
self._name = name if name is None else name_filter(name)
#-----------------------------------------------------------------------
# blocks assignment
blocks_constructor = None
if isinstance(data, TypeBlocks):
if own_data:
self._blocks = data
else:
# assume we need to create a new TB instance; this will not copy underlying arrays as all blocks are immutable
self._blocks = TypeBlocks.from_blocks(data._blocks)
elif isinstance(data, np.ndarray):
if own_data:
data.flags.writeable = False
self._blocks = TypeBlocks.from_blocks(data)
elif isinstance(data, dict):
raise RuntimeError('use Frame.from_dict to create a Frame from a dict')
# if a dictionary is given, it is treated as a dictionary of columns
# if columns is not None:
# raise RuntimeError('cannot create Frame from dictionary when columns is defined')
# columns = []
# def blocks():
# for k, v in _dict_to_sorted_items(data):
# columns.append(k)
# if isinstance(v, np.ndarray):
# yield v
# else:
# values = np.array(v)
# values.flags.writeable = False
# yield values
# self._blocks = TypeBlocks.from_blocks(blocks())
elif data is None and columns is None:
# will have shape of 0,0
self._blocks = TypeBlocks.from_none()
elif not hasattr(data, '__len__') and not isinstance(data, str):
# data is not None, single element to scale to size of index and columns
def blocks_constructor(shape):
a = np.full(shape, data)
a.flags.writeable = False
self._blocks = TypeBlocks.from_blocks(a)
else:
# could be list of lists to be made into an array
a = np.array(data)
a.flags.writeable = False
self._blocks = TypeBlocks.from_blocks(a)
# counts can be zero (not None) if _block was created but is empty
row_count, col_count = self._blocks._shape if not blocks_constructor else (None, None)
#-----------------------------------------------------------------------
# index assignment
if own_columns or (hasattr(columns, STATIC_ATTR) and columns.STATIC):
# if it is a STATIC index we can assign directly
self._columns = columns
elif columns is None or (hasattr(columns, '__len__') and len(columns) == 0):
if col_count is None:
raise RuntimeError('cannot create columns when no data given')
self._columns = self._COLUMN_CONSTRUCTOR(
range(col_count),
loc_is_iloc=True,
dtype=np.int64)
else:
self._columns = self._COLUMN_CONSTRUCTOR(columns)
if own_index or (hasattr(index, STATIC_ATTR) and index.STATIC):
self._index = index
elif index is None or (hasattr(index, '__len__') and len(index) == 0):
if row_count is None:
raise RuntimeError('cannot create rows when no data given')
self._index = Index(range(row_count),
loc_is_iloc=True,
dtype=np.int64)
else:
self._index = Index(index)
# if block creation was deferred to a blocks_constructor, build the blocks now using the sizes of the index and columns
if blocks_constructor:
row_count = self._index.__len__()
col_count = self._columns.__len__()
blocks_constructor((row_count, col_count))
if row_count and len(self._index) != row_count:
# row count might be 0 for an empty DF
raise RuntimeError(
'Index has incorrect size (got {}, expected {})'.format(
len(self._index), row_count))
if len(self._columns) != col_count:
raise RuntimeError(
'Columns has incorrect size (got {}, expected {})'.format(
len(self._columns), col_count))
#---------------------------------------------------------------------------
# name interface
@property
def name(self) -> tp.Hashable:
return self._name
def rename(self, name: tp.Hashable) -> 'Frame':
'''
Return a new Frame with an updated name attribute.
'''
# copying blocks does not copy underlying data
return self.__class__(self._blocks.copy(),
index=self._index,
columns=self._columns, # let constructor handle if GO
name=name,
own_data=True,
own_index=True)
#---------------------------------------------------------------------------
# interfaces
@property
def loc(self) -> GetItem:
return GetItem(self._extract_loc)
@property
def iloc(self) -> GetItem:
return GetItem(self._extract_iloc)
@property
def drop(self) -> InterfaceSelection2D:
return InterfaceSelection2D(
func_iloc=self._drop_iloc,
func_loc=self._drop_loc,
func_getitem=self._drop_getitem)
@property
def mask(self) -> InterfaceSelection2D:
return InterfaceSelection2D(
func_iloc=self._extract_iloc_mask,
func_loc=self._extract_loc_mask,
func_getitem=self._extract_getitem_mask)
@property
def masked_array(self) -> InterfaceSelection2D:
return InterfaceSelection2D(
func_iloc=self._extract_iloc_masked_array,
func_loc=self._extract_loc_masked_array,
func_getitem=self._extract_getitem_masked_array)
@property
def assign(self) -> InterfaceSelection2D:
return InterfaceSelection2D(
func_iloc=self._extract_iloc_assign,
func_loc=self._extract_loc_assign,
func_getitem=self._extract_getitem_assign)
@property
def astype(self) -> InterfaceAsType:
return InterfaceAsType(func_getitem=self._extract_getitem_astype)
# generators
@property
def iter_array(self) -> IterNode:
return IterNode(
container=self,
function_values=self._axis_array,
function_items=self._axis_array_items,
yield_type=IterNodeType.VALUES
)
@property
def iter_array_items(self) -> IterNode:
return IterNode(
container=self,
function_values=self._axis_array,
function_items=self._axis_array_items,
yield_type=IterNodeType.ITEMS
)
@property
def iter_tuple(self) -> IterNode:
return IterNode(
container=self,
function_values=self._axis_tuple,
function_items=self._axis_tuple_items,
yield_type=IterNodeType.VALUES
)
@property
def iter_tuple_items(self) -> IterNode:
return IterNode(
container=self,
function_values=self._axis_tuple,
function_items=self._axis_tuple_items,
yield_type=IterNodeType.ITEMS
)
@property
def iter_series(self) -> IterNode:
return IterNode(
container=self,
function_values=self._axis_series,
function_items=self._axis_series_items,
yield_type=IterNodeType.VALUES
)
@property
def iter_series_items(self) -> IterNode:
return IterNode(
container=self,
function_values=self._axis_series,
function_items=self._axis_series_items,
yield_type=IterNodeType.ITEMS
)
@property
def iter_group(self) -> IterNode:
return IterNode(
container=self,
function_values=self._axis_group_loc,
function_items=self._axis_group_loc_items,
yield_type=IterNodeType.VALUES
)
@property
def iter_group_items(self) -> IterNode:
return IterNode(
container=self,
function_values=self._axis_group_loc,
function_items=self._axis_group_loc_items,
yield_type=IterNodeType.ITEMS
)
@property
def iter_group_index(self) -> IterNode:
return IterNode(
container=self,
function_values=self._axis_group_index,
function_items=self._axis_group_index_items,
yield_type=IterNodeType.VALUES
)
@property
def iter_group_index_items(self) -> IterNode:
return IterNode(
container=self,
function_values=self._axis_group_index,
function_items=self._axis_group_index_items,
yield_type=IterNodeType.ITEMS
)
@property
def iter_element(self) -> IterNode:
return IterNode(
container=self,
function_values=self._iter_element_loc,
function_items=self._iter_element_loc_items,
yield_type=IterNodeType.VALUES,
apply_type=IterNodeApplyType.FRAME_ELEMENTS
)
@property
def iter_element_items(self) -> IterNode:
return IterNode(
container=self,
function_values=self._iter_element_loc,
function_items=self._iter_element_loc_items,
yield_type=IterNodeType.ITEMS,
apply_type=IterNodeApplyType.FRAME_ELEMENTS
)
#---------------------------------------------------------------------------
# index manipulation
def _reindex_other_like_iloc(self,
value: tp.Union[Series, 'Frame'],
iloc_key: GetItemKeyTypeCompound,
fill_value=np.nan
) -> 'Frame':
'''Given a value that is a Series or Frame, reindex it to the index components, drawn from this Frame, that are specified by the iloc_key.
'''
if isinstance(iloc_key, tuple):
row_key, column_key = iloc_key
else:
row_key, column_key = iloc_key, None
# within this frame, get Index objects by extracting based on passed-in iloc keys
nm_row, nm_column = self._extract_axis_not_multi(row_key, column_key)
v = None
if nm_row and not nm_column:
# only column is multi selection, reindex by column
if isinstance(value, Series):
v = value.reindex(self._columns._extract_iloc(column_key),
fill_value=fill_value)
elif not nm_row and nm_column:
# only row is multi selection, reindex by index
if isinstance(value, Series):
v = value.reindex(self._index._extract_iloc(row_key),
fill_value=fill_value)
elif not nm_row and not nm_column:
# both multi, must be a Frame
if isinstance(value, Frame):
target_column_index = self._columns._extract_iloc(column_key)
target_row_index = self._index._extract_iloc(row_key)
# this will use the default fillna type, which may or may not be what is wanted
v = value.reindex(
index=target_row_index,
columns=target_column_index,
fill_value=fill_value)
if v is None:
raise Exception(('cannot assign '
+ value.__class__.__name__
+ ' with key configuration'), (nm_row, nm_column))
return v
def reindex(self,
index: tp.Union[Index, tp.Sequence[tp.Any]] = None,
columns: tp.Union[Index, tp.Sequence[tp.Any]] = None,
fill_value=np.nan) -> 'Frame':
'''
Return a new Frame based on the passed index and/or columns.
'''
if index is None and columns is None:
raise Exception('must specify one of index or columns')
if index is not None:
if isinstance(index, (Index, IndexHierarchy)):
# always use the Index constructor for safe reuse when possible
index = index.__class__(index)
else: # create the Index if not already an index, assume 1D
index = Index(index)
index_ic = IndexCorrespondence.from_correspondence(self._index, index)
else:
index = self._index
index_ic = None
if columns is not None:
if isinstance(columns, (Index, IndexHierarchy)):
# always use the Index constructor for safe reuse when possible
if columns.STATIC != self._COLUMN_CONSTRUCTOR.STATIC:
raise Exception('static status of index does not match expected column static status')
columns = columns.__class__(columns)
else: # create the Index if not already an columns, assume 1D
columns = self._COLUMN_CONSTRUCTOR(columns)
columns_ic = IndexCorrespondence.from_correspondence(self._columns, columns)
else:
columns = self._columns
columns_ic = None
return self.__class__(
TypeBlocks.from_blocks(self._blocks.resize_blocks(
index_ic=index_ic,
columns_ic=columns_ic,
fill_value=fill_value)),
index=index,
columns=columns,
name=self._name,
own_data=True)
def relabel(self,
index: CallableOrMapping = None,
columns: CallableOrMapping = None) -> 'Frame':
'''
Return a new Frame based on a mapping (or callable) from old to new index values.
'''
# create new index objects in both cases so as to call with own*
index = self._index.relabel(index) if index else self._index.copy()
columns = self._columns.relabel(columns) if columns else self._columns.copy()
return self.__class__(
self._blocks.copy(), # does not copy arrays
index=index,
columns=columns,
name=self._name,
own_data=True,
own_index=True,
own_columns=True)
def reindex_flat(self,
index: bool = False,
columns: bool = False) -> 'Frame':
'''
Return a new Frame, where an ``IndexHierarchy`` defined on the index or columns is replaced with a flat, one-dimension index of tuples.
'''
index = self._index.flat() if index else self._index.copy()
columns = self._columns.flat() if columns else self._columns.copy()
return self.__class__(
self._blocks.copy(), # does not copy arrays
index=index,
columns=columns,
name=self._name,
own_data=True,
own_index=True,
own_columns=True)
def reindex_add_level(self,
index: tp.Hashable = None,
columns: tp.Hashable = None) -> 'Frame':
'''
Return a new Frame, adding a new root level to the ``IndexHierarchy`` defined on the index or columns.
'''
index = self._index.add_level(index) if index else self._index.copy()
columns = self._columns.add_level(columns) if columns else self._columns.copy()
return self.__class__(
self._blocks.copy(), # does not copy arrays
index=index,
columns=columns,
name=self._name,
own_data=True,
own_index=True,
own_columns=True)
@doc_inject(selector='reindex')
def reindex_drop_level(self,
index: int = 0,
columns: int = 0
) -> 'Frame':
'''
Return a new Frame, dropping one or more levels from the ``IndexHierarchy`` defined on the index or columns. {count}
'''
index = self._index.drop_level(index) if index else self._index.copy()
columns = self._columns.drop_level(columns) if columns else self._columns.copy()
return self.__class__(
self._blocks.copy(), # does not copy arrays
index=index,
columns=columns,
name=self._name,
own_data=True,
own_index=True,
own_columns=True)
#---------------------------------------------------------------------------
# na handling
def isna(self) -> 'Frame':
'''
Return a same-indexed, Boolean Frame indicating True which values are NaN or None.
'''
# always return a Frame, even if this is a FrameGO
return Frame(self._blocks.isna(),
index=self._index,
columns=self._columns,
own_data=True)
def notna(self) -> 'Frame':
'''
Return a same-indexed, Boolean Frame indicating True which values are not NaN or None.
'''
# always return a Frame, even if this is a FrameGO
return Frame(self._blocks.notna(),
index=self._index,
columns=self._columns,
own_data=True)
def dropna(self,
axis: int = 0,
condition: tp.Callable[[np.ndarray], bool] = np.all) -> 'Frame':
'''
Return a new Frame after removing rows (axis 0) or columns (axis 1) where condition is True, where condition is a NumPy ufunc that processes the Boolean array returned by isna().
'''
# returns Boolean areas that define axis to keep
row_key, column_key = self._blocks.dropna_to_keep_locations(
axis=axis,
condition=condition)
# NOTE: if no values to drop and this is a Frame (not a FrameGO), we can return self, as it is immutable
if self.__class__ is Frame:
if (row_key is not None and column_key is not None
and row_key.all() and column_key.all()):
return self
return self._extract(row_key, column_key)
def fillna(self, value) -> 'Frame':
'''Return a new Frame after replacing NaN or None values with the supplied value.
'''
return self.__class__(self._blocks.fillna(value),
index=self._index,
columns=self._columns,
name=self._name,
own_data=True)
#---------------------------------------------------------------------------
def __len__(self) -> int:
'''Length of rows in values.
'''
return self._blocks._shape[0]
def display(self,
config: tp.Optional[DisplayConfig] = None
) -> Display:
config = config or DisplayActive.get()
# create an empty display, then populate with index
d = Display([[]],
config=config,
outermost=True,
index_depth=self._index.depth,
columns_depth=self._columns.depth + 2)
display_index = self._index.display(config=config)
d.extend_display(display_index)
if self._blocks._shape[1] > config.display_columns:
# columns as they will look after application of truncation and insertion of ellipsis
# get target column count in the absence of meta data, subtracting 2
data_half_count = Display.truncate_half_count(
config.display_columns - Display.DATA_MARGINS)
column_gen = partial(_gen_skip_middle,
forward_iter=partial(self._blocks.axis_values, axis=0),
forward_count=data_half_count,
reverse_iter=partial(self._blocks.axis_values, axis=0, reverse=True),
reverse_count=data_half_count,
center_sentinel=Display.ELLIPSIS_CENTER_SENTINEL
)
else:
column_gen = partial(self._blocks.axis_values, axis=0)
for column in column_gen():
if column is Display.ELLIPSIS_CENTER_SENTINEL:
d.extend_ellipsis()
else:
d.extend_iterable(column, header='')
config_transpose = config.to_transpose()
display_cls = Display.from_values((),
header=DisplayHeader(self.__class__, self._name),
config=config_transpose)
# need to apply the column config such that it truncates based on the max columns, not the max rows
display_columns = self._columns.display(
config=config_transpose)
# add spacers for a wide index
for _ in range(self._index.depth - 1):
# will need a width equal to the column depth
row = [Display.to_cell('', config=config)
for _ in range(self._columns.depth)]
spacer = Display([row])
display_columns.insert_displays(spacer,
insert_index=1) # after the first, the name
if self._columns.depth > 1:
display_columns_horizontal = display_columns.transform()
else: # can just flatten a single column into one row
display_columns_horizontal = display_columns.flatten()
d.insert_displays(
display_cls.flatten(),
display_columns_horizontal,
)
return d
def __repr__(self) -> str:
return repr(self.display())
def _repr_html_(self):
'''
Provide HTML representation for Jupyter Notebooks.
'''
# modify the active display to be for HTML
config = DisplayActive.get(
display_format=DisplayFormats.HTML_TABLE,
type_show=False
)
return repr(self.display(config))
#---------------------------------------------------------------------------
# accessors
@property
def values(self) -> np.ndarray:
return self._blocks.values
@property
def index(self) -> Index:
return self._index
@property
def columns(self) -> Index:
return self._columns
#---------------------------------------------------------------------------
# common attributes from the numpy array
@property
def dtypes(self) -> Series:
'''
Return a Series of dtypes for each realizable column.
Returns:
:py:class:`static_frame.Series`
'''
return Series(self._blocks.dtypes, index=self._columns.values)
@property
def mloc(self) -> np.ndarray:
'''Return an immutable ndarray of NP array memory location integers.
'''
return self._blocks.mloc
#---------------------------------------------------------------------------
@property
def shape(self) -> tp.Tuple[int, int]:
'''
Return a tuple describing the shape of the underlying NumPy array.
Returns:
:py:class:`tp.Tuple[int]`
'''
return self._blocks._shape
@property
def ndim(self) -> int:
'''
Return the number of dimensions, which for a `Frame` is always 2.
Returns:
:py:class:`int`
'''
return self._blocks.ndim
@property
def size(self) -> int:
'''
Return the size of the underlying NumPy array.
Returns:
:py:class:`int`
'''
return self._blocks.size
@property
def nbytes(self) -> int:
'''
Return the total bytes of the underlying NumPy array.
Returns:
:py:class:`int`
'''
return self._blocks.nbytes
#---------------------------------------------------------------------------
@staticmethod
def _extract_axis_not_multi(row_key, column_key) -> tp.Tuple[bool, bool]:
'''
If either row or column is given with a non-multiple type of selection (a single scalar), reduce dimensionality.
'''
row_nm = False
column_nm = False
if row_key is not None and not isinstance(row_key, KEY_MULTIPLE_TYPES):
row_nm = True # axis 0
if column_key is not None and not isinstance(column_key, KEY_MULTIPLE_TYPES):
column_nm = True # axis 1
return row_nm, column_nm
def _extract(self,
row_key: GetItemKeyType = None,
column_key: GetItemKeyType = None) -> tp.Union['Frame', Series]:
'''
Extract based on iloc selection (indices have already been mapped)
'''
blocks = self._blocks._extract(row_key=row_key, column_key=column_key)
if not isinstance(blocks, TypeBlocks):
return blocks # reduced to an element
own_index = True # the extracted Frame can always own this index
row_key_is_slice = isinstance(row_key, slice)
if row_key is None or (row_key_is_slice and row_key == NULL_SLICE):
index = self._index
else:
index = self._index._extract_iloc(row_key)
if not row_key_is_slice:
name_row = self._index.values[row_key]
if self._index.depth > 1:
name_row = tuple(name_row)
# can only own columns if _COLUMN_CONSTRUCTOR is static
column_key_is_slice = isinstance(column_key, slice)
if column_key is None or (column_key_is_slice and column_key == NULL_SLICE):
columns = self._columns
own_columns = self._COLUMN_CONSTRUCTOR.STATIC
else:
columns = self._columns._extract_iloc(column_key)
own_columns = True
if not column_key_is_slice:
name_column = self._columns.values[column_key]
if self._columns.depth > 1:
name_column = tuple(name_column)
axis_nm = self._extract_axis_not_multi(row_key, column_key)
if blocks._shape == (1, 1):
# if TypeBlocks did not return an element, need to determine which axis to use for Series index
if axis_nm[0]: # if row not multi
return Series(blocks.values[0],
index=immutable_index_filter(columns),
name=name_row)
elif axis_nm[1]:
return Series(blocks.values[0],
index=index,
name=name_column)
# if both are multi, we return a Frame
elif blocks._shape[0] == 1: # if one row
if axis_nm[0]: # if row key not multi
# best to use blocks.values, as will need to consolidate dtypes; will always return a 2D array
return Series(blocks.values[0],
index=immutable_index_filter(columns),
name=name_row)
elif blocks._shape[1] == 1: # if one column
if axis_nm[1]: # if column key is not multi
return Series(
column_1d_filter(blocks._blocks[0]),
index=index,
name=name_column)
return self.__class__(blocks,
index=index,
columns=columns,
name=self._name,
own_data=True, # always get new TypeBlock instance above
own_index=own_index,
own_columns=own_columns
)
def _extract_iloc(self, key: GetItemKeyTypeCompound) -> 'Frame':
'''
Given a compound key, return a new Frame. This method simply handles the variability of single or compound selectors.
'''
if isinstance(key, tuple):
return self._extract(*key)
return self._extract(row_key=key)
def _compound_loc_to_iloc(self,
key: GetItemKeyTypeCompound) -> tp.Tuple[GetItemKeyType, GetItemKeyType]:
'''
Given a compound iloc key, return a tuple of row, column keys. Assumes the first argument is always a row extractor.
'''
if isinstance(key, tuple):
loc_row_key, loc_column_key = key
iloc_column_key = self._columns.loc_to_iloc(loc_column_key)
else:
loc_row_key = key
iloc_column_key = None
iloc_row_key = self._index.loc_to_iloc(loc_row_key)
return iloc_row_key, iloc_column_key
def _compound_loc_to_getitem_iloc(self,
key: GetItemKeyTypeCompound) -> tp.Tuple[GetItemKeyType, GetItemKeyType]:
'''Handle a potentially compound key in the style of __getitem__. This will raise an appropriate exception if a two argument loc-style call is attempted.
'''
if isinstance(key, tuple):
raise KeyError('__getitem__ does not support multiple indexers')
iloc_column_key = self._columns.loc_to_iloc(key)
return None, iloc_column_key
def _extract_loc(self, key: GetItemKeyTypeCompound) -> 'Frame':
iloc_row_key, iloc_column_key = self._compound_loc_to_iloc(key)
return self._extract(row_key=iloc_row_key,
column_key=iloc_column_key)
def __getitem__(self, key: GetItemKeyType):
return self._extract(*self._compound_loc_to_getitem_iloc(key))
#---------------------------------------------------------------------------
def _drop_iloc(self, key: GetItemKeyTypeCompound) -> 'Frame':
'''
Args:
key: If a Boolean Series was passed, it has been converted to Boolean NumPy array already in loc to iloc.
'''
blocks = self._blocks.drop(key)
if isinstance(key, tuple):
iloc_row_key, iloc_column_key = key
index = self._index._drop_iloc(iloc_row_key)
own_index = True
columns = self._columns._drop_iloc(iloc_column_key)
own_columns = True
else:
iloc_row_key = key # no column selection
index = self._index._drop_iloc(iloc_row_key)
own_index = True
columns = self._columns
own_columns = False
return self.__class__(blocks,
columns=columns,
index=index,
name=self._name,
own_data=True,
own_columns=own_columns,
own_index=own_index
)
def _drop_loc(self, key: GetItemKeyTypeCompound) -> 'Frame':
key = self._compound_loc_to_iloc(key)
return self._drop_iloc(key=key)
def _drop_getitem(self, key: GetItemKeyTypeCompound) -> 'Frame':
key = self._compound_loc_to_getitem_iloc(key)
return self._drop_iloc(key=key)
#---------------------------------------------------------------------------
def _extract_iloc_mask(self, key: GetItemKeyTypeCompound) -> 'Frame':
masked_blocks = self._blocks.extract_iloc_mask(key)
return self.__class__(masked_blocks,
columns=self._columns,
index=self._index,
own_data=True)
def _extract_loc_mask(self, key: GetItemKeyTypeCompound) -> 'Frame':
key = self._compound_loc_to_iloc(key)
return self._extract_iloc_mask(key=key)
def _extract_getitem_mask(self, key: GetItemKeyTypeCompound) -> 'Frame':
key = self._compound_loc_to_getitem_iloc(key)
return self._extract_iloc_mask(key=key)
#---------------------------------------------------------------------------
def _extract_iloc_masked_array(self, key: GetItemKeyTypeCompound) -> MaskedArray:
masked_blocks = self._blocks.extract_iloc_mask(key)
return MaskedArray(data=self.values, mask=masked_blocks.values)
def _extract_loc_masked_array(self, key: GetItemKeyTypeCompound) -> MaskedArray:
key = self._compound_loc_to_iloc(key)
return self._extract_iloc_masked_array(key=key)
def _extract_getitem_masked_array(self, key: GetItemKeyTypeCompound) -> 'Frame':
key = self._compound_loc_to_getitem_iloc(key)
return self._extract_iloc_masked_array(key=key)
#---------------------------------------------------------------------------
def _extract_iloc_assign(self, key: GetItemKeyTypeCompound) -> 'FrameAssign':
return FrameAssign(self, iloc_key=key)
def _extract_loc_assign(self, key: GetItemKeyTypeCompound) -> 'FrameAssign':
# extract if tuple, then pack back again
key = self._compound_loc_to_iloc(key)
return self._extract_iloc_assign(key=key)
def _extract_getitem_assign(self, key: GetItemKeyTypeCompound) -> 'FrameAssign':
# extract if tuple, then pack back again
key = self._compound_loc_to_getitem_iloc(key)
return self._extract_iloc_assign(key=key)
#---------------------------------------------------------------------------
def _extract_getitem_astype(self, key: GetItemKeyType) -> 'FrameAsType':
# extract if tuple, then pack back again
_, key = self._compound_loc_to_getitem_iloc(key)
return FrameAsType(self, column_key=key)
#---------------------------------------------------------------------------
# dictionary-like interface
def keys(self):
'''Iterator of column labels.
'''
return self._columns
def __iter__(self):
'''
Iterator of column labels, same as :py:meth:`Frame.keys`.
'''
return self._columns.__iter__()
def __contains__(self, value) -> bool:
'''
Inclusion of value in column labels.
'''
return self._columns.__contains__(value)
def items(self) -> tp.Generator[tp.Tuple[tp.Any, Series], None, None]:
'''Iterator of pairs of column label and corresponding column :py:class:`Series`.
'''
return zip(self._columns.values,
(Series(v, index=self._index) for v in self._blocks.axis_values(0)))
def get(self, key, default=None):
'''
Return the value found at the columns key, else the default if the key is not found. This method is implemented to complete the dictionary-like interface.
'''
if key not in self._columns:
return default
return self.__getitem__(key)
#---------------------------------------------------------------------------
# operator functions
def _ufunc_unary_operator(self, operator: tp.Callable) -> 'Frame':
# call the unary operator on _blocks
return self.__class__(
self._blocks._ufunc_unary_operator(operator=operator),
index=self._index,
columns=self._columns)
def _ufunc_binary_operator(self, *, operator, other):
if isinstance(other, Frame):
# reindex both dimensions to union indices
columns = self._columns.union(other._columns)
index = self._index.union(other._index)
self_tb = self.reindex(columns=columns, index=index)._blocks
other_tb = other.reindex(columns=columns, index=index)._blocks
return self.__class__(self_tb._ufunc_binary_operator(
operator=operator, other=other_tb),
index=index,
columns=columns,
own_data=True
)
elif isinstance(other, Series):
columns = self._columns.union(other._index)
self_tb = self.reindex(columns=columns)._blocks
other_array = other.reindex(columns).values
return self.__class__(self_tb._ufunc_binary_operator(
operator=operator, other=other_array),
index=self._index,
columns=columns,
own_data=True
)
# handle single values and lists that can be converted to appropriate arrays
if not isinstance(other, np.ndarray) and hasattr(other, '__iter__'):
other = np.array(other)
# -*- coding: utf-8 -*-
"""
Created on Tue Apr 28 09:49:28 2020
@author: youngmin
library functions
"""
import time
import os
import dill
#import sys
import numpy as np
import sympy as sym
import matplotlib.pyplot as plt
#from scipy.interpolate import interp1d
from sympy.physics.quantum import TensorProduct as kp
#from sympy.utilities.lambdify import lambdify, implemented_function
from scipy.integrate import solve_ivp
def vec(a):
"""
vec array operator. stack columns.
reshape command stacks rows. so transpose then reshape to stack columns
https://stackoverflow.com/questions/55444777/...
numpy-array-stack-multiple-columns-into-one-using-reshape
"""
#print(type(a))
#a = np.asarray(a)
if np.asarray(a).ndim == 0:
return a
else:
return a.T.reshape(len(a[:,0])*len(a[0,:]),1)
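# Minimal usage sketch for vec: stack the columns of a 2x2 sympy Matrix into
# a 4x1 column vector.
def _demo_vec():
    a = sym.Matrix([[1, 2],
                    [3, 4]])
    print(vec(a))  # Matrix([[1], [3], [2], [4]])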
def grad(fn,xvec):
"""
fn is a scalar valued function.
xvec is the general input to fn. size of xvec is the dimension of domain
xevc contains the domain sympy variables [x1,x2,...,xn]
return row vector
"""
n = len(xvec)
gradf = sym.zeros(1,n)
for i in range(n):
gradf[0,i] = sym.diff(fn,xvec[i])
return gradf
def df(fn,xvec,k):
"""
distinct from grad. we alternate applying vec and grad to fn k times
f is map from RN to R (see Eq. 13 Wilson 2020)
fn is a function of xvec.
step k=1
-apply vec to transform gives 1 x 1
-derivative gives 1 x N
-end if k=1
step k=2
-apply vec to previous step gives N x 1
-deriv gives NxN
-end if k=2
step k=3
-apply vec to previous step gives 2*N x 1
-deriv gives 2*N x N
-end if k=3
etc.
output size N^(k-1) x N
"""
df = fn
n = len(xvec)
if k == 0:
return df
if k == 1:
df = grad(df,xvec)
return df
# f^(1)
df = grad(df,xvec)
#print()
#print(np.shape(df))
#print('df1',df)
# proceed with higher derivs
#print('k',k)
for i in range(2,k+1):
#print('i,k',i,k)
df = vec(df)
# preallocate N^(k-1) x N
df_temp = sym.zeros(n**(i-1),n)
#print(np.shape(df_temp))
# now loop over rows of df_temp and save gradient
for j in range(len(df_temp[:,0])):
df_temp[j,:] = grad(df[j,:],xvec)
df = df_temp
#print(np.shape(df))
#print('############ df, i,k',np.shape(df),i,k,df)
#print('i,df',i,df)
#print(np.shape(df))
#print('############ df, k',np.shape(df),k,df)
return df
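# Minimal usage sketch for grad/df on a scalar function of two variables
# (k=1 gives the 1 x N gradient; larger k stacks further vec/grad passes):
def _demo_df():
    x1, x2 = sym.symbols('x1 x2')
    fn = x1**2 * x2
    print(df(fn, [x1, x2], 0))  # x1**2*x2
    print(df(fn, [x1, x2], 1))  # Matrix([[2*x1*x2, x1**2]])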
def monodromy(t,z,jacLC):
"""
calculate right-hand side of system
\dot \Phi = J\Phi, \Phi(0)=I
\Phi is a matrix solution
jacLC is the jacobian evaluated along the limit cycle
"""
n = int(np.sqrt(len(z)))
z = np.reshape(z,(n,n))
#print(n)
dy = np.dot(jacLC(t),z)
return np.reshape(dy,n*n)
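# Minimal usage sketch for monodromy: integrate the variational equation for a
# 2D linear system with a constant Jacobian standing in for jacLC. After one
# full period of 2*pi the fundamental matrix returns to the identity.
# (solve_ivp's args keyword requires SciPy >= 1.4.)
def _demo_monodromy():
    jac = lambda t: np.array([[0.0, 1.0],
                              [-1.0, 0.0]])
    phi0 = np.eye(2).reshape(4)
    sol = solve_ivp(monodromy, [0, 2*np.pi], phi0, args=(jac,),
                    rtol=1e-9, atol=1e-9)
    print(sol.y[:, -1].reshape(2, 2))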
def kProd(k,dx):
"""
Kronecker product applied k times to vector dx (1,n)
k=1 returns dx
k=2 returns (1,n^2)
generally returns (1,n^(k))
"""
out = dx
for i in range(k-1):
#print('out',out)
out = kp(out,dx)
return out
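# Minimal usage sketch for kProd: the k-fold Kronecker product of a 1 x 2 row
# vector has shape (1, 2**k).
def _demo_kProd():
    dx = sym.Matrix([[1, 2]])
    print(kProd(1, dx).shape)  # (1, 2)
    print(kProd(3, dx).shape)  # (1, 8)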
def files_exist(*fnames,dictionary=False):
fname_list = []
for i in range(len(fnames)):
fname_list += fnames[i]
flag = 0
for i in range(len(fname_list)):
# check if each fname exists
flag += not(os.path.isfile(fname_list[i]))
if flag != 0:
return False
else:
return True
def load_dill(fnames):
#print(fnames)
templist = []
for i in range(len(fnames)):
templist.append(dill.load(open(fnames[i],'rb')))
return templist
def run_newton2(obj,fn,init,k,het_lams,max_iter=10,
rel_tol=1e-12,rel_err=10,backwards=True,eps=1e-1,
exception=False,alpha=1,min_iter=5,
dense=False):
if backwards:
tLC = -obj.tLC
else:
tLC = obj.tLC
# run newton's method
counter = 0
dx = 100
#smallest_init = np.zeros(len(init))+10
dx_smallest = np.zeros(len(init))+10
init_smallest = init
try:
while counter < max_iter:
if ( | np.linalg.norm(dx) | numpy.linalg.norm |
from IMLearn.learners import UnivariateGaussian, MultivariateGaussian
import numpy as np
import pandas as pd
import plotly.graph_objects as go
import plotly.express as px
import plotly.io as pio
pio.templates.default = "simple_white"
def test_univariate_gaussian():
# Question 1 - Draw samples and print fitted model
expectation, variance, sample_size = 10, 1, 1000
samples = np.random.normal(expectation, variance, sample_size)
univariate_1 = UnivariateGaussian()
univariate_1.fit(samples)
print("Q1) Estimated expectation and variance of univariate gaussian:")
print(f"(expectation, variance) = ({univariate_1.mu_}, {univariate_1.var_})")
print("\n")
# Question 2 - Empirically showing sample mean is consistent
univariate_2 = UnivariateGaussian()
    expectation_error = np.zeros(sample_size // 10)
sample_sizes = np.arange(10, 1010, 10)
for i in range(sample_size // 10):
univariate_2.fit(samples[:10 * (i + 1)])
expectation_error[i] = abs(univariate_2.mu_ - expectation)
layout_2 = go.Layout(dict(title="Q2) Error of Estimated Expectation of a Univariate Gaussian",
xaxis_title="Sample Size",
yaxis_title="Error",
yaxis_range=[0, 0.8]))
fig_2 = go.Figure(data=go.Scatter(x=sample_sizes, y=expectation_error), layout=layout_2)
fig_2.show()
# Question 3 - Plotting Empirical PDF of fitted model
pdfs = univariate_1.pdf(samples)
data_frame = pd.DataFrame({"Samples": samples, "PDF Values": pdfs})
fig_3 = px.scatter(data_frame, x="Samples", y="PDF Values",
title="Q3) Empirical PDF of the Fitted Model")
fig_3.show()
def test_multivariate_gaussian():
# Question 4 - Draw samples and print fitted model
expectation = np.array([0, 0, 4, 0])
covariance = np.array([[1, 0.2, 0, 0.5],
[0.2, 2, 0, 0],
[0, 0, 1, 0],
[0.5, 0, 0, 1]])
samples = np.random.multivariate_normal(expectation, covariance, 1000)
multivariate = MultivariateGaussian()
multivariate.fit(samples)
print("Q4) Estimated expectation and covariance of multivariate gaussian:")
print("Expectation Vector:")
print(multivariate.mu_)
print("Covariance Matrix:")
print(multivariate.cov_)
print("\n")
# Question 5 - Likelihood evaluation
feature = np.linspace(-10, 10, 200)
likelihood_mat = | np.empty((feature.size, feature.size)) | numpy.empty |
"""
A bot attack agent for the gym-idsgame environment that acts greedily according to a pre-trained policy network
"""
import numpy as np
import torch
import traceback
from sklearn import preprocessing
from gym_idsgame.agents.bot_agents.bot_agent import BotAgent
from gym_idsgame.envs.dao.game_state import GameState
from gym_idsgame.envs.dao.game_config import GameConfig
from gym_idsgame.agents.training_agents.policy_gradient.pg_agent_config import PolicyGradientAgentConfig
from gym_idsgame.agents.training_agents.openai_baselines.common.ppo.ppo import PPO
from gym_idsgame.envs.idsgame_env import IdsGameEnv
import gym_idsgame.envs.util.idsgame_util as util
from sklearn.preprocessing import normalize
class PPOBaselineAttackerBotAgent(BotAgent):
"""
Class implementing an attack policy that acts greedily according to a given policy network
"""
def __init__(self, pg_config: PolicyGradientAgentConfig, game_config: GameConfig, model_path: str = None,
env: IdsGameEnv = None):
"""
Constructor, initializes the policy
:param game_config: the game configuration
"""
super(PPOBaselineAttackerBotAgent, self).__init__(game_config)
if model_path is None:
raise ValueError("Cannot create a PPOBaselineAttackerBotAgent without specifying the path to the model")
self.idsgame_env = env
self.config = pg_config
self.model_path = model_path
self.initialize_models()
self.device = "cpu" if not self.config.gpu else "cuda:" + str(self.config.gpu_id)
def initialize_models(self) -> None:
"""
Initialize models
:return: None
"""
policy = "MlpPolicy"
if self.config.cnn_feature_extractor:
policy = "CnnPolicy"
# Initialize models
self.model = PPO.load(self.config.attacker_load_path, policy, pg_agent_config=self.config)
def action(self, game_state: GameState) -> int:
"""
Samples an action from the policy.
:param game_state: the game state
:return: action_id
"""
try:
# Feature engineering
attacker_obs = game_state.get_attacker_observation(
self.game_config.network_config, local_view=self.idsgame_env.local_view_features(),
reconnaissance=self.game_config.reconnaissance_actions,
reconnaissance_bool_features=self.idsgame_env.idsgame_config.reconnaissance_bool_features)
defender_obs = game_state.get_defender_observation(self.game_config.network_config)
attacker_state = self.update_state(attacker_obs=attacker_obs, defender_obs=defender_obs, state=[],
attacker=True)
if not self.config.ar_policy:
actions = list(range(self.idsgame_env.num_attack_actions))
non_legal_actions = list(filter(lambda action: not self.is_attack_legal(action, attacker_obs, game_state), actions))
obs_tensor_a = torch.as_tensor(attacker_state.flatten()).to(self.device)
attacker_actions, attacker_values, attacker_log_probs = self.model.attacker_policy.forward(
obs_tensor_a, self.idsgame_env, device=self.device, attacker=True, non_legal_actions=non_legal_actions)
attacker_action = attacker_actions.cpu().numpy()[0]
else:
actions = list(range(self.config.attacker_node_net_output_dim))
non_legal_actions = list(filter(lambda action: not self.is_attack_legal(action, attacker_obs, game_state, node=True), actions))
obs_tensor_a = torch.as_tensor(attacker_state.flatten()).to(self.device)
attacker_node_actions, attacker_node_values, attacker_node_log_probs, attacker_node_lstm_state = self.model.attacker_node_policy.forward(
obs_tensor_a, self.idsgame_env, device=self.device, attacker=True, non_legal_actions=non_legal_actions)
attacker_node_probs = self.model.attacker_node_policy.get_action_dist(obs_tensor_a, self.idsgame_env, device=self.device, attacker=True,
non_legal_actions=non_legal_actions)
attacker_node_actions = attacker_node_actions.cpu().numpy()
node = attacker_node_actions[0]
obs_tensor_a_1 = obs_tensor_a.reshape(self.idsgame_env.idsgame_config.game_config.num_nodes, self.config.attacker_at_net_input_dim)
obs_tensor_a_at = obs_tensor_a_1[node]
attacker_at_actions, attacker_at_values, attacker_at_log_probs, attacker_at_lstm_state = self.model.attacker_at_policy.forward(
obs_tensor_a_at, self.idsgame_env, device=self.device, attacker=True, non_legal_actions = non_legal_actions)
attacker_at_probs = self.model.attacker_at_policy.get_action_dist(obs_tensor_a_at, self.idsgame_env,
device=self.device, attacker=True,
non_legal_actions=non_legal_actions)
# print("attacker node probs:{}".format(attacker_node_probs.detach().cpu().numpy()))
# print("attacker at probs:{}".format(attacker_at_probs.detach().cpu().numpy()))
self.create_policy_plot(attacker_at_probs.detach().cpu().numpy(), 0, attacker=True)
attacker_at_actions = attacker_at_actions.cpu().numpy()
attack_id = util.get_attack_action_id(node, attacker_at_actions[0], self.idsgame_env.idsgame_config.game_config)
attacker_action = attack_id
except Exception as e:
print(str(e))
traceback.print_exc()
if self.idsgame_env.local_view_features():
attack = self.convert_local_attacker_action_to_global(attacker_action, attacker_obs)
return attack
else:
return attacker_action
def is_attack_legal(self, action, obs, game_state, node :bool= False) -> bool:
"""
Check if a given attack is legal or not.
:param attack_action: the attack to verify
:return: True if legal otherwise False
"""
if not self.config.ar_policy:
if self.idsgame_env.local_view_features():
action = self.convert_local_attacker_action_to_global(action, obs)
if action == -1:
return False
return util.is_attack_id_legal(action, self.game_config,
game_state.attacker_pos, game_state, [])
else:
if node:
return util.is_node_attack_legal(action, game_state.attacker_pos,
self.game_config.network_config)
else:
return True
def convert_local_attacker_action_to_global(self, action_id, attacker_obs):
num_attack_types = self.idsgame_env.idsgame_config.game_config.num_attack_types
neighbor = action_id // (num_attack_types + 1)
attack_type = action_id % (num_attack_types + 1)
target_id = int(attacker_obs[neighbor][num_attack_types])
if target_id == -1:
return -1
attacker_action = target_id * (num_attack_types + 1) + attack_type
return attacker_action
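    # Worked example of the mapping above (illustrative numbers): with
    # num_attack_types = 10, each neighbour owns a block of 11 consecutive local
    # action ids. action_id = 25 decodes to neighbor = 25 // 11 = 2 and
    # attack_type = 25 % 11 = 3; if row 2 of attacker_obs stores the global
    # target node id 7 in column 10, the returned global action is
    # 7 * 11 + 3 = 80 (or -1 if that neighbour slot is empty).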
def update_state(self, attacker_obs: np.ndarray = None, defender_obs: np.ndarray = None,
state: np.ndarray = None, attacker: bool = True) -> np.ndarray:
"""
Update approximative Markov state
:param attacker_obs: attacker obs
:param defender_obs: defender observation
:param state: current state
:param attacker: boolean flag whether it is attacker or not
:return: new state
"""
if attacker and self.idsgame_env.idsgame_config.game_config.reconnaissance_actions:
a_obs_len = self.idsgame_env.idsgame_config.game_config.num_attack_types + 1
defender_obs = attacker_obs[:,
a_obs_len:a_obs_len + self.idsgame_env.idsgame_config.game_config.num_attack_types]
if self.idsgame_env.idsgame_config.reconnaissance_bool_features:
d_bool_features = attacker_obs[:,
a_obs_len + self.idsgame_env.idsgame_config.game_config.num_attack_types:]
attacker_obs = attacker_obs[:, 0:a_obs_len]
if not attacker and self.idsgame_env.local_view_features():
attacker_obs = self.idsgame_env.state.get_attacker_observation(
self.idsgame_env.idsgame_config.game_config.network_config,
local_view=False,
reconnaissance=self.idsgame_env.idsgame_config.reconnaissance_actions)
# Zero mean
if self.config.zero_mean_features:
if not self.idsgame_env.local_view_features() or not attacker:
attacker_obs_1 = attacker_obs[:, 0:-1]
else:
attacker_obs_1 = attacker_obs[:, 0:-2]
zero_mean_attacker_features = []
for idx, row in enumerate(attacker_obs_1):
mean = np.mean(row)
if mean != 0:
t = row - mean
else:
t = row
if np.isnan(t).any():
t = attacker_obs[idx]
else:
t = t.tolist()
if not self.idsgame_env.local_view_features() or not attacker:
t.append(attacker_obs[idx][-1])
else:
t.append(attacker_obs[idx][-2])
t.append(attacker_obs[idx][-1])
zero_mean_attacker_features.append(t)
defender_obs_1 = defender_obs[:, 0:-1]
zero_mean_defender_features = []
for idx, row in enumerate(defender_obs_1):
mean = np.mean(row)
if mean != 0:
t = row - mean
else:
t = row
if np.isnan(t).any():
t = defender_obs[idx]
else:
t = t.tolist()
t.append(defender_obs[idx][-1])
zero_mean_defender_features.append(t)
attacker_obs = np.array(zero_mean_attacker_features)
defender_obs = np.array(zero_mean_defender_features)
# Normalize
if self.config.normalize_features:
if not self.idsgame_env.local_view_features() or not attacker:
attacker_obs_1 = attacker_obs[:, 0:-1] / np.linalg.norm(attacker_obs[:, 0:-1])
else:
attacker_obs_1 = attacker_obs[:, 0:-2] / np.linalg.norm(attacker_obs[:, 0:-2])
normalized_attacker_features = []
for idx, row in enumerate(attacker_obs_1):
if np.isnan(attacker_obs_1).any():
t = attacker_obs[idx]
else:
t = row.tolist()
if not self.idsgame_env.local_view_features() or not attacker:
t.append(attacker_obs[idx][-1])
else:
t.append(attacker_obs[idx][-2])
t.append(attacker_obs[idx][-1])
normalized_attacker_features.append(t)
if attacker and self.idsgame_env.idsgame_config.game_config.reconnaissance_actions:
defender_obs_1 = defender_obs[:, 0:-1] / np.linalg.norm(defender_obs[:, 0:-1])
else:
defender_obs_1 = defender_obs / np.linalg.norm(defender_obs)
normalized_defender_features = []
for idx, row in enumerate(defender_obs_1):
if np.isnan(defender_obs_1).any():
t = defender_obs[idx]
else:
if attacker and self.idsgame_env.idsgame_config.game_config.reconnaissance_actions:
t = row.tolist()
t.append(defender_obs[idx][-1])
else:
t = row
normalized_defender_features.append(t)
attacker_obs = np.array(normalized_attacker_features)
defender_obs = np.array(normalized_defender_features)
if self.idsgame_env.local_view_features() and attacker:
if not self.idsgame_env.idsgame_config.game_config.reconnaissance_actions:
neighbor_defense_attributes = np.zeros((attacker_obs.shape[0], defender_obs.shape[1]))
for node in range(attacker_obs.shape[0]):
id = int(attacker_obs[node][-1])
neighbor_defense_attributes[node] = defender_obs[id]
else:
neighbor_defense_attributes = defender_obs
if self.idsgame_env.fully_observed() or \
(self.idsgame_env.idsgame_config.game_config.reconnaissance_actions and attacker):
if self.config.merged_ad_features:
if not self.idsgame_env.local_view_features() or not attacker:
a_pos = attacker_obs[:, -1]
if not self.idsgame_env.idsgame_config.game_config.reconnaissance_actions:
det_values = defender_obs[:, -1]
temp = defender_obs[:, 0:-1] - attacker_obs[:, 0:-1]
else:
temp = defender_obs[:, 0:] - attacker_obs[:, 0:-1]
features = []
for idx, row in enumerate(temp):
t = row.tolist()
t.append(a_pos[idx])
if not self.idsgame_env.idsgame_config.game_config.reconnaissance_actions:
t.append(det_values[idx])
features.append(t)
else:
node_ids = attacker_obs[:, -1]
# node_reachable = attacker_obs[:, -1]
if not self.idsgame_env.idsgame_config.game_config.reconnaissance_actions:
det_values = neighbor_defense_attributes[:, -1]
if not self.idsgame_env.idsgame_config.game_config.reconnaissance_actions:
temp = neighbor_defense_attributes[:, 0:-1] - attacker_obs[:, 0:-1]
else:
temp = | np.full(neighbor_defense_attributes.shape, -1) | numpy.full |
import matplotlib.pyplot as plt
import matplotlib
import numpy as np
import os
import imageio
from timeit import timeit
from mpl_toolkits import mplot3d
from PIL import Image
#import png
import svd_tools_copy as svdt
import image_tools_copy as it
#import ../../../david/watermark as watermarktools
#sunset = it.load_image('../res/sunset.png')
#rainbow = it.load_image('../res/rainbow.png')
#view = it.load_image('../res/view.png')
view = it.load_image('../res/view.jpg')
tree = it.load_image('../res/tree.jpg')
plt.rcParams['font.size'] = '18'
def sv_plot_save(img, fname): #plot the singular values of an image (color channels are stacked into a 2-D matrix before the SVD)
#formatting
img = img.astype(np.float64)
#stacking color channels
img_rows, img_columns = img.shape[:2]
img_stacked = img.reshape(img_rows, -1)
u, s, v = np.linalg.svd(img_stacked, full_matrices=False)
plt.plot(s)
plt.savefig(fname)
#EXTRACTION ERROR = NORM(ORIGINAL WATERMARK - EXTRACTED WATERMARK)
#1. COMPUTE EMBEDDING AND EXTRACTION
#2. COMPUTE NORM(ORIGINAL WATERMARK - EXTRACTED WATERMARK)/NORM(ORIGINAL WATERMARK)
def reversepad(watermark_extracted,original_watermark):
sizes = original_watermark.shape
watermark_extracted = watermark_extracted[:sizes[0],:sizes[1]]
return watermark_extracted
def reversepad3d(watermark_extracted,original_watermark):
sizes = original_watermark.shape
watermark_extracted = watermark_extracted[:sizes[0],:sizes[1],:sizes[2]]
return watermark_extracted
def watermark_embed_liutan(img, watermark, scale, save):
#embeds watermark into image. if save == 'yes', then it will save to out/watermarking/watermarked_image/liutan
img_watermarked, watermarked_u, mat_s, watermarked_vh = it.embed_watermark(img, watermark, scale=scale)
img_watermarked = img_watermarked.astype(np.int32)
if save=='no':
return img_watermarked
elif save=='yes':
it.save_image(img_watermarked,'../out/watermarking/watermarked_image/liutan/watermarked_image_alpha_{}.png'.format(scale))
#Image.fromarray(img_watermarked,'RGB').save('../out/watermarking/watermarked_image/liutan/watermarked_image_alpha_{}.png'.format(scale), 'PNG')
def watermark_extract_liutan(img, watermark, scale, save):
    #embeds watermark into image and then extracts the watermark. if save == 'yes', it will save to out/watermarking/extracted_watermark/liutan
img_watermarked, watermarked_u, mat_s, watermarked_vh = it.embed_watermark(img, watermark, scale=scale)
watermark_extracted = it.extract_watermark(img_watermarked, watermarked_u, mat_s, watermarked_vh,
scale=scale)
watermark_extracted_final = reversepad(watermark_extracted, watermark)
watermark_extracted_final = watermark_extracted_final.astype(np.int32)
if save=='no':
return watermark_extracted_final
elif save=='yes':
it.save_image(watermark_extracted_final,'../out/watermarking/extracted_watermark/liutan/extracted_watermark_alpha_{}.png'.format(scale))
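#example usage with the images loaded above (the alpha value here is illustrative):
#   watermark_extract_liutan(view, tree, scale=0.1, save='yes')
#embeds `tree` into `view`, recovers the watermark, and writes it to
#../out/watermarking/extracted_watermark/liutan/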
def watermark_embed_jain(img, watermark, scale, save):
#embeds watermark into image. if save == 'yes', then it will save to out/watermarking/watermarked_image/jain
img_watermarked, watermark_vh = it.embed_watermark_jain(img, watermark, scale=scale)
img_watermarked = img_watermarked.astype(np.int32)
if save=='no':
return img_watermarked
elif save=='yes':
it.save_image(img_watermarked,'../out/watermarking/watermarked_image/jain/watermarked_image_alpha_{}.png'.format(scale))
def watermark_extract_jain(img, watermark, scale, save):
    #embeds watermark into image and then extracts the watermark. if save == 'yes', it will save to out/watermarking/extracted_watermark/jain
img_watermarked, watermark_vh = it.embed_watermark_jain(img, watermark, scale=scale)
watermark_extracted = it.extract_watermark_jain(img_watermarked, img, watermark_vh, scale)
watermark_extracted_final = reversepad(watermark_extracted, watermark)
watermark_extracted_final = watermark_extracted_final.astype(np.int32)
if save=='no':
return watermark_extracted_final
elif save=='yes':
it.save_image(watermark_extracted_final,'../out/watermarking/extracted_watermark/jain/extracted_watermark_alpha_{}.png'.format(scale))
def watermark_embed_jain_mod(img, watermark, scale, save):
#embeds watermark into image. if save == 'yes', then it will save to out/watermarking/watermarked_image/jainmod
img_watermarked, watermark_vh = it.embed_watermark_jain_mod(img, watermark, scale=scale)
img_watermarked = img_watermarked.astype(np.int32)
if save=='no':
return img_watermarked
elif save=='yes':
it.save_image(img_watermarked,'../out/watermarking/watermarked_image/jainmod/watermarked_image_alpha_{}.png'.format(scale))
def watermark_extract_jain_mod(img, watermark, scale, save):
    #embeds watermark into image and then extracts the watermark. if save == 'yes', it will save to out/watermarking/extracted_watermark/jainmod
img_watermarked, watermark_vh = it.embed_watermark_jain_mod(img, watermark, scale=scale)
watermark_extracted = it.extract_watermark_jain_mod(img_watermarked, img, watermark_vh, scale)
watermark_extracted_final = reversepad(watermark_extracted, watermark)
watermark_extracted_final = watermark_extracted_final.astype(np.int32)
if save=='no':
return watermark_extracted_final
elif save=='yes':
it.save_image(watermark_extracted_final,'../out/watermarking/extracted_watermark/jainmod/extracted_watermark_alpha_{}.png'.format(scale))
def perceptibility_liutan(img, watermark, scale):
#watermarked image
img_watermarked, watermarked_u, mat_s, watermarked_vh = it.embed_watermark(img, watermark, scale=scale)
#stacking watermarked image
img_watermarked = img_watermarked.astype(np.int32)
img_watermarked_rows, img_watermarked_columns = img_watermarked.shape[:2]
img_watermarked_stacked = img_watermarked.reshape(img_watermarked_rows, -1)
#stacking image
img = img.astype(np.int32)
img_rows, img_columns = img.shape[:2]
img_stacked = img.reshape(img_rows, -1)
#norm difference
error = (np.linalg.norm(img_watermarked_stacked-img_stacked))/(np.linalg.norm(img_stacked))
return error
def perceptibility_jain(img, watermark, scale):
#watermarked image
img_watermarked, watermark_vh = it.embed_watermark_jain(img, watermark, scale=scale)
#stacking watermarked image
img_watermarked = img_watermarked.astype(np.int32)
img_watermarked_rows, img_watermarked_columns = img_watermarked.shape[:2]
img_watermarked_stacked = img_watermarked.reshape(img_watermarked_rows, -1)
#stacking image
img = img.astype(np.int32)
img_rows, img_columns = img.shape[:2]
img_stacked = img.reshape(img_rows, -1)
#norm difference
error = (np.linalg.norm(img_watermarked_stacked-img_stacked))/(np.linalg.norm(img_stacked))
return error
def perceptibility_jain_mod(img, watermark, scale):
#watermarked image
img_watermarked, watermark_vh = it.embed_watermark_jain_mod(img, watermark, scale=scale)
#stacking watermarked image
img_watermarked = img_watermarked.astype(np.int32)
img_watermarked_rows, img_watermarked_columns = img_watermarked.shape[:2]
img_watermarked_stacked = img_watermarked.reshape(img_watermarked_rows, -1)
#stacking image
img = img.astype(np.int32)
img_rows, img_columns = img.shape[:2]
img_stacked = img.reshape(img_rows, -1)
#norm difference
error = (np.linalg.norm(img_watermarked_stacked-img_stacked))/(np.linalg.norm(img_stacked))
return error
def watermarkedplot(img,watermark,plottype):
scales = np.arange(0.05,2.05,0.05)
differences = []
#liu tan
if plottype == 1:
for scale in scales:
print(scale)
difference = perceptibility_liutan(img, watermark, scale)
differences.append(difference)
#jain
if plottype == 2:
for scale in scales:
print(scale)
difference = perceptibility_jain(img, watermark, scale)
differences.append(difference)
#jain mod
if plottype == 3:
for scale in scales:
print(scale)
difference = perceptibility_jain_mod(img, watermark, scale)
differences.append(difference)
drawgraph_difference(scales,differences,plottype)
def drawgraph_difference(x,y,plottype):
plt.plot(x,y,marker='o')
plt.xlabel('Alpha')
plt.ylabel('Error')
#plt.show()
#liutan
if plottype == 1:
plt.savefig('../out/watermarking/plots/perceptibility/liutan/perceptibility_liutan.png')
if plottype == 2:
plt.savefig('../out/watermarking/plots/perceptibility/jain/perceptibility_jain.png')
if plottype == 3:
plt.savefig('../out/watermarking/plots/perceptibility/jainmod/perceptibility_jain_mod.png')
plt.show()
#lowrank extraction error
def lowrank_image_liutan(img, watermark, scale, rank, save):
#watermarked image
img_watermarked, watermarked_u, mat_s, watermarked_vh = it.embed_watermark(img, watermark, scale=scale)
img_watermarked = img_watermarked.astype(np.int32)
#applying low rank compression to watermarked image
img_watermarked_approx = it.lowrankapprox(img_watermarked,rank)
#extracting watermark using original extraction key and compressed watermarked image
watermark_extracted = it.extract_watermark(img_watermarked_approx, watermarked_u, mat_s, watermarked_vh,
scale=scale)
watermark_extracted = reversepad(watermark_extracted, watermark)
watermark_extracted = watermark_extracted.astype(np.int32)
if save=='no':
return watermark_extracted
elif save=='yes':
it.save_image(watermark_extracted,'../out/watermarking/robustness/lowrankextraction/liutan/extraction_rank_{}_alpha_{}.png'.format(rank,scale))
def lowrank_watermarked_image_liutan(img, watermark, scale, rank, save):
#watermarked image
img_watermarked, watermarked_u, mat_s, watermarked_vh = it.embed_watermark(img, watermark, scale=scale)
img_watermarked = img_watermarked.astype(np.int32)
#applying low rank compression to watermarked image
img_watermarked_approx = it.lowrankapprox(img_watermarked,rank)
img_watermarked_approx = img_watermarked_approx.astype(np.int32)
if save=='no':
return img_watermarked_approx
elif save=='yes':
it.save_image(img_watermarked_approx,'../out/watermarking/robustness/lowrankembedding/liutan/embedding_rank_{}_alpha_{}.png'.format(rank,scale))
def lowrank_image_jain(img, watermark, scale, rank, save):
#watermarked image
img_watermarked, watermark_vh = it.embed_watermark_jain(img, watermark, scale=scale)
img_watermarked = img_watermarked.astype(np.int32)
#applying low rank compression to watermarked image
img_watermarked_approx = it.lowrankapprox(img_watermarked,rank)
#extracting watermark using original extraction key and compressed watermarked image
watermark_extracted = it.extract_watermark_jain(img_watermarked_approx, img, watermark_vh, scale)
watermark_extracted = reversepad(watermark_extracted, watermark)
watermark_extracted = watermark_extracted.astype(np.int32)
if save=='no':
return watermark_extracted
elif save=='yes':
it.save_image(watermark_extracted,'../out/watermarking/robustness/lowrankextraction/jain/extraction_rank_{}_alpha_{}.png'.format(rank,scale))
def lowrank_watermarked_image_jain(img, watermark, scale, rank, save):
#watermarked image
img_watermarked, watermark_vh = it.embed_watermark_jain(img, watermark, scale=scale)
img_watermarked = img_watermarked.astype(np.int32)
#applying low rank compression to watermarked image
img_watermarked_approx = it.lowrankapprox(img_watermarked,rank)
img_watermarked_approx = img_watermarked_approx.astype(np.int32)
if save=='no':
return img_watermarked_approx
elif save=='yes':
it.save_image(img_watermarked_approx,'../out/watermarking/robustness/lowrankembedding/jain/embedding_rank_{}_alpha_{}.png'.format(rank,scale))
def lowrank_image_jain_mod(img, watermark, scale, rank,save):
#watermarked image
img_watermarked, watermark_vh = it.embed_watermark_jain_mod(img, watermark, scale=scale)
img_watermarked = img_watermarked.astype(np.int32)
#applying low rank compression to watermarked image
img_watermarked_approx = it.lowrankapprox(img_watermarked,rank)
#extracting watermark using original extraction key and compressed watermarked image
    watermark_extracted = it.extract_watermark_jain_mod(img_watermarked_approx, img, watermark_vh, scale=scale)
watermark_extracted = reversepad(watermark_extracted, watermark)
watermark_extracted = watermark_extracted.astype(np.int32)
if save=='no':
return watermark_extracted
elif save=='yes':
it.save_image(watermark_extracted,'../out/watermarking/robustness/lowrankextraction/jainmod/extraction_rank_{}_alpha_{}.png'.format(rank,scale))
def lowrank_watermarked_image_jain_mod(img, watermark, scale, rank,save):
#watermarked image
img_watermarked, watermark_vh = it.embed_watermark_jain_mod(img, watermark, scale=scale)
img_watermarked = img_watermarked.astype(np.int32)
#applying low rank compression to watermarked image
img_watermarked_approx = it.lowrankapprox(img_watermarked,rank)
img_watermarked_approx = img_watermarked_approx.astype(np.int32)
if save=='no':
return img_watermarked_approx
elif save=='yes':
it.save_image(img_watermarked_approx,'../out/watermarking/robustness/lowrankembedding/jainmod/embedding_rank_{}_alpha_{}.png'.format(rank,scale))
def lowrank_error_liutan(img, watermark, scale, rank):
#watermarked image
img_watermarked, watermarked_u, mat_s, watermarked_vh = it.embed_watermark(img, watermark, scale=scale)
#applying low rank compression to watermarked image
img_watermarked_approx = it.lowrankapprox(img_watermarked,rank)
#extracting watermark using original extraction key and compressed watermarked image
watermark_extracted = it.extract_watermark(img_watermarked_approx, watermarked_u, mat_s, watermarked_vh,
scale=scale)
watermark_extracted = reversepad(watermark_extracted, watermark)
#stacking extracted watermark
watermark_extracted = watermark_extracted.astype(np.float64)
watermark_extracted_rows, watermark_extracted_columns = watermark_extracted.shape[:2]
watermark_extracted_stacked = watermark_extracted.reshape(watermark_extracted_rows, -1)
#stacking original watermark
watermark = watermark.astype(np.float64)
watermark_rows, watermark_columns = watermark.shape[:2]
watermark_stacked = watermark.reshape(watermark_rows, -1)
#norm difference
error = (np.linalg.norm(watermark_extracted_stacked-watermark_stacked))/(np.linalg.norm(watermark_stacked))
return error
def lowrank_error_jain(img, watermark, scale, rank):
#watermarked image
img_watermarked, watermark_vh = it.embed_watermark_jain(img, watermark, scale=scale)
#applying low rank compression to watermarked image
img_watermarked_approx = it.lowrankapprox(img_watermarked,rank)
#extracting watermark using original extraction key and compressed watermarked image
watermark_extracted = it.extract_watermark_jain(img_watermarked_approx, img, watermark_vh, scale)
watermark_extracted = reversepad(watermark_extracted, watermark)
#stacking extracted watermark
watermark_extracted = watermark_extracted.astype(np.float64)
watermark_extracted_rows, watermark_extracted_columns = watermark_extracted.shape[:2]
watermark_extracted_stacked = watermark_extracted.reshape(watermark_extracted_rows, -1)
#stacking original watermark
watermark = watermark.astype(np.float64)
watermark_rows, watermark_columns = watermark.shape[:2]
watermark_stacked = watermark.reshape(watermark_rows, -1)
#norm difference
error = (np.linalg.norm(watermark_extracted_stacked-watermark_stacked))/(np.linalg.norm(watermark_stacked))
return error
def lowrank_error_jain_mod(img, watermark, scale, rank):
#watermarked image
img_watermarked, watermark_vh = it.embed_watermark_jain_mod(img, watermark, scale=scale)
#applying low rank compression to watermarked image
img_watermarked_approx = it.lowrankapprox(img_watermarked,rank)
#extracting watermark using original extraction key and compressed watermarked image
watermark_extracted = it.extract_watermark_jain_mod(img_watermarked_approx, img, watermark_vh, scale=scale)
watermark_extracted = reversepad(watermark_extracted, watermark)
#stacking extracted watermark
watermark_extracted = watermark_extracted.astype(np.float64)
watermark_extracted_rows, watermark_extracted_columns = watermark_extracted.shape[:2]
watermark_extracted_stacked = watermark_extracted.reshape(watermark_extracted_rows, -1)
#stacking original watermark
watermark = watermark.astype(np.float64)
watermark_rows, watermark_columns = watermark.shape[:2]
watermark_stacked = watermark.reshape(watermark_rows, -1)
#norm difference
error = (np.linalg.norm(watermark_extracted_stacked-watermark_stacked))/(np.linalg.norm(watermark_stacked))
return error
def lowrank_extractionerror_plot_liutan(img,watermark):
alphas = (0.05,0.1,0.5,0.75)
ranks = | np.arange(1,300) | numpy.arange |
# Copyright (c) Facebook, Inc. and its affiliates.
import os
'''
This forces the environment to use only 1 cpu when running.
This could be helpful when launching multiple environment simulatenously.
'''
os.environ['OPENBLAS_NUM_THREADS'] = '1'
os.environ['MKL_NUM_THREADS'] = '1'
# os.environ['CUDA_VISIBLE_DEVICES'] = '-1'
import numpy as np
import copy
import pybullet as pb
import pybullet_data
from bullet import bullet_client
from bullet import bullet_utils as bu
from fairmotion.ops import conversions
from fairmotion.ops import math
from fairmotion.utils import constants
import sim_agent
import sim_obstacle
import importlib.util
class Env(object):
'''
This environment defines a base environment where the simulated
characters exist and they are controlled by tracking controllers
'''
def __init__(self,
fps_sim,
fps_act,
char_info_module,
sim_char_file,
ref_motion_scale,
actuation,
self_collision=None,
contactable_body=None,
verbose=False,
):
self._num_agent = len(sim_char_file)
assert self._num_agent > 0
assert self._num_agent == len(char_info_module)
assert self._num_agent == len(ref_motion_scale)
self._char_info = []
for i in range(self._num_agent):
            ''' Load Character Info Module '''
spec = importlib.util.spec_from_file_location(
"char_info%d"%(i), char_info_module[i])
char_info = importlib.util.module_from_spec(spec)
spec.loader.exec_module(char_info)
self._char_info.append(char_info)
            ''' Modify Contactable Body Parts '''
if contactable_body:
contact_allow_all = True if 'all' in contactable_body else False
for joint in list(char_info.contact_allow_map.keys()):
char_info.contact_allow_map[joint] = \
contact_allow_all or char_info.joint_name[joint] in contactable_body
self._v_up = self._char_info[0].v_up_env
''' Define PyBullet Client '''
self._pb_client = bullet_client.BulletClient(
connection_mode=pb.DIRECT, options=' --opengl2')
self._pb_client.setAdditionalSearchPath(pybullet_data.getDataPath())
''' timestep for physics simulation '''
self._dt_sim = 1.0/fps_sim
''' timestep for control of dynamic controller '''
self._dt_act = 1.0/fps_act
if fps_sim%fps_act != 0:
            raise Exception('FPS_SIM should be a multiple of FPS_ACT')
self._num_substep = fps_sim//fps_act
self._verbose = verbose
self.setup_physics_scene(sim_char_file,
self._char_info,
ref_motion_scale,
self_collision,
actuation)
''' Elapsed time after the environment starts '''
self._elapsed_time = 0.0
''' For tracking the length of current episode '''
self._episode_len = 0.0
''' Create a Manager for Handling Obstacles '''
self._obs_manager = sim_obstacle.ObstacleManager(
self._pb_client, self._dt_act, self._char_info[0].v_up_env)
''' Save the initial pybullet state to clear all thing before calling reset '''
self._init_state = None
self.reset()
self._init_state = self._pb_client.saveState()
def setup_physics_scene(self, sim_char_file, char_info, ref_motion_scale, self_collision, actuation):
self._pb_client.resetSimulation()
self.create_ground()
self._agent = []
for i in range(self._num_agent):
self._agent.append(sim_agent.SimAgent(name='sim_agent_%d'%(i),
pybullet_client=self._pb_client,
model_file=sim_char_file[i],
char_info=char_info[i],
ref_scale=ref_motion_scale[i],
self_collision=self_collision[i],
actuation=actuation[i],
kinematic_only=False,
verbose=self._verbose))
def create_ground(self):
''' Create Plane '''
if np.allclose(np.array([0.0, 0.0, 1.0]), self._v_up):
R_plane = constants.eye_R()
else:
R_plane = math.R_from_vectors(np.array([0.0, 0.0, 1.0]), self._v_up)
self._plane_id = \
self._pb_client.loadURDF(
"plane_implicit.urdf",
[0, 0, 0],
conversions.R2Q(R_plane),
useMaximalCoordinates=True)
self._pb_client.changeDynamics(self._plane_id, linkIndex=-1, lateralFriction=0.9)
''' Dynamics parameters '''
assert np.allclose(np.linalg.norm(self._v_up), 1.0)
gravity = -9.8 * self._v_up
self._pb_client.setGravity(gravity[0], gravity[1], gravity[2])
self._pb_client.setTimeStep(self._dt_sim)
self._pb_client.setPhysicsEngineParameter(numSubSteps=2)
self._pb_client.setPhysicsEngineParameter(numSolverIterations=10)
# self._pb_client.setPhysicsEngineParameter(solverResidualThreshold=1e-10)
def check_collision(self, body_id1, body_id2, link_id1=None, link_id2=None):
''' collision between two bodies '''
pts = self._pb_client.getContactPoints(
bodyA=body_id1, bodyB=body_id2, linkIndexA=link_id1, linkIndexB=link_id2)
        return len(pts) > 0
# def check_falldown(self, agent, plane_id=None):
# ''' check if any non-allowed body part hits the ground '''
# if plane_id is None: plane_id = self._plane_id
# pts = self._pb_client.getContactPoints()
# for p in pts:
# part = None
# #ignore self-collision
# if p[1] == p[2]: continue
# if p[1] == agent._body_id and p[2] == plane_id: part = p[3]
# if p[2] == agent._body_id and p[1] == plane_id: part = p[4]
# #ignore collision of other agents
# if part == None: continue
# if not agent._char_info.contact_allow_map[part]: return True
# return False
def check_falldown(self, agent, plane_id=None):
''' check if any non-allowed body part hits the ground '''
if plane_id is None: plane_id = self._plane_id
pts = self._pb_client.getContactPoints(
bodyA=agent._body_id, bodyB=plane_id)
for p in pts:
part = p[3] if p[1] == agent._body_id else p[4]
if agent._char_info.contact_allow_map[part]:
continue
else:
return True
return False
def is_sim_div(self, agent):
''' TODO: check divergence of simulation '''
return False
def step(self, target_poses=[]):
'''
One Step-forward Simulation
'''
''' Increase elapsed time '''
self._elapsed_time += self._dt_act
self._episode_len += self._dt_act
''' Update simulation '''
for _ in range(self._num_substep):
for i, target_pose in enumerate(target_poses):
self._agent[i].actuate(pose=target_pose,
vel=None)
self._pb_client.stepSimulation()
self._obs_manager.update()
def reset(self, time=0.0, poses=None, vels=None, pb_state_id=None):
''' remove obstacles in the scene '''
self._obs_manager.clear()
'''
Restore internal pybullet state
        by using the saved info when Env was initially created
'''
if pb_state_id is not None:
self._pb_client.restoreState(pb_state_id)
self._elapsed_time = time
if poses is None:
if self._init_state is not None:
self._pb_client.restoreState(self._init_state)
else:
for i in range(self._num_agent):
pose = poses[i]
vel = None if vels is None else vels[i]
self._agent[i].set_pose(pose, vel)
self._episode_len = 0.0
def add_noise_to_pose_vel(self, agent, pose, vel=None, return_as_copied=True):
'''
Add a little bit of noise to the given pose and velocity
'''
ref_pose = copy.deepcopy(pose) if return_as_copied else pose
if vel:
ref_vel = copy.deepcopy(vel) if return_as_copied else vel
dof_cnt = 0
for j in agent._joint_indices:
joint_type = agent.get_joint_type(j)
''' Ignore fixed joints '''
if joint_type == self._pb_client.JOINT_FIXED:
continue
''' Ignore if there is no corresponding joint '''
if agent._char_info.bvh_map[j] == None:
continue
T = ref_pose.get_transform(agent._char_info.bvh_map[j], local=True)
R, p = conversions.T2Rp(T)
if joint_type == self._pb_client.JOINT_SPHERICAL:
dR = math.random_rotation(
mu_theta=agent._char_info.noise_pose[j][0],
sigma_theta=agent._char_info.noise_pose[j][1],
lower_theta=agent._char_info.noise_pose[j][2],
upper_theta=agent._char_info.noise_pose[j][3])
dof_cnt += 3
elif joint_type == self._pb_client.JOINT_REVOLUTE:
theta = math.truncnorm(
mu=agent._char_info.noise_pose[j][0],
sigma=agent._char_info.noise_pose[j][1],
lower=agent._char_info.noise_pose[j][2],
upper=agent._char_info.noise_pose[j][3])
joint_axis = agent.get_joint_axis(j)
dR = conversions.A2R(joint_axis*theta)
dof_cnt += 1
else:
raise NotImplementedError
            T_new = conversions.Rp2T(np.dot(R, dR), p)
import numpy as np
import torch
import torch.nn.functional as F
from scipy.spatial.transform import Rotation as R
from torch import nn
import spherical_sampling
from module_utils import MLP
from unet_parts import *
class UNet(nn.Module):
def __init__(self, n_channels, n_classes, bilinear=True):
super(UNet, self).__init__()
self.n_channels = n_channels
self.n_classes = n_classes
self.bilinear = bilinear
self.inc = Conv(n_channels, 32)
self.down1 = Down(32, 64)
self.down2 = Down(64, 128)
self.down3 = Down(128, 256)
factor = 2 if bilinear else 1
self.down4 = Down(256, 512 // factor)
self.up1 = Up(512, 256 // factor, bilinear)
self.up2 = Up(256, 128 // factor, bilinear)
self.up3 = Up(128, 64 // factor, bilinear)
self.up4 = Up(64, 32, bilinear)
self.outc = OutConv(32, n_classes)
def forward(self, x):
x1 = self.inc(x)
x2 = self.down1(x1)
x3 = self.down2(x2)
x4 = self.down3(x3)
x5 = self.down4(x4)
x = self.up1(x5, x4)
x = self.up2(x, x3)
x = self.up3(x, x2)
x = self.up4(x, x1)
logits = self.outc(x)
return logits
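# Example (illustrative): with the channel counts used below (10 in, 2 out), and
# assuming the Conv/Down/Up/OutConv blocks from unet_parts behave like standard
# U-Net parts, a forward pass preserves spatial resolution whenever H and W are
# divisible by 16 (four downsampling stages), e.g.
#   net = UNet(10, 2)
#   out = net(torch.zeros(1, 10, 128, 160))   # -> shape [1, 2, 128, 160]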
class DirModel(nn.Module):
def __init__(self, num_directions, model_type):
super().__init__()
self.num_directions = num_directions
self.model_type = model_type
self.raw_directions = spherical_sampling.fibonacci(num_directions, co_ords='cart')
image_feature_dim = 256
action_feature_dim = 128
output_dim = 1
self.sgn_action_encoder = MLP(3, action_feature_dim, [action_feature_dim, action_feature_dim])
self.mag_action_encoder = MLP(3, action_feature_dim, [action_feature_dim, action_feature_dim])
if 'sgn' in model_type:
self.sgn_image_encoder_1 = Conv(20, 32)
self.sgn_image_encoder_2 = Down(32, 64)
self.sgn_image_encoder_3 = Down(64, 128)
self.sgn_image_encoder_4 = Down(128, 256)
self.sgn_image_encoder_5 = Down(256, 512)
self.sgn_image_encoder_6 = Down(512, 512)
self.sgn_image_encoder_7 = Down(512, 512)
self.sgn_image_feature_extractor = MLP(512*7*10, image_feature_dim, [image_feature_dim])
self.sgn_decoder = MLP(image_feature_dim + action_feature_dim, 3 * output_dim, [1024, 1024, 1024])
if 'mag' in model_type:
num_channels = 20 if model_type == 'mag' else 10
self.mag_image_encoder_1 = Conv(num_channels, 32)
self.mag_image_encoder_2 = Down(32, 64)
self.mag_image_encoder_3 = Down(64, 128)
self.mag_image_encoder_4 = Down(128, 256)
self.mag_image_encoder_5 = Down(256, 512)
self.mag_image_encoder_6 = Down(512, 512)
self.mag_image_encoder_7 = Down(512, 512)
self.mag_image_feature_extractor = MLP(512*7*10, image_feature_dim, [image_feature_dim])
self.mag_decoder = MLP(image_feature_dim + action_feature_dim, output_dim, [1024, 1024, 1024])
# Initialize random weights
for m in self.named_modules():
if isinstance(m[1], nn.Conv2d) or isinstance(m[1], nn.Conv3d):
nn.init.kaiming_normal_(m[1].weight.data)
elif isinstance(m[1], nn.BatchNorm2d) or isinstance(m[1], nn.BatchNorm3d):
m[1].weight.data.fill_(1)
m[1].bias.data.zero_()
def forward(self, observation, directions=None):
if 'sgn' in self.model_type:
x0 = observation
x1 = self.sgn_image_encoder_1(x0)
x2 = self.sgn_image_encoder_2(x1)
x3 = self.sgn_image_encoder_3(x2)
x4 = self.sgn_image_encoder_4(x3)
x5 = self.sgn_image_encoder_5(x4)
x6 = self.sgn_image_encoder_6(x5)
x7 = self.sgn_image_encoder_7(x6)
embedding = x7.reshape([x7.size(0), -1])
sgn_feature = self.sgn_image_feature_extractor(embedding)
if 'mag' in self.model_type:
x0 = observation if self.model_type == 'mag' else observation[:, :10]
x1 = self.mag_image_encoder_1(x0)
x2 = self.mag_image_encoder_2(x1)
x3 = self.mag_image_encoder_3(x2)
x4 = self.mag_image_encoder_4(x3)
x5 = self.mag_image_encoder_5(x4)
x6 = self.mag_image_encoder_6(x5)
x7 = self.mag_image_encoder_7(x6)
embedding = x7.reshape([x7.size(0), -1])
mag_feature = self.mag_image_feature_extractor(embedding)
batch_size = observation.size(0)
if directions is None:
directions = list()
for _ in range(observation.size(0)):
r_mat_T = R.from_euler('xyz', np.random.rand(3) * 360, degrees=True).as_matrix().T
directions.append(self.raw_directions @ r_mat_T)
directions = np.asarray(directions)
else:
if len(directions.shape) == 2:
directions = directions[:, np.newaxis]
num_directions = directions.shape[1]
torch_directions = torch.from_numpy(directions.astype(np.float32)).to(observation.device)
sgn_direction_features = [self.sgn_action_encoder(torch_directions[:, i]) for i in range(num_directions)]
mag_direction_features = [self.mag_action_encoder(torch_directions[:, i]) for i in range(num_directions)]
sgn_output, mag_output = None, None
if 'sgn' in self.model_type:
sgn_output = list()
for i in range(num_directions):
feature_input = torch.cat([sgn_feature, sgn_direction_features[i]], dim=1)
sgn_output.append(self.sgn_decoder(feature_input))
sgn_output = torch.stack(sgn_output, dim=1)
if 'mag' in self.model_type:
mag_output = list()
for i in range(num_directions):
feature_input = torch.cat([mag_feature, mag_direction_features[i]], dim=1)
mag_output.append(self.mag_decoder(feature_input))
mag_output = torch.stack(mag_output, dim=1).squeeze(2)
output = sgn_output, mag_output, directions
return output
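# Shape note (illustrative, assuming each Down block from unet_parts halves the
# spatial resolution and Conv preserves it): the 512*7*10 flatten above implies
# an input observation of roughly 448 x 640 pixels (7*2**6 by 10*2**6) with 20
# channels, e.g.
#   model = DirModel(num_directions=64, model_type='sgn_mag')
#   sgn, mag, dirs = model(torch.zeros(1, 20, 448, 640))
#   # sgn: [1, 64, 3] logits, mag: [1, 64], dirs: numpy array [1, 64, 3]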
class Model():
def __init__(self, num_directions, model_type):
self.num_directions = num_directions
self.model_type = model_type
self.pos_model = UNet(10, 2)
self.dir_model = DirModel(num_directions, model_type)
def get_direction_affordance(self, observations, model_type, torch_tensor=False, directions=None):
"""Get position affordance maps.
Args:
observations: list of dict
- image: [W, H, 10]. dtype: float32
- image_init: [W, H, 10]. dtype: float32
model_type: 'sgn', 'mag', 'sgn_mag'
            torch_tensor: Whether the return value is a torch tensor (default is numpy array). torch tensor is used for training.
Return:
affordance_maps: numpy array/torch tensor, [B, K, W, H]
directions: list of direction vector
"""
skip_id_list = list()
scene_inputs = []
for id, observation in enumerate(observations):
if observation is None:
skip_id_list.append(id)
continue
scene_inputs.append(np.concatenate([observation['image'].transpose([2, 0, 1]), observation['image_init'].transpose([2, 0, 1])], axis=0))
scene_input_tensor = torch.from_numpy(np.stack(scene_inputs))
sgn_output, mag_output, skipped_directions = self.dir_model.forward(scene_input_tensor.to(self.device_dir), directions=directions) # [B, K, W, H]
if torch_tensor:
assert len(skip_id_list) == 0
return sgn_output, mag_output, None
else:
if model_type == 'sgn':
affordance_maps = 1 - F.softmax(sgn_output, dim=2)[:, :, 1]
elif model_type == 'mag':
affordance_maps = mag_output
elif model_type == 'sgn_mag':
sgn = sgn_output.max(2)[1] - 1
affordance_maps = sgn * F.relu(mag_output)
skipped_affordance_maps = affordance_maps.data.cpu().numpy()
affordance_maps = list()
directions = list()
cur = 0
for id in range(len(skipped_affordance_maps)+len(skip_id_list)):
if id in skip_id_list:
affordance_maps.append(None)
directions.append(None)
else:
affordance_maps.append(skipped_affordance_maps[cur])
directions.append(skipped_directions[cur])
cur += 1
return affordance_maps, directions
def get_position_affordance(self, observations, torch_tensor=False):
"""Get position affordance maps.
Args:
observations: list of dict
- image: [W, H, 10]. dtype: float32
            torch_tensor: Whether the return value is a torch tensor (default is numpy array). torch tensor is used for training.
Return:
affordance_maps: numpy array/torch tensor, [B, K, W, H]
"""
skip_id_list = list()
scene_inputs = []
for observation in observations:
scene_inputs.append(observation['image'].transpose([2, 0, 1]))
scene_input_tensor = torch.from_numpy( | np.stack(scene_inputs) | numpy.stack |
import os.path
from scipy.optimize import fsolve
import math
import numpy as np
from matplotlib import pyplot as plt
import pandas as pd
import utils_Florian as utils
def equations(p, t_peak, t_half):
x, y = p
return (0.5 * (math.exp(-x * t_peak) - math.exp(-y * t_peak)) - (math.exp(-x * t_half) - math.exp(-y * t_half)), -x * math.exp(-x * t_peak) + y * math.exp(-y * t_peak))
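# The two residuals above encode a double-exponential response
#   h(t) = exp(-y*t) - exp(-x*t)      (with x > y, so h(t) > 0 for t > 0)
# constrained so that (i) h'(t_peak) = 0, i.e. the response peaks at t_peak
# (second component), and (ii) h(t_half) = h(t_peak) / 2, i.e. the response has
# decayed to half its peak value at t_half (first component). fsolve then
# searches for (x, y) starting from the initial guesses listed below.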
results = pd.DataFrame()
t_peaks = []
t_halfs = []
xs = []
ys = []
initial_conditions = ((12, 5),
(14, 4),
(14, 4),
(30, 1),
(30, 1),
(30, 1),
(30, 1),
(30, 1),
(30, 1),
(30, 1),
(30, 1),
(30, 1),
(30, 1),
(30, 1),
(30, 1),
(30, 1))
for alpha in range(1, 16):
t_peak = 0.1415
t_half = t_peak + 0.2 + alpha * 0.05
print("Target: ", t_half)
x, y = fsolve(equations, initial_conditions[alpha], args=(t_peak, t_half))
t_peaks.append(t_peak)
t_halfs.append(t_half - t_peak)
xs.append(x)
ys.append(y)
t = np.linspace(0, 2.0, 10000)
crf = -np.exp(-x * t) + np.exp(-y * t)
crf = crf / sum(crf)
print("t peak", t[ | np.argmax(crf) | numpy.argmax |
import os
import glob
import numpy as np
from scipy import interpolate
from scipy.spatial.transform import Rotation as R, RotationSpline
from copy import deepcopy
from collections import defaultdict
from .rotations import convert_quat_wxyz_to_xyzw, \
    convert_quat_xyzw_to_wxyz, quat_mult, convert_quat_to_euler,\
    convert_euler_to_quat, quat_inv, vec_rotate

# nested dict replacement
nested_dict = lambda: defaultdict(nested_dict)
def convert_nestedddict_to_regular(d):
"""
Converts nested defaultdict object to regular python nested dicts.
"""
if isinstance(d, defaultdict):
d = {k: convert_nestedddict_to_regular(v) for k, v in d.items()}
return d
def truncate_dict_of_arrays(dict_array, s_idx=0, e_idx=999999999999999,
inplace=False):
"""Truncate arrays inside a dictionary to desired start/end idx."""
if not inplace:
dict_array = deepcopy(dict_array)
for k, v in dict_array.items():
if isinstance(v, (np.ndarray, list)):
dict_array[k] = v[s_idx:e_idx]
return dict_array
def select_idx_dict_of_arrays(dict_array, axis_idx_dict,
inplace=False, ignore_idx_errors=True):
"""Selects indexes in desired axis in arrays inside a dictionary.
Args:
dict_array(dict[str,np.ndarray): dictionary with
arrays or tensors.
axis_idx_dict(dict[int,list[int]]): dictionary with indexes to
select for each axis as {axis:indexes}.
inplace(bool): if changes should be done on input arrays.
ignore_idx_errors(bool): if errors on arrays with missing
dimensions should be ignored.
Return:
dict[str,np.ndarray]:
"""
out_dict_array = (dict_array if inplace else dict())
for k, v in dict_array.items():
ix = [axis_idx_dict.get(dim, slice(None)) for dim in range(v.ndim)]
if isinstance(v, (np.ndarray)):
try:
out_dict_array[k] = v[tuple(ix)]
except IndexError as e:
if not ignore_idx_errors:
raise e
else:
out_dict_array[k] = v
return out_dict_array
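# Example (illustrative data): keep only channels 0 and 2 along axis 1 of every
# array in the dict; arrays that do not have that axis are returned unchanged.
#   data = {"acc": np.zeros((100, 3)), "labels": np.zeros(100)}
#   out = select_idx_dict_of_arrays(data, axis_idx_dict={1: [0, 2]})
#   out["acc"].shape      # -> (100, 2)
#   out["labels"].shape   # -> (100,)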
def find_resource_path(path, max_up=5):
"""Recursively looks for files on the current directory and parents.
Args:
path(str): base path or files name of resource to search.
max_up(int): max parent directory from which to recurse.
Returns:
(str): path to found resource or None if it was not found.
"""
for i in range(max_up):
pmatch = glob.glob(os.path.join("**/", path), recursive=True)
if not pmatch:
path = "../" + path
else:
path = pmatch[0]
return path
return None
def resample_data_frequency_factor(data, factor, axis=0, method="cubic"):
"""
Resample the data by the desired factor (assumes uniform sampling).
Args:
data(np.ndarray): data array to resample.
factor(float): factor to resample the data. if factor<1.0, decimation
is performed, otherwise, interpolation is performed.
axis(int): index of the axis to interpolate data along.
method(str): method to use for resampling the data. Defaults to
cubic spline. When resampling quaternion rotations,
use "slerp".
Returns:
(np.ndarray): The resampled data array.
"""
x = np.arange(data.shape[axis])
x_new = np.linspace(0, data.shape[axis] - 1, round(data.shape[axis] * factor), endpoint=True)
return resample_data_frequency(data, orig_t=x, target_t=x_new,
axis=axis, method=method)
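# Example (illustrative): upsample a uniformly sampled signal by a factor of 2
# along axis 0 using the default cubic interpolation.
#   sig = np.sin(np.linspace(0, 2 * np.pi, 50))[:, None]      # shape (50, 1)
#   up = resample_data_frequency_factor(sig, factor=2.0)      # shape (100, 1)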
def resample_data_frequency(data, orig_t, target_t, axis=0, method="cubic"):
"""
Resample the data from original sampling to target.
Args:
data(np.ndarray): data array to resample.
orig_t(np.ndarray): original timestamps for each data point.
target_t(np.ndarray): target timestamps for resampled points.
axis(int): index of the axis to interpolate data along.
method(str): method to use for resampling the data. Defaults to
cubic spline. When resampling quaternion rotations,
use "slerp".
Returns:
(np.ndarray): The resampled data array.
"""
if method == "slerp":
assert axis == 0, "Spherical Rotation Spline only works when axis=0"
data = convert_quat_wxyz_to_xyzw(data) # convert quats to scalar_last for scipy
if len(np.shape(data)) == 3: # multiple segments
sampled_data = np.array(
[RotationSpline(orig_t, R.from_quat(ori))(target_t).as_quat()
for ori in data.transpose((1, 0, 2))
]).transpose((1, 0, 2))
else:
sampled_data = RotationSpline(orig_t, R.from_quat(data))(target_t).as_quat()
return convert_quat_xyzw_to_wxyz(sampled_data) # convert quats back to scalar_first
else:
return interpolate.interp1d(orig_t, data, kind=method, axis=axis,
bounds_error=False, fill_value="extrapolate")(target_t)
def find_low_variability_sections(data, threshold, window_size=21,
axis=0, thresh_method="max"):
"""
Find sections of data which contain low variability.
Args:
data(np.ndarray): data to search.
window_size(int): window size to average data.
threshold(float): threshold value to consider low_variability
axis(int): axis on which to search.
thresh_method(str): channel reduction method to compare with
threshold. One of ["mean", "min", "max"]
Returns:
(list[int]): indexes of samples with low variability over
the desired axis
"""
from scipy.signal import convolve
from scipy.signal.windows import gaussian
reduce_dims = list(np.arange(len(data.shape)))
reduce_dims.remove(axis)
reduce_dims = tuple(reduce_dims)
# apply gaussian smoothing over the axis
kernel = np.expand_dims(gaussian(window_size, std=window_size / 8), axis=reduce_dims)
smooth_signal = convolve(data, kernel / kernel.sum(), mode="same")
# calculate ||pointwise derivatives||
diff = np.abs(np.diff(smooth_signal, axis=0))
diff[-window_size:window_size, ...] = 0
# take (mean, min, max) variability over all channels
if thresh_method == "mean":
avg_diff = np.mean(diff, axis=reduce_dims)
elif thresh_method == "min":
avg_diff = np.min(diff, axis=reduce_dims)
elif thresh_method == "max":
avg_diff = np.max(diff, axis=reduce_dims)
else:
raise NotImplementedError
return list(np.where(avg_diff < threshold)[0])
def remove_outliers(data, std=2.0):
"""Statistical outlier removal for each axis independently.
Args:
data(np.ndarray): data from which to remove outliers.
std(float): stdev for vaues to be considered outliers.
Returns:
(np.ndarray): data with outliers removed
"""
assert len(data.shape) == 2
num, dim = data.shape
inliers = list(range(num))
outliers = []
for ax in range(dim):
diff = np.append(0., np.diff(data[..., ax]))
ax_outliers = np.where(np.abs(diff) > np.abs(np.mean(diff)) + (std * np.std(diff)))[0]
outliers.extend(ax_outliers)
    inliers = np.array(sorted(set(inliers) - set(outliers)))
return data[inliers]
def reset_skeleton_position(pos_args_list, pos_ref, axis2reset=(True, True, False)):
"""
Resets position of data by removing a reference position.
Args:
pos_args_list(list[np.ndarray]): list with arrays of 3d points
from which to remove the reference.
pos_ref (np.ndarray[3x]): reference position.
axis2reset (tuple[bool]): operation mask. Only axis (xyz) with
True will be reset.
Returns:
(list[np.ndarray]): 3d arrays with reset position.
"""
return [(p - pos_ref * axis2reset) for p in pos_args_list]
def reset_skeleton_orientation(rot_ref, orient_arg_list=(),
pos_args_list=(), vec_args_list=(),
axis2reset=(False, False, True)):
"""
Reset orientation by removing a reference rotation.
Args:
rot_ref(np.ndarray): reference rotation.
orient_arg_list(tuple[np.ndarray]): list with arrays of
quaternion orientations.
pos_args_list(tuple[np.ndarray]): list with arrays of 3d
positions around a center point (assumed to be
index 0 - root).
vec_args_list(tuple[np.ndarray]): list with arrays of 3d
vectors.
axis2reset(tuple[bool, bool, bool]): operation mask. Only axis
(xyz) with True will be reset.
Returns:
(list[np.ndarray], list[np.ndarray], list[np.ndarray]):
orientation and position arrays with reset orientations.
"""
# reset heading (rot over z-axis, so that subject is facing forward)
inv_init_ori = quat_inv(
convert_euler_to_quat(
convert_quat_to_euler(rot_ref, seq="xyz") * axis2reset,
seq="xyz",
)
)
reset_ori_data = []
for ori in orient_arg_list:
ori_shape = ori.shape
ori = quat_mult(inv_init_ori, ori.reshape(-1, 4)).reshape(*ori_shape)
reset_ori_data.append(ori)
reset_pos_data = []
for pos in pos_args_list:
pos_shape = pos.shape
# center rotation to origin before rotating
init_pos = pos[0, 0]
pos = pos - init_pos
# apply rotation
pos = vec_rotate(pos.reshape(-1, 3), inv_init_ori).reshape(*pos_shape)
# restore position to original
pos = pos + init_pos
reset_pos_data.append(pos)
reset_vec_data = []
for vec in vec_args_list:
vec_shape = vec.shape
vec = vec_rotate(vec.reshape(-1, 3), inv_init_ori).reshape(*vec_shape)
reset_vec_data.append(vec)
return reset_ori_data, reset_pos_data, reset_vec_data
def apply_procrustes_alignment(pred, target):
"""
Applies procrustes alignment to find closest fit from "pred" to "target" data.
Args:
pred(np.ndarray[Nx3]): source array to be aligned.
target(np.ndarray[Nx3]): target array for alignment.
Returns:
predicted_aligned - Procrustes aligned data
"""
pred = pred[np.newaxis, ...]
target = target[np.newaxis, ...]
muX = np.mean(target, axis=1, keepdims=True)
muY = np.mean(pred, axis=1, keepdims=True)
X0 = target - muX
Y0 = pred - muY
normX = np.sqrt(np.sum(X0 ** 2, axis=(1, 2), keepdims=True))
normY = np.sqrt( | np.sum(Y0 ** 2, axis=(1, 2), keepdims=True) | numpy.sum |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 29 7:07pm 2020
Meant to interface with Lv2_dj_lsp and functions from stingray.pulse.pulsar to
analyze Swift data pertaining to NGC 300 X-1 in one place, instead of having
the analysis spread between Lv2_dj_lsp.py and test.py
"""
from __future__ import division, print_function
import numpy as np
from astropy.io import fits
import matplotlib.pyplot as plt
import Lv0_dirs,Lv2_dj_lsp,Lv2_swift_lc,Lv2_phase
import os
from scipy import stats
from scipy.optimize import curve_fit
from tqdm import tqdm
import subprocess
from matplotlib import cm
from PyAstronomy.pyasl import foldAt
from mpl_toolkits.mplot3d import Axes3D
import mplcursors
import pathlib
from stingray.pulse.pulsar import pulse_phase,phase_exposure,fold_events
#####
## Noting here first that all the barycentering, time-ordering, extracting events
## (with XSELECT), doing exposure corrections (xrtlccorr), and subsequently the
## background subtraction, are all done in Lv2_swift_lc. There's no need to do so here.
#####
##### Parameters
eventfile = '/Volumes/Samsung_T5/NGC300_ULX_Swift/xrt/event/ngc300x1/ngc300x1_merge_niceroverlap_all.evt' #1 year of data; overlaps with NICER
#eventfile = '/Volumes/Samsung_T5/NGC300_ULX_Swift/xrt/event/ngc300x1/ngc300x1_swift_dec16_may19.evt'
#eventfile = '/Volumes/Samsung_T5/NGC300_ULX_Swift/xrt/event/ngc300x1/ngc300x1_merge.evt' #all 14 years
eventfile_xmm = '/Volumes/Samsung_T5/NGC300_XMMdata/ngc300x1_pn.evt'
times = fits.open(eventfile)[1].data['TIME'] #getting array of times
times_xmm = fits.open(eventfile_xmm)[1].data['TIME']
gtis_data = fits.open(eventfile)[2].data #getting GTIs
gtis_data_xmm = fits.open(eventfile_xmm)[59].data #59 for pn, 15 for mos1, 19 for mos2
T = sum([ gtis_data[i]['STOP']-gtis_data[i]['START'] for i in range(len(gtis_data)) ]) #exposure time
T_xmm = sum([ gtis_data_xmm[i]['STOP']-gtis_data_xmm[i]['START'] for i in range(len(gtis_data_xmm)) ]) #exposure time
print(T_xmm)
T0_MJD = fits.open(eventfile)[1].header['MJDREFI'] + fits.open(eventfile)[1].header['MJDREFF'] + fits.open(eventfile)[1].header['TSTART']/86400 #SWIFT
T0_MJD_eclipse = 58239.3498 #mid-eclipse!
T0_MJD_xmm = fits.open(eventfile_xmm)[1].header['MJDREF'] + fits.open(eventfile_xmm)[1].header['TSTART']/86400 #XMM-NEWTON
MJDREFI = fits.open(eventfile)[1].header['MJDREFI'] #Swift
MJDREFF = fits.open(eventfile)[1].header['MJDREFF'] #Swift
MJDREF = fits.open(eventfile_xmm)[1].header['MJDREF'] #XMM-Newton
diff_swiftxmm = (MJDREFI+MJDREFF-MJDREF)*86400
##### Get the phase offset between Swift eclipse time and XMM's first event time:
Porb_days = (1/8.4712e-6)/86400
xmm_first = MJDREF + times_xmm[0]/86400
no_cycles = (T0_MJD_eclipse - T0_MJD_xmm)/Porb_days
xmm_ecl = T0_MJD_eclipse - int(no_cycles)*Porb_days #time of the mid-eclipse BEFORE the first XMM event
if xmm_ecl > xmm_first:
xmm_ecl -= Porb_days
phaseoff = (xmm_first-xmm_ecl)/Porb_days
print('Phase offset is ' + str(phaseoff))
##### Be careful here, as Swift and XMM have different MJDREFs!!!
gtis_conform = []
for i in range(len(gtis_data)):
gtis_conform.append([gtis_data[i][0],gtis_data[i][1]]) #conform to the input that Stingray uses
gtis_conform_xmm = []
for i in range(len(gtis_data_xmm)):
gtis_conform_xmm.append([gtis_data_xmm[i][0],gtis_data_xmm[i][1]]) #conform to the input that Stingray uses
#bary_outputfolder = '/Volumes/Samsung_T5/NGC300_ULX_Swift/xrt/event/lightcurve/'
#obsids = [str(i) for i in range(49834027,49834042)] + [str(i) for i in range(49834043,49834062)] + [str(i) for i in range(49834063,49834066)] + ['88810002'] + [str(i) for i in range(49834066,49834069)] + [str(i) for i in range(49834070,49834079)] + [str(i) for i in range(49834080,49834088)]
#corr_lc_files = [bary_outputfolder + 'sw000' + obsids[i] + '_corr.lc' for i in range(len(obsids))]
#corr_ulx1_files = [bary_outputfolder + 'sw000' + obsids[i] + '_ulx1_corr.lc' for i in range(len(obsids))]
#corr_bg_files = [bary_outputfolder + 'sw000' + obsids[i] + '_bg_corr.lc' for i in range(len(obsids))]
#bg_scale_x1 = (30/120)**2
#bg_scale_ulx1 = (35/120)**2
#completeness = np.array([0,10,20,30,40,50,60,70,80,90,100])/100
#rebinned_t, rebinned_rate, rebinned_err, rebinned_fracexp = Lv2_dj_lsp.rebin_lc(corr_lc_files,corr_bg_files,bg_scale_x1,100,0.5)
#rebinned_t_ulx1, rebinned_rate_ulx1, rebinned_err_ulx1, rebinned_fracexp_ulx1 = rebin_lc(corr_ulx1_files,corr_bg_files,bg_scale_ulx1,3600,0)
#tstart_49834027 = 546830295.758713
"""
### Writing the data from the light curves of X-1 and ULX-1 into text files; also plotting the light curve, This is mainly for 3600s bins
x1_text = open(bary_outputfolder + 'ngc300x1_bg_exp_corr_lc_3600s.txt','w')
ulx1_text = open(bary_outputfolder + 'ngc300ulx1_bg_exp_corr_lc_3600s.txt','w')
for i in range(len(rebinned_t)):
x1_text.write(str(51910 + 7.428703700000000E-04+(rebinned_t[i]+tstart_49834027)/86400) + ' ' + str(rebinned_rate[i]) + ' ' + str(rebinned_err[i]) + '\n')
x1_text.close()
for i in range(len(rebinned_t_ulx1)):
ulx1_text.write(str(51910 + 7.428703700000000E-04 + (rebinned_t_ulx1[i]+tstart_49834027)/86400) + ' ' + str(rebinned_rate_ulx1[i]) + ' ' + str(rebinned_err_ulx1[i]) + '\n')
ulx1_text.close()
mjd = 51910 + 7.428703700000000E-04 + (tstart_49834027+rebinned_t)/86400
mjd_ulx1 = 51910 + 7.428703700000000E-04 + (tstart_49834027+rebinned_t_ulx1)/86400
plt.errorbar(x=mjd[rebinned_err<=0.06],y=rebinned_rate[rebinned_err<=0.06],yerr=rebinned_err[rebinned_err<=0.06],fmt='rx')
plt.errorbar(x=mjd_ulx1[rebinned_err_ulx1<=0.06],y=rebinned_rate_ulx1[rebinned_err_ulx1<=0.06],yerr=rebinned_err_ulx1[rebinned_err_ulx1<=0.06],fmt='bx')
plt.legend(('X-1','ULX-1'),fontsize=12)
plt.xlabel('Time (MJD)',fontsize=12)
plt.ylabel('[Exposure-corrected] Count rate (c/s)',fontsize=12)
plt.axhline(y=0,color='k',lw=0.5,alpha=0.5)
plt.show()
"""
### Running Lv2_dj_lsp.lsp
"""
for i in range(len(completeness)):
rebinned_t, rebinned_rate, rebinned_err, rebinned_fracexp = Lv2_dj_lsp.rebin_lc(corr_lc_files,corr_bg_files,bg_scale_x1,100,completeness[i])
omega,psd,prob3,prob4,prob5 = Lv2_dj_lsp.lsp(rebinned_t,rebinned_rate)
nu_reg = omega/(2.0*np.pi)
freq = omega/(2*np.pi)
plt.figure()
plt.plot(freq,psd,'rx-')
#plt.yscale('log')
#plt.xscale('log')
plt.xlabel('Frequency (Hz)',fontsize=12)
plt.ylabel('Normalized Power',fontsize=12)
plt.axhline(y=prob3,lw=0.5,alpha=0.5)
plt.axhline(y=prob4,lw=0.5,alpha=0.5)
plt.axhline(y=prob5,lw=0.5,alpha=0.5)
#print(prob3,prob4,prob5)
print(np.max(psd),freq[psd==np.max(psd)][0])
#plt.show()
"""
### Doing FR/RSS
#for i in range(len(completeness)):
# rebinned_t, rebinned_rate, rebinned_err, rebinned_fracexp = Lv2_dj_lsp.rebin_lc(corr_lc_files,corr_bg_files,bg_scale_x1,100,completeness[i])
# freqs_list, psd_list = Lv2_dj_lsp.psd_error(rebinned_t,rebinned_rate,rebinned_err)
# print(str(completeness[i]) + '%')
# print('Median frequency: ' + str(np.median(freqs_list)))
# print('Error in frequency: ' + str(np.std(freqs_list)))
#print('Powers: ' + str(psd_list))
################################################################################
################################### FOLDING ####################################
################################################################################
"""
##### Folding using my routine; confirmed that the folding of the raw data agrees with Stingray's and foldAt
nbins = 20
freq = 8.4712e-6
offset = -0.215*nbins
#freq = 8.6088e-6
freqdot = 0
freqdotdot = 0
phase_frac = (T0_MJD_eclipse-T0_MJD)/((1/freq)/86400)
#print('MID ECLIPSE TIME:')
#print( fits.open(eventfile)[1].header['MJDREFI'] + fits.open(eventfile)[1].header['MJDREFF'] + (times[0] + 0.21569724*1/freq)/86400)
#T0_MJD = fits.open(eventfile)[1].header['MJDREF'] + times[0]/86400
##### Using Lv2_phase
plt.figure()
phase,profile,profile_error = Lv2_phase.pulse_folding(times,T,T0_MJD,freq,freqdot,freqdotdot,nbins,"SWIFT")
plt.errorbar(x=phase[:-1],y=profile,yerr=profile_error,color='r',drawstyle='steps-mid')
expos = Lv2_phase.phase_exposure(times[0]-times[0],times[-1]-times[0],1/freq,nbin=nbins,gtis=np.array(gtis_conform)-times[0])
total_expos = np.array(list(expos) + list(expos))
plt.errorbar(x=phase[:-1],y=profile/total_expos,yerr=profile_error/total_expos,color='b',drawstyle='steps-mid')
plt.title(str(pathlib.Path(eventfile).name) +', exposure-corrected (using Lv2_phase)',fontsize=12)
plt.xlabel('Phase',fontsize=12)
plt.ylabel('Counts/s',fontsize=12)
plt.legend(('Folded profile','Exposure-corrected profile'),loc='best',fontsize=12)
print('Original expos:')
print(expos)
##### Using stingray.pulse.pulsar's fold_events
phase_sr,prof_sr,err_sr = fold_events(times,freq,freqdot,freqdotdot,gtis=np.array(gtis_conform),ref_time=times[0],nbin=nbins)
phase_sr_expo,prof_sr_expo,err_sr_expo = fold_events(times,freq,freqdot,freqdotdot,gtis=np.array(gtis_conform),ref_time=times[0],expocorr=True,nbin=nbins)
total_phase_sr = list(phase_sr) + list(phase_sr+1)
total_prof_sr = list(prof_sr)*2
total_err_sr = list(err_sr)*2
total_phase_sr_expo = list(phase_sr_expo) + list(phase_sr_expo+1)
total_prof_sr_expo = list(prof_sr_expo)*2
total_err_sr_expo = list(err_sr_expo)*2
if nbins % 2 == 0:
fft_x = np.array(list(np.arange(int(nbins/2)+1)) + list(np.arange(int(nbins/2)-1) - (int(nbins/2)-1)))
else:
fft_x = np.array(list(np.arange(int(nbins/2)+1)) + list(np.arange(int(nbins/2)) - int(nbins/2)))
shift = np.exp(-2j*np.pi*fft_x*offset/nbins)
shifted_prof_sr = np.real(np.fft.ifft(np.fft.fft(prof_sr_expo)*shift)) #taking the real component of the inverse transform of the shifted Fourier transform of the original folded profile
shifted_err_sr = np.real(np.fft.ifft(np.fft.fft(err_sr_expo)*shift)) #taking the real component of the inverse transform of the shifted Fourier transform of the original folded profile
a = np.array(list(shifted_prof_sr)*2)/T
b = np.array(list(shifted_err_sr)*2)/T
swift_lc = open(Lv0_dirs.NGC300_2020 + 'swift_shifted_folded_curve.txt','w')
for i in range(len(total_expos)):
swift_lc.write(str(total_phase_sr[i]) + ' ' + str(a[i]) + ' ' + str(b[i]) + '\n')
swift_lc.close()
plt.figure()
plt.errorbar(x=total_phase_sr,y=total_prof_sr/T,yerr=total_err_sr/T,color='r',drawstyle='steps-mid')
plt.errorbar(x=total_phase_sr_expo,y=total_prof_sr_expo/T,yerr=total_err_sr_expo/T,color='b',drawstyle='steps-mid')
plt.legend(('Folded profile','Exposure-corrected'),loc='best',fontsize=12)
plt.title(str(pathlib.Path(eventfile).name) +', exposure-corrected (using Stingray fold_events)',fontsize=12)
plt.xlabel('Phase',fontsize=12)
plt.ylabel('Counts/s',fontsize=12)
plt.show()
"""
"""
##### Using foldAt by PyAstronomy
plt.figure()
phase_bins = np.linspace(0,1,21)
phases = foldAt(times,1/freq,T0=times[0]-(1-phase_frac)*1/freq)
expos = Lv2_phase.phase_exposure(times[0]-times[0],times[-1]-times[0],1/freq,nbin=nbins,gtis=np.array(gtis_conform)-times[0])
total_expos = np.array(list(expos) + list(expos))
expos_index = int(phase_frac/(phase_bins[1]-phase_bins[0])) #starting point for exposures
altered_expos = np.array(list(total_expos[expos_index:]) + list(total_expos[:expos_index]))
#print('Altered expos:')
#print(altered_expos)
profile,bin_edges,binnumber = stats.binned_statistic(phases,np.ones(len(phases)),statistic='sum',bins=phase_bins)
error = np.sqrt(profile)
phase_to_2 = np.array(list(phase_bins[:-1]) + list(phase_bins+1))
profile_to_2 = np.array(list(profile)*2)
error_to_2 = np.array(list(error)*2)
plt.errorbar(phase_to_2[:-1],profile_to_2/(T*altered_expos),yerr=error_to_2/(T*altered_expos),color='b',drawstyle='steps-mid')
plt.legend(('Folded profile','Exposure-corrected'),loc='best',fontsize=12)
plt.title(str(pathlib.Path(eventfile).name) +', exposure-corrected (using PyAstronomy foldAt)',fontsize=12)
plt.xlabel('Phase',fontsize=12)
plt.ylabel('Counts/s',fontsize=12)
##### Shifting pulse profiles through a shifted FT (see Deepto's 7/20/2020 email)
if nbins % 2 == 0:
fft_x = np.array(list(np.arange(int(nbins/2)+1)) + list(np.arange(int(nbins/2)-1) - (int(nbins/2)-1)))
else:
fft_x = np.array(list(np.arange(int(nbins/2)+1)) + list(np.arange(int(nbins/2)) - int(nbins/2)))
shift = np.exp(-2j*np.pi*fft_x*offset/nbins)
shifted_prof_sr = np.real(np.fft.ifft(np.fft.fft(prof_sr_expo)*shift)) #taking the real component of the inverse transform of the shifted Fourier transform of the original folded profile
shifted_err_sr = np.real(np.fft.ifft(np.fft.fft(err_sr_expo)*shift)) #taking the real component of the inverse transform of the shifted Fourier transform of the original folded profile
plt.figure()
plt.errorbar(x=total_phase_sr_expo,y=total_prof_sr_expo/T,yerr=total_err_sr_expo/T,color='b',drawstyle='steps-mid')
plt.errorbar(total_phase_sr,np.array(list(shifted_prof_sr)*2)/T,yerr=np.array(list(shifted_err_sr)*2)/T,color='r',drawstyle='steps-mid')
plt.xlabel('Phase',fontsize=12)
plt.ylabel('Counts/s',fontsize=12)
plt.title('Exposure-corrected, folded profiles for NGC 300 X-1 from Swift over May 2018 to May 2019')
plt.legend(('Folded with T0 = time of first event','Folded with T0 = inferred eclipse time/phase'),fontsize=12)
"""
"""
nbins_t = len(times)
offset = (1-0.215)*1/freq
##### Shifting pulse profiles through a shifted FT (see Deepto's 7/20/2020 email)
if nbins_t % 2 == 0:
fft_x = np.array(list(np.arange(int(nbins_t/2)+1)) + list(np.arange(int(nbins_t/2)-1) - (int(nbins_t/2)-1)))
else:
fft_x = np.array(list(np.arange(int(nbins_t/2)+1)) + list(np.arange(int(nbins_t/2)) - int(nbins_t/2)))
shift = np.exp(-2j*np.pi*fft_x*offset/nbins_t)
shifted_t = np.real(np.fft.ifft(np.fft.fft(times)*shift)) #taking the real component of the inverse transform of the shifted Fourier transform of the original folded profile
for i in range(20):
print(times[i],shifted_t[i])
phase_sr,prof_sr,err_sr = fold_events(shifted_t,freq,freqdot,freqdotdot,gtis=np.array(gtis_conform),ref_time=times[0],nbin=nbins)
phase_sr_expo,prof_sr_expo,err_sr_expo = fold_events(shifted_t,freq,freqdot,freqdotdot,gtis=np.array(gtis_conform),ref_time=times[0],expocorr=True,nbin=nbins)
plt.figure()
plt.errorbar(phase_sr,prof_sr/T,color='b',drawstyle='steps-mid')
plt.errorbar(phase_sr,prof_sr_expo/T,color='r',drawstyle='steps-mid')
plt.xlabel('Phase',fontsize=12)
plt.ylabel('Counts/s',fontsize=12)
"""
#plt.show()
"""
##### Fitting 6-model step-and-ramp parameters to the folded profile
plt.figure()
plt.errorbar(x=phase[:-1],y=profile,yerr=profile_error,color='r',drawstyle='steps-mid')
plt.errorbar(x=phase[:-1],y=profile/total_expos,yerr=profile_error/total_expos,color='b',drawstyle='steps-mid')
plt.title(str(pathlib.Path(eventfile).name) +', exposure-corrected (using Lv2_phase)',fontsize=12)
plt.xlabel('Phase',fontsize=12)
plt.ylabel('Counts/s',fontsize=12)
plt.legend(('Folded profile','Exposure-corrected profile'),loc='best',fontsize=12)
start_phase = 0.45
end_phase = 1.95
phase_model = np.linspace(start_phase,end_phase,1001)
x = phase[:-1][(phase[:-1]>=start_phase)&(phase[:-1]<=end_phase)]
y = profile[(phase[:-1]>=start_phase)&(phase[:-1]<=end_phase)]/total_expos[(phase[:-1]>=start_phase)&(phase[:-1]<=end_phase)]
y_err = profile_error[(phase[:-1]>=start_phase)&(phase[:-1]<=end_phase)]/total_expos[(phase[:-1]>=start_phase)&(phase[:-1]<=end_phase)]
def piecewise_linear(x,b1,b2,b3,b4,top,bottom):
return np.piecewise(x, [(x>=start_phase)&(x<=b1), (x>b1)&(x<=b2), (x>b2)&(x<=b3), (x>b3)&(x<=b4), (x>b4)&(x<=end_phase)], [lambda x:top, lambda x:((bottom-top)/(b2-b1)*x+bottom-(bottom-top)/(b2-b1)*b2), lambda x:bottom, lambda x:((top-bottom)/(b4-b3)*x+top-(top-bottom)/(b4-b3)*b4), lambda x:top])
pguess = np.array([1.05,1.15,1.30,1.45,0.0011,0.0003])
popt,pcov = curve_fit(piecewise_linear,x,y,p0=pguess)#,sigma=y_err)
print(popt)
print(np.diag(np.sqrt(pcov))/popt*100)
plt.plot(phase_model,piecewise_linear(phase_model,*popt),'k-')
"""
#plt.show()
########################### DOING CHI^2 EXPLORATION ############################
def lorentzian(f, f0, a, gam,const):
x = (f-f0)/(gam/2)
return a * 1/(1+x**2) + const
def gaussian(f,f0,a,sig,const):
return a * np.exp( -(f-f0)**2/(2*sig**2) ) + const
def sum(f,f0,a,gam,b,sig,const):  # NOTE: shadows the built-in sum(); used only as a curve_fit model below
x = (f-f0)/(gam/2)
return a * 1/(1+x**2) + b * np.exp( -(f-f0)**2/(2*sig**2)) + const
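# Hedged illustration (not part of the original analysis, never called): how the
# `lorentzian` model above can be fit to a chi^2-vs-frequency curve with
# scipy.optimize.curve_fit. The synthetic data and initial guesses are made up.
def _demo_fit_lorentzian():
    rng = np.random.default_rng(0)
    freqs_demo = np.linspace(8.46e-6, 8.48e-6, 200)
    chi2_demo = lorentzian(freqs_demo, 8.4712e-6, 650.0, 0.002e-6, 100.0) \
                + rng.normal(0, 5, freqs_demo.size)
    pguess = np.array([8.471e-6, 600.0, 0.002e-6, 90.0])
    popt, pcov = curve_fit(lorentzian, freqs_demo, chi2_demo, p0=pguess)
    return popt, np.sqrt(np.diag(pcov))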
"""
nbins=20
chi2 = []
freqs = np.arange(8.25e-6,8.7e-6,0.01e-6)
#freqs = np.arange(-9e-17,-1e-18,1e-20)
for i in tqdm(range(len(freqs))):
phase_sr_expo,prof_sr_expo,err_sr_expo = fold_events(times,freqs[i],gtis=np.array(gtis_conform),ref_time=times[0],expocorr=True,nbins=nbins)
chi2_freq = Lv2_phase.get_chi2(prof_sr_expo,err_sr_expo)
chi2.append( chi2_freq )
"""
"""
freqs_filter = freqs[(freqs>=8.4693e-6)&(freqs<=8.472e-6)] #8.47 to 8.47275 for 1-year data
chi2_filter = np.array(chi2)[(freqs>=8.4693e-6)&(freqs<=8.472e-6)]
freq_model = np.linspace(8.4693e-6,8.472e-6,1001)
pguess_l = np.array([8.4706e-6,650,0.002e-6])
popt_l,pcov_l = curve_fit(lorentzian,freqs_filter,chi2_filter,p0=pguess_l)
print(popt_l)
print(np.sqrt(np.diag(pcov_l)))
pguess_g = np.array([8.4706e-6,650,0.002e-6])
popt_g,pcov_g = curve_fit(gaussian,freqs_filter,chi2_filter,p0=pguess_g)
print(popt_g)
print(np.sqrt(np.diag(pcov_g)))
pguess_s = np.array([8.4706e-6,650,0.002e-6,600,0.002e-6])
popt_s,pcov_s = curve_fit(sum,freqs_filter,chi2_filter,p0=pguess_s)
print(popt_s)
print(np.sqrt(np.diag(pcov_s)))
"""
"""
fig,ax = plt.subplots()
def pdot_to_fdot(pdot):
return -pdot/(1/8.4712e-6)**2
def fdot_to_pdot(fdot):
return (-fdot/(8.4712e-6)**2)/1e-7
chi2 = np.array(chi2)
#secax = ax.secondary_xaxis('top',functions=(fdot_to_pdot,pdot_to_fdot))
#secax.set_xlabel('Period Derivative (1E-7 s/s)',fontsize=12)
print(np.max(chi2),freqs[chi2==np.max(chi2)])
ax.plot(freqs,chi2,'rx-')
#ax.axvline(x=-5.60e-17,lw=0.5,alpha=0.5,color='k')
#ax.axvline(x=-2.80e-17,lw=0.5,alpha=0.5,color='k')
ax.axhline(y=869.357,lw=0.5,alpha=0.5,color='b')
#plt.plot(freq_model,lorentzian(freq_model,popt_l[0],popt_l[1],popt_l[2]),'b-')
#plt.plot(freq_model,gaussian(freq_model,popt_g[0],popt_g[1],popt_g[2]),'k-')
#plt.plot(freq_model,sum(freq_model,popt_s[0],popt_s[1],popt_s[2],popt_s[3],popt_s[4]),'m-')
ax.set_xlabel('Frequency Derivative (Hz/s)',fontsize=12)
ax.set_ylabel('chi^2 [ sum( (profile-mean)^2/error^2) ]',fontsize=12)
#plt.legend(('manual chi^2','Lorentzian fit','Gaussian fit','L+G'),fontsize=12)
plt.show()
"""
def sinecurve(x,a,T,phi,c):
return a*np.sin(2*np.pi/T*x+phi) + c
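# Hedged illustration (not part of the original analysis, never called): fitting
# `sinecurve` to a synthetic light curve with curve_fit, mirroring the pattern used
# in the commented-out XMM blocks below. All numbers here are placeholders.
def _demo_fit_sinecurve():
    rng = np.random.default_rng(0)
    t_demo = np.arange(0, 5e5, 1e3)
    rate_demo = sinecurve(t_demo, 0.2, 118e3, -0.5, 0.2) + rng.normal(0, 0.02, t_demo.size)
    pguess = np.array([0.2, 120e3, -0.5, 0.2])
    popt, pcov = curve_fit(sinecurve, t_demo, rate_demo, p0=pguess)
    return popt, np.sqrt(np.diag(pcov))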
##### Exploring reduced data from XMM-Newton
##### Doing sine curve fitting with the RATE data
"""
xmm_lc1 = '/Volumes/Samsung_T5/NGC300_XMMdata/0791010101/PROC/xmm_0791010101_lccorr.lc'
rebinned_t_xmm1 = fits.open(xmm_lc1)[1].data['TIME']
rebinned_rate_xmm1 = fits.open(xmm_lc1)[1].data['RATE']
rebinned_err_xmm1 = fits.open(xmm_lc1)[1].data['ERROR']
xmm_lc2 = '/Volumes/Samsung_T5/NGC300_XMMdata/0791010301/PROC/xmm_0791010301_lccorr.lc'
rebinned_t_xmm2 = fits.open(xmm_lc2)[1].data['TIME']
rebinned_rate_xmm2 = fits.open(xmm_lc2)[1].data['RATE']
rebinned_err_xmm2 = fits.open(xmm_lc2)[1].data['ERROR']
mjd_x1_xmm = fits.open(xmm_lc1)[1].header['MJDREF'] + np.array(list(rebinned_t_xmm1) + list(rebinned_t_xmm2))/86400
rebinned_t_xmm = np.array(list(rebinned_t_xmm1) + list(rebinned_t_xmm2))
rebinned_rate_xmm = np.array(list(rebinned_rate_xmm1) + list(rebinned_rate_xmm2))
rebinned_err_xmm = np.array(list(rebinned_err_xmm1) + list(rebinned_err_xmm2))
pguess = np.array([0.2,120e3,-0.5,0.2])
popt,pcov = curve_fit(sinecurve,rebinned_t_xmm,rebinned_rate_xmm,sigma=rebinned_err_xmm,absolute_sigma=True,p0=pguess)
print('amplitude: ' + str(popt[0]))
print('period: ' + str(popt[1]))
print('freq: ' + str(1/popt[1]))
print('phase shift: ' + str(popt[2]))
print('offset: ' + str(popt[3]))
print(np.sqrt(np.diag(pcov)))
plt.plot(rebinned_t_xmm,rebinned_rate_xmm,'r-')
plt.plot(rebinned_t_xmm,sinecurve(rebinned_t_xmm,*popt),'b-')
plt.xlabel('Time (s)',fontsize=12)
plt.ylabel('Rate (counts/s)',fontsize=12)
print('subset1')
subset_t = rebinned_t_xmm[(rebinned_t_xmm>=5.9845e8)&(rebinned_t_xmm<=5.98475e8)]
subset_rate = sinecurve(rebinned_t_xmm,*popt)[(rebinned_t_xmm>=5.9845e8)&(rebinned_t_xmm<=5.98475e8)]
print(np.min(subset_rate))
print(subset_t[subset_rate==np.min(subset_rate)][0])
print(50814 + subset_t[subset_rate==np.min(subset_rate)][0]/86400)
print('subset2')
subset_t = rebinned_t_xmm[rebinned_t_xmm>=5.9855e8]
subset_rate = sinecurve(rebinned_t_xmm,*popt)[rebinned_t_xmm>=5.9855e8]
print(np.min(subset_rate))
print(subset_t[subset_rate==np.min(subset_rate)][0])
print(50814 + subset_t[subset_rate==np.min(subset_rate)][0]/86400)
plt.show()
"""
"""
tbins = np.arange(times[0],times[-1]+100,100)
summed_data, bin_edges, binnumber = stats.binned_statistic(times,np.ones(len(times)),statistic='sum',bins=tbins)
t_used = tbins[:-1][summed_data>0]
counts_used = summed_data[summed_data>0]
pguess = np.array([10,120e3,5,15])
#popt,pcov = curve_fit(sinecurve,tbins[:-1],summed_data,sigma=np.sqrt(summed_data),absolute_sigma=True,p0=pguess)
popt,pcov = curve_fit(sinecurve,t_used,counts_used,sigma=np.sqrt(counts_used),absolute_sigma=True,p0=pguess,maxfev=10000)
print('amplitude: ' + str(popt[0]))
print('period: ' + str(popt[1]))
print('freq: ' + str(1/popt[1]))
print('phase shift: ' + str(popt[2]))
print('offset: ' + str(popt[3]))
print(np.sqrt(np.diag(pcov)))
plt.plot(t_used,counts_used,'r-')
plt.plot(t_used,sinecurve(t_used,*popt),'b-')
plt.xlabel('Time (s)',fontsize=12)
plt.ylabel('Counts',fontsize=12)
print('subset1')
subset_t = t_used[(t_used>=5.9845e8)&(t_used<=5.98475e8)]
subset_rate = sinecurve(t_used,*popt)[(t_used>=5.9845e8)&(t_used<=5.98475e8)]
print(np.min(subset_rate))
print(subset_t[subset_rate==np.min(subset_rate)][0])
print(50814 + subset_t[subset_rate==np.min(subset_rate)][0]/86400)
print('subset2')
subset_t = tbins[:-1][tbins[:-1]>=5.9855e8]
subset_rate = sinecurve(tbins[:-1],*popt)[tbins[:-1]>=5.9855e8]
print(np.min(subset_rate))
print(subset_t[subset_rate==np.min(subset_rate)][0])
print(50814 + subset_t[subset_rate==np.min(subset_rate)][0]/86400)
plt.show()
"""
###############################################################################
######################### Folding the XMM-Newton data #########################
###############################################################################
pb = 1/8.4712e-6
freqdot = 0
freqdotdot = 0
nbins = 20
gtis_conform = []
for i in range(len(gtis_data_xmm)):
gtis_conform.append([gtis_data_xmm[i][0],gtis_data_xmm[i][1]])
"""
nbins=20
chi2 = []
freqs = np.arange(8e-6,9e-6,0.001e-6)
#freqs = np.arange(-9e-17,-1e-18,1e-20)
for i in tqdm(range(len(freqs))):
phase_sr_expo,prof_sr_expo,err_sr_expo = fold_events(times_xmm,freqs[i],gtis=np.array(gtis_conform),ref_time=times_xmm[0],expocorr=True,nbins=nbins)
chi2_freq = Lv2_phase.get_chi2(prof_sr_expo,err_sr_expo)
chi2.append( chi2_freq )
fig,ax = plt.subplots()
chi2 = np.array(chi2)
#secax = ax.secondary_xaxis('top',functions=(fdot_to_pdot,pdot_to_fdot))
#secax.set_xlabel('Period Derivative (1E-7 s/s)',fontsize=12)
#print(np.max(chi2),freqs[chi2==np.max(chi2)])
ax.plot(freqs,chi2,'rx-')
ax.set_xlabel('Frequency (Hz)',fontsize=12)
ax.set_ylabel('chi^2 [ sum( (profile-mean)^2/error^2) ]',fontsize=12)
plt.show()
"""
##### Using Lv2_phase
plt.figure()
phase,profile,profile_error = Lv2_phase.pulse_folding(times_xmm,T_xmm,T0_MJD_xmm,1/pb,freqdot,freqdotdot,nbins,"XMM")
plt.errorbar(x=phase[:-1],y=profile,yerr=profile_error,color='r',drawstyle='steps-mid')
expos = Lv2_phase.phase_exposure(times_xmm[0]-times_xmm[0],times_xmm[-1]-times_xmm[0],pb,nbin=nbins,gtis=np.array(gtis_conform)-times_xmm[0])
total_expos = np.array(list(expos) + list(expos))
plt.errorbar(x=phase[:-1],y=profile/total_expos,yerr=profile_error/total_expos,color='b',drawstyle='steps-mid')
plt.title(str(pathlib.Path(eventfile_xmm).name) +', exposure-corrected (using Lv2_phase)',fontsize=12)
plt.xlabel('Phase',fontsize=12)
plt.ylabel('Counts/s',fontsize=12)
plt.legend(('Folded profile','Exposure-corrected profile'),loc='best',fontsize=12)
##### Using stingray.pulse.pulsar's fold_events
phase_sr,prof_sr,err_sr = fold_events(times_xmm,1/pb,freqdot,freqdotdot,gtis=np.array(gtis_conform),ref_time=times_xmm[0]-phaseoff*pb,nbin=nbins)
phase_sr_expo,prof_sr_expo,err_sr_expo = fold_events(times_xmm,1/pb,freqdot,freqdotdot,gtis=np.array(gtis_conform),ref_time=times_xmm[0]-phaseoff*pb,expocorr=True,nbin=nbins)
total_phase_sr = np.array(list(phase_sr) + list(phase_sr+1))
total_prof_sr = np.array(list(prof_sr)*2)
total_err_sr = np.array(list(err_sr)*2)
total_phase_sr_expo = np.array(list(phase_sr_expo) + list(phase_sr_expo+1))
total_prof_sr_expo = np.array(list(prof_sr_expo)*2)
total_err_sr_expo = np.array(list(err_sr_expo)*2)
plt.figure()
plt.errorbar(x=total_phase_sr,y=total_prof_sr/T_xmm,yerr=total_err_sr/T_xmm,color='r',drawstyle='steps-mid')
plt.errorbar(x=total_phase_sr_expo,y=total_prof_sr_expo/T_xmm,yerr=total_err_sr_expo/T_xmm,color='b',drawstyle='steps-mid')
plt.legend(('Folded profile','Exposure-corrected'),loc='best',fontsize=12)
plt.title(str(pathlib.Path(eventfile_xmm).name) +', exposure-corrected (using Stingray fold_events)',fontsize=12)
plt.xlabel('Phase',fontsize=12)
plt.ylabel('Counts/s',fontsize=12)
for i in range(len(total_phase_sr_expo)):
print(total_phase_sr_expo[i],total_prof_sr_expo[i]/T_xmm,total_err_sr_expo[i]/T_xmm)
def step_n_ramp(phase,prof,prof_err,start_phase,end_phase,pguess):
"""
    Fit a 6-parameter step-and-ramp model to the folded profile.
"""
phase_model = np.linspace(start_phase,end_phase,101)
x = phase[(phase>=start_phase)&(phase<=end_phase)]
y = prof[(phase>=start_phase)&(phase<=end_phase)]
y_err = prof_err[(phase>=start_phase)&(phase<=end_phase)]
def piecewise_linear(x,b1,b2,b3,b4,top,bottom):
return np.piecewise(x, [(x>=start_phase)&(x<=b1), (x>b1)&(x<=b2), (x>b2)&(x<=b3), (x>b3)&(x<=b4), (x>b4)&(x<=end_phase)], [lambda x:top, lambda x:((bottom-top)/(b2-b1)*x+bottom-(bottom-top)/(b2-b1)*b2), lambda x:bottom, lambda x:((top-bottom)/(b4-b3)*x+top-(top-bottom)/(b4-b3)*b4), lambda x:top])
plt.figure()
popt,pcov = curve_fit(piecewise_linear,x,y,p0=pguess,sigma=y_err,absolute_sigma=True)
pars = popt
pars_err = np.diag(np.sqrt(pcov))
print('Top: ' + str(pars[4]) + ' +- ' + str(pars_err[4]))
print('Bottom: ' + str(pars[5]) + ' +- ' + str(pars_err[5]))
print('Vertex 1: ' + str(pars[0]) + ' +- ' + str(pars_err[0]))
print('Vertex 2: ' + str(pars[1]) + ' +- ' + str(pars_err[1]))
print('Vertex 3: ' + str(pars[2]) + ' +- ' + str(pars_err[2]))
print('Vertex 4: ' + str(pars[3]) + ' +- ' + str(pars_err[3]))
plt.plot(phase_model,piecewise_linear(phase_model,*popt),'k-')
##### Plotting the folded profiles themselves
plt.errorbar(x=phase,y=prof,yerr=prof_err,color='r',drawstyle='steps-mid')
plt.title('Exposure-corrected (profiles from Stingray)',fontsize=12)
plt.xlabel('Phase',fontsize=12)
plt.ylabel('Counts/s',fontsize=12)
plt.legend(('Piecewise fit','Exposure-corrected profile'),loc='best',fontsize=12)
#step_n_ramp(total_phase_sr_expo,total_prof_sr_expo/T_xmm,total_err_sr_expo/T_xmm,0.225,1.775,np.array([0.65,0.75,1,1.25,0.016,0.0035]))
plt.show()
###############################################################################
######################## Combining Swift and XMM-Newton #######################
###############################################################################
"""
#pb = 117403.24413
#pb = 1/8.47145464e-6
pb = 1/8.4712e-6
freqdot = 0
freqdotdot = 0
nbins = 20
MJDREFI = fits.open(eventfile)[1].header['MJDREFI'] #Swift
MJDREFF = fits.open(eventfile)[1].header['MJDREFF'] #Swift
MJDREF = fits.open(eventfile_xmm)[1].header['MJDREF'] #XMM-Newton
diff_swiftxmm = (MJDREFI+MJDREFF-MJDREF)*86400
gtis_conform = []
for i in range(len(gtis_data_xmm)): #for each GTI in the XMM data
gtis_conform.append([gtis_data_xmm[i][0],gtis_data_xmm[i][1]])
for i in range(len(gtis_data)): #for each GTI in the Swift data
gtis_conform.append([gtis_data[i][0]+diff_swiftxmm,gtis_data[i][1]+diff_swiftxmm])
times_all = np.array(list(times_xmm) + list(diff_swiftxmm + times))
T_all = T + T_xmm
T0_MJD_all = T0_MJD_xmm
"""
"""
##### chi^2 exploration
chi2 = []
freqs = np.arange(8.4e-6,8.500000e-6,0.01e-6)
for i in tqdm(range(len(freqs))):
phase_sr_expo,prof_sr_expo,err_sr_expo = fold_events(times_all,freqs[i],gtis=np.array(gtis_conform),ref_time=times_all[0],expocorr=True,nbins=nbins)
chi2_freq = Lv2_phase.get_chi2(prof_sr_expo,err_sr_expo)
chi2.append( chi2_freq )
plt.figure()
plt.plot(freqs/1e-6,chi2,'rx-')
plt.axvline(x=8.4712,lw=0.5,alpha=0.5)
plt.xlabel('Frequency (microHz)',fontsize=12)
plt.ylabel('chi^2 [ sum( (profile-mean)^2/error^2) ]',fontsize=12)
plt.legend(('chi^2 exploration','8.4712E-6 Hz, freq. from Swift'),loc='best')
plt.show()
"""
"""
##### Using Lv2_phase
plt.figure()
phase,profile,profile_error = Lv2_phase.pulse_folding(times_all,T_all,T0_MJD_all,1/pb,freqdot,freqdotdot,nbins,"XMM")
plt.errorbar(x=phase[:-1],y=profile,yerr=profile_error,color='r',drawstyle='steps-mid')
expos = Lv2_phase.phase_exposure(times_all[0]-times_all[0],times_all[-1]-times_all[0],pb,nbin=nbins,gtis=np.array(gtis_conform)-times_all[0])
total_expos = np.array(list(expos) + list(expos))
plt.errorbar(x=phase[:-1],y=profile/total_expos,yerr=profile_error/total_expos,color='b',drawstyle='steps-mid')
plt.title('XMM + Swift, exposure-corrected (using Lv2_phase)',fontsize=12)
plt.xlabel('Phase',fontsize=12)
plt.ylabel('Counts/s',fontsize=12)
plt.legend(('Folded profile','Exposure-corrected profile'),loc='best',fontsize=12)
##### Using stingray.pulse.pulsar's fold_events
phase_sr,prof_sr,err_sr = fold_events(times_all,1/pb,freqdot,freqdotdot,gtis=np.array(gtis_conform),ref_time=times_all[0],nbin=nbins)
phase_sr_expo,prof_sr_expo,err_sr_expo = fold_events(times_all,1/pb,freqdot,freqdotdot,gtis=np.array(gtis_conform),ref_time=times_all[0],expocorr=True,nbin=nbins)
total_phase_sr = list(phase_sr) + list(phase_sr+1)
total_prof_sr = list(prof_sr)*2
total_err_sr = list(err_sr)*2
total_phase_sr_expo = list(phase_sr_expo) + list(phase_sr_expo+1)
total_prof_sr_expo = list(prof_sr_expo)*2
total_err_sr_expo = list(err_sr_expo)*2
plt.figure()
plt.errorbar(x=total_phase_sr,y=total_prof_sr/T_all,yerr=total_err_sr/T_all,color='r',drawstyle='steps-mid')
plt.errorbar(x=total_phase_sr_expo,y=total_prof_sr_expo/T_all,yerr=total_err_sr_expo/T_all,color='b',drawstyle='steps-mid')
plt.legend(('Folded profile','Exposure-corrected'),loc='best',fontsize=12)
plt.title('XMM + Swift, exposure-corrected (using Stingray fold_events)',fontsize=12)
plt.xlabel('Phase',fontsize=12)
plt.ylabel('Counts/s',fontsize=12)
plt.show()
"""
fig,ax = plt.subplots()
def pdot_to_fdot(pdot):
return -pdot/(1/8.4712e-6)**2
def fdot_to_pdot(fdot):
return (-fdot/(8.4712e-6)**2)/1e-7
##### Independent sums of chi^2 (Deepto's suggestion)
nbins = 20
"""
chi2 = []
chi2_swift_all = []
chi2_xmm_all = []
#freqs = np.arange(1.0/(40.0*3600.0),1.0/(20.0*3600.0),1e-11)
freqs = np.arange(8.45e-6,8.50e-6,1e-10)
freqdots = np.arange(1e-18,9e-17,1e-20)
chi2_all_write = open('/Volumes/Samsung_T5/NGC300_XMMdata/placeholder.txt','w')
chi2_swift_write = open('/Volumes/Samsung_T5/NGC300_XMMdata/placeholder2','w')
chi2_xmm_write = open('/Volumes/Samsung_T5/NGC300_XMMdata/placeholder3','w')
for i in tqdm(range(len(freqdots))):
for j in tqdm(range(len(freqdots))):
## Swift
phase_sr_expo,prof_sr_expo,err_sr_expo = fold_events(times,freqs[i],freqdots[j],gtis=np.array(gtis_conform),ref_time=times[0],expocorr=True,nbins=nbins)
chi2_swift = Lv2_phase.get_chi2(prof_sr_expo,err_sr_expo)
chi2_swift_all.append(chi2_swift)
chi2_swift_write.write(str(freqs[i]) + ' ' + str(freqdots[j]) + ' ' + str(chi2_swift) + '\n')
## XMM-Newton
phase_sr_expo_xmm,prof_sr_expo_xmm,err_sr_expo_xmm = fold_events(times_xmm,freqs[i],freqdots[j],gtis=np.array(gtis_conform_xmm),ref_time=times_xmm[0],expocorr=True,nbins=nbins)
chi2_xmm = Lv2_phase.get_chi2(prof_sr_expo_xmm,err_sr_expo_xmm)
chi2_xmm_all.append(chi2_xmm)
chi2_xmm_write.write(str(freqs[i]) + ' ' + str(freqdots[j]) + ' ' + str(chi2_xmm) + '\n')
chi2.append( chi2_swift + chi2_xmm )
chi2_all_write.write(str(freqs[i]) + ' ' + str(freqdots[j]) + ' ' + str(chi2_swift + chi2_xmm) + '\n')
chi2_all_write.close()
chi2_swift_write.close()
chi2_xmm_write.close()
secax = ax.secondary_xaxis('top',functions=(fdot_to_pdot,pdot_to_fdot))
secax.set_xlabel('Period Derivative (1E-7 s/s)',fontsize=12)
ax.plot(freqdots,chi2,'kx-')
ax.plot(freqdots,chi2_swift_all,'rx-',lw=0.5,alpha=0.5)
ax.plot(freqdots,chi2_xmm_all,'bx-',lw=0.5,alpha=0.5)
#plt.yscale('log')
ax.legend(('Swift+XMM','Swift','XMM'),fontsize=12)
ax.set_xlabel('Frequency Derivative (Hz/s)',fontsize=12)
ax.set_ylabel('chi^2 [ sum( (profile-mean)^2/error^2) ]',fontsize=12)
mplcursors.cursor(hover=True)
ax.axvline(x=5.60e-17,lw=0.5,alpha=0.5,color='k')
ax.axvline(x=2.80e-17,lw=0.5,alpha=0.5,color='k')
#ax.axhline(y=869.357,lw=0.5,alpha=0.5,color='b') for 1e-11 Hz spacing
#ax.axhline(y=11830.79495183693,lw=0.5,alpha=0.5,color='b') for 1e-11 spacing
ax.axhline(y=734.51,lw=0.5,alpha=0.5,color='b')
ax.axhline(y=11689.2,lw=0.5,alpha=0.5,color='b')
plt.show()
"""
"""
##### Plotting results from the chi^2 exploration
## secondary axis reference: https://matplotlib.org/3.1.0/gallery/subplots_axes_and_figures/secondary_axis.html
freqs,chi2_all = np.genfromtxt('/Volumes/Samsung_T5/NGC300_XMMdata/chi2_all_dec16-may19.txt',usecols=(0,1),unpack=True)
freqs,chi2_swift = np.genfromtxt('/Volumes/Samsung_T5/NGC300_XMMdata/chi2_swift_dec16-may19.txt',usecols=(0,1),unpack=True)
freqs,chi2_xmm = np.genfromtxt('/Volumes/Samsung_T5/NGC300_XMMdata/chi2_xmm_dec16-may19.txt',usecols=(0,1),unpack=True)
fig,ax = plt.subplots()
ax.plot(freqs/1e-6,chi2_all,'kx-')
ax.plot(freqs/1e-6,chi2_swift,'rx-',lw=0.5,alpha=0.5)
ax.plot(freqs/1e-6,chi2_xmm,'bx-',lw=0.5,alpha=0.5)
def time_to_freq(x):
return (1/x)
def freq_to_time(x):
return (1/x)/3600*1e6
secax = ax.secondary_xaxis('top',functions=(freq_to_time,time_to_freq))
secax.set_xlabel('Time (h)',fontsize=12)
freqs_fit = freqs[(freqs>=8.46e-6)&(freqs<=8.48e-6)]
chi_fit = np.array(chi2_all)[(freqs>=8.46e-6)&(freqs<=8.48e-6)]
#pguess = np.array([8.4712,400,0.02,200]) #for Swift
pguess = np.array([8.4712,500,0.02,11100]) #for all
#popt,pcov = curve_fit(lorentzian,freqs_fit/1e-6,chi_fit,p0=pguess)
#print('Lorentzian')
#print(popt)
#print(np.sqrt(np.diag(pcov)))
#plt.plot(freqs_fit/1e-6,lorentzian(freqs_fit/1e-6,*popt),'r-')
#popt,pcov = curve_fit(gaussian,freqs_fit/1e-6,chi_fit,p0=pguess)
#print('Gaussian')
#print(popt)
#print(np.sqrt(np.diag(pcov)))
#plt.plot(freqs_fit/1e-6,gaussian(freqs_fit/1e-6,*popt),'b-')
#plt.legend(('All data','Swift data','XMM data','Lorentzian fit','Gaussian fit'),loc='best')
ax.set_xlabel('Frequency (microHz)',fontsize=12)
ax.set_ylabel('chi^2 [ sum( (profile-mean)^2/error^2) ]',fontsize=12)
plt.show()
"""
##### Doing contour plots for the 2D P-Pdot exploration
### do a PDOT version too??
def summarize_2d(space,chi2_type,posneg):
"""
Summarizing the information from the 2D chi^2 exploration involving frequency and
the frequency derivative
space - whether in frequency space or period space
chi2_type - 'XMM', 'Swift', or 'all'
posneg - positive fdot or negative fdot
"""
plt.figure()
if chi2_type == 'XMM':
plt.title('XMM')
if posneg == 'pos':
freq,freqdot,chi2 = np.genfromtxt('/Volumes/Samsung_T5/NGC300_XMMdata/chi2_xmm_fine_ffdot.txt',usecols=(0,1,2),unpack=True)
if space == 'frequency':
plt.axhline(y=5.6e-17,color='w',lw=1,alpha=0.5)
plt.axhline(y=2.8e-17,color='w',lw=1,alpha=0.5)
elif space == 'period':
plt.axhline(y=-3.9,color='w',lw=1,alpha=0.5)
plt.axhline(y=-7.8,color='w',lw=1,alpha=0.5)
elif posneg == 'neg':
freq,freqdot,chi2 = np.genfromtxt('/Volumes/Samsung_T5/NGC300_XMMdata/chi2_xmm_fine_ffdotneg.txt',usecols=(0,1,2),unpack=True)
if space == 'frequency':
plt.axhline(y=-5.6e-17,color='w',lw=1,alpha=0.5)
plt.axhline(y=-2.8e-17,color='w',lw=1,alpha=0.5)
elif space == 'period':
plt.axhline(y=3.9,color='w',lw=1,alpha=0.5)
plt.axhline(y=7.8,color='w',lw=1,alpha=0.5)
elif chi2_type == 'Swift':
plt.title('Swift')
if posneg == 'pos':
freq,freqdot,chi2 = | np.genfromtxt('/Volumes/Samsung_T5/NGC300_XMMdata/chi2_swift_fine_ffdot.txt',usecols=(0,1,2),unpack=True) | numpy.genfromtxt |
'''
model: cpm
task: predict behavioral score (for each clip)
data: all clips used together
behavioral measures: see notebook
'''
import numpy as np
import pandas as pd
import pickle
import os
import argparse
import time
'''
ml
'''
from sklearn.model_selection import KFold
import torch
import torch.nn as nn
from cpm import cpm
model_type = 'cpm'
'''
Helpers
'''
from utils import _info
from rb_utils import static_score
from dataloader import _bhv_class_df as _bhv_reg_df
from dataloader import _get_bhv_cpm_seq as _get_seq
# results directory
RES_DIR = 'results/bhv_{}'.format(model_type)
if not os.path.exists(RES_DIR):
os.makedirs(RES_DIR)
K_SEED = 330
'''
SCORES:
'mse': mean squared error
'p': pearson correlation
's': spearman correlation
'''
SCORES = ['mse', 'p', 's']
def _train(df, bhv_df, args):
# get X-y from df
features = [ii for ii in df.columns if 'feat' in ii]
k_feat = len(features)
print('number of features = %d' %(k_feat))
k_clip = len(np.unique(df['c']))
print('number of clips = %d' %(k_clip))
subject_list = bhv_df['Subject'].unique()
train_list = subject_list[:args.train_size]
test_list = subject_list[args.train_size:]
# init dict for all results
results = {}
# true and predicted scores and clip label
results['y'] = {}
results['y_hat'] = {}
results['c'] = {}
for score in SCORES:
# mean scores across time
results['train_%s'%score] = np.zeros(args.k_fold)
results['val_%s'%score] = np.zeros(args.k_fold)
# per clip score
results['c_train_%s'%score] = {}
results['c_val_%s'%score] = {}
for ii in range(k_clip):
results['c_train_%s'%score][ii] = np.zeros(args.k_fold)
results['c_val_%s'%score][ii] = np.zeros(args.k_fold)
    kf = KFold(n_splits=args.k_fold, shuffle=True, random_state=K_SEED)  # shuffle=True is required for random_state to take effect
# get participant lists for each assigned class
class_list = {}
for ii in range(args.k_class):
class_list[ii] = bhv_df[
(bhv_df['Subject'].isin(train_list)) &
(bhv_df['y']==ii)]['Subject'].values
print('No. of participants in class {} = {}'.format(
ii, len(class_list[ii])))
'''
    Split the participants in each class with kf, so that the train/val ratio
    is nearly identical across all classes.
'''
split = {}
for ii in range(args.k_class):
split[ii] = kf.split(class_list[ii])
for i_fold in range(args.k_fold):
_info('fold: %d/%d' %(i_fold+1, args.k_fold))
# ***between-subject train-val split
train_subs, val_subs = [], []
for ii in range(args.k_class):
train, val = next(split[ii])
for jj in train:
train_subs.append(class_list[ii][jj])
for jj in val:
val_subs.append(class_list[ii][jj])
'''
model main
'''
model = cpm(corr_thresh=args.corr_thresh)
X_train, y_train, c_train = _get_seq(
df, train_subs, args)
X_val, y_val, c_val = _get_seq(
df, val_subs, args)
# train model
_, _ = model.fit(X_train, y_train)
'''
results on train data
'''
s, s_c, _ = static_score(
model, X_train, y_train, c_train,
model_type = model_type)
for score in SCORES:
results['train_%s'%score][i_fold] = s[score]
for ii in range(k_clip):
results['c_train_%s'%score][ii][i_fold] = s_c[ii][score]
print('train p = %0.3f' %s['p'])
'''
results on val data
'''
s, s_c, y_hat = static_score(
model, X_val, y_val, c_val,
model_type = model_type)
for score in SCORES:
results['val_%s'%score][i_fold] = s[score]
for ii in range(k_clip):
results['c_val_%s'%score][ii][i_fold] = s_c[ii][score]
print('val p = %0.3f' %s['p'])
results['y'][i_fold] = y_val
results['y_hat'][i_fold] = y_hat
results['c'][i_fold] = c_val
return results
def _test(df, bhv_df, args):
_info('test mode')
# get X-y from df
features = [ii for ii in df.columns if 'feat' in ii]
k_feat = len(features)
print('number of features = %d' %(k_feat))
k_clip = len( | np.unique(df['c']) | numpy.unique |
import pathlib
from dataclasses import dataclass
from typing import Dict, Tuple
import napari
import numpy as np
import torch
from magicgui.widgets import Combobox, Slider
from magicgui.widgets import FloatSlider, Container, Label
from napari.layers import Shapes
from skimage.io import imread
from torchvision.models.detection.transform import GeneralizedRCNNTransform
from torchvision.ops import box_convert
from torchvision.ops import nms
from anchor_generator import get_anchor_boxes
from datasets import ObjectDetectionDataSet
from datasets import ObjectDetectionDatasetSingle
from transformations import re_normalize
from utils import color_mapping_func
from utils import enable_gui_qt
from utils import read_json, save_json
def make_bbox_napari(bbox, reverse=False):
"""
    Get the coordinates of the four corners of a bounding box,
    which is expected to be in 'xyxy' format.
    The result can be put directly into napari shape layers.
    Order: top-left, bottom-left, bottom-right, top-right
    numpy style ---> [y, x]
"""
if reverse:
x = bbox[:, 1]
y = bbox[:, 0]
x1 = x.min()
y1 = y.min()
x2 = x.max()
y2 = y.max()
return np.array([x1, y1, x2, y2])
else:
x1 = bbox[0]
y1 = bbox[1]
x2 = bbox[2]
y2 = bbox[3]
bbox_rect = np.array([[y1, x1], [y2, x1], [y2, x2], [y1, x2]])
return bbox_rect
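# Hedged usage sketch: the helper below is an illustrative addition (its name is
# ours, it is not called anywhere). It shows that make_bbox_napari turns an 'xyxy'
# box into the four [y, x] corners napari expects, and that reverse=True undoes it.
def _demo_make_bbox_napari():
    corners = make_bbox_napari(np.array([10, 20, 50, 80]))  # -> 4x2 array of [y, x] corners
    box_xyxy = make_bbox_napari(corners, reverse=True)      # -> back to [10, 20, 50, 80]
    return corners, box_xyxy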
def get_center_bounding_box(boxes: torch.tensor):
    """Return the center points of a given bounding box."""
return box_convert(boxes, in_fmt="xyxy", out_fmt="cxcywh")[:, :2]
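# Hedged usage sketch: illustrative helper (name is ours, never called) showing the
# centers returned for two 'xyxy' boxes; box_convert is the torchvision.ops function
# imported above.
def _demo_get_center_bounding_box():
    boxes = torch.tensor([[0.0, 0.0, 10.0, 10.0], [10.0, 20.0, 30.0, 60.0]])
    return get_center_bounding_box(boxes)  # tensor([[ 5.,  5.], [20., 40.]])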
class ViewerBase:
def napari(self):
        # IPython magic for napari < 0.4.8
enable_gui_qt()
# napari
if self.viewer:
try:
del self.viewer
except AttributeError:
pass
self.index = 0
        # Start a napari instance
        self.viewer = napari.Viewer()
        # Show the current sample
        self.show_sample()
        # Keyboard shortcuts
        # Press 'n' to move to the next sample
        @self.viewer.bind_key("n")
        def next(viewer):
            self.increase_index()  # Increase the index
            self.show_sample()  # Show the next sample
        # Press 'b' to go back to the previous sample
        @self.viewer.bind_key("b")
        def prev(viewer):
            self.decrease_index()  # Decrease the index
            self.show_sample()  # Show the previous sample
def increase_index(self):
self.index += 1
if self.index >= len(self.dataset):
self.index = 0
def decrease_index(self):
self.index -= 1
if self.index < 0:
self.index = len(self.dataset) - 1
    def show_sample(self):
        """Override this method in subclasses."""
pass
def create_image_layer(self, x, x_name):
return self.viewer.add_image(x, name=str(x_name))
    def update_image_layer(self, image_layer, x, x_name):
        """Replace the data and the name of a given image_layer."""
image_layer.data = x
image_layer.name = str(x_name)
def get_all_shape_layers(self):
return [layer for layer in self.viewer.layers if isinstance(layer, Shapes)]
def remove_all_shape_layers(self):
all_shape_layers = self.get_all_shape_layers()
for shape_layer in all_shape_layers:
self.remove_layer(shape_layer)
def remove_layer(self, layer):
self.viewer.layers.remove(layer)
class DatasetViewer(ViewerBase):
def __init__(
self,
dataset: ObjectDetectionDataSet,
color_mapping: Dict,
rccn_transform: GeneralizedRCNNTransform = None,
):
self.dataset = dataset
self.index = 0
self.color_mapping = color_mapping
        # napari viewer instance
        self.viewer = None
        # RCNN_transformer
        self.rccn_transform = rccn_transform
        # Current image and shape layer
self.image_layer = None
self.shape_layer = None
def show_sample(self):
        # Get a sample from the dataset
        sample = self.get_sample_dataset(self.index)
        # RCNN-transformer
        if self.rccn_transform is not None:
            sample = self.rcnn_transformer(sample, self.rccn_transform)
        # Convert the sample to numpy on the CPU, in the right format for display
        x, x_name = self.transform_x(sample)
        y, y_name = self.transform_y(sample)
        # Create an image layer
        if self.image_layer not in self.viewer.layers:
            self.image_layer = self.create_image_layer(x, x_name)
        else:
            self.update_image_layer(self.image_layer, x, x_name)
        # Create a shape layer
        if self.shape_layer not in self.viewer.layers:
            self.shape_layer = self.create_shape_layer(y, y_name)
        else:
            self.update_shape_layer(self.shape_layer, y, y_name)
        # Reset the view
        self.viewer.reset_view()
        # self.viewer.layers.select_previous()  # focus on an input layer
# self.viewer.status = f'index: {self.index}, x_name: {x_name}, y_name: {y_name}'
def get_sample_dataset(self, index):
return self.dataset[index]
def transform_x(self, sample):
        # Unpack dictionary
        x, x_name = sample["x"], sample["x_name"]
        # Make sure it is a numpy.ndarray on the CPU
        x = x.cpu().numpy()
        # From [C, H, W] to [H, W, C] - only for RGB images.
        # if self.check_if_rgb(x):
        #     x = np.moveaxis(x, source=0, destination=-1)
        if len(x.shape) == 2:
            x = x.T
            x = x[np.newaxis,...]
        # x = x.T  # To go from [W, H] to [H, W]
        # x = x[..., np.newaxis]  # Added for single-channel images
        # print(len(x.shape))
        # Re-normalize
        x = re_normalize(x)
return x, x_name
def transform_y(self, sample):
        # Unpack dictionary
        y, y_name = sample["y"], sample["y_name"]
        # Make sure it is a numpy.ndarray on the CPU
        y = {key: value.cpu().numpy() for key, value in y.items()}
return y, y_name
def get_boxes(self, y):
boxes = y["boxes"]
        # Transform the bounding boxes to make them compatible with napari
boxes_napari = [make_bbox_napari(box) for box in boxes]
return boxes_napari
def get_labels(self, y):
return y["labels"]
def get_colors(self, y):
return color_mapping_func(y["labels"], self.color_mapping)
def get_scores(self, y):
return y["scores"]
def get_text_parameters(self):
return {
"text": "{labels}",
"size": 10,
"color": "white",
"anchor": "upper_left",
"translation": [-1, 0],
}
def create_shape_layer(self, y, y_name):
boxes = self.get_boxes(y)
labels = self.get_labels(y)
colors = self.get_colors(y)
        # Add properties to the shape layer
        # This is required to get the correct text for the TextManager
        # The TextManager displays the text on top of the bounding box;
        # in this case it is the label assigned to each bounding box
        text_parameters = self.get_text_parameters()  # dictionary
        properties = {"labels": labels}
        if "scores" in y.keys():
            scores = self.get_scores(y)
            text_parameters["text"] = "label: {labels}\nscore: {scores:.2f}"
            properties["scores"] = scores
        # Add a shape layer
shape_layer = self.viewer.add_shapes(
data=boxes,
face_color="transparent",
edge_color="red",
edge_width=2,
properties=properties,
name=y_name,
text=text_parameters,
)
        # Make the layer non-editable
        shape_layer.editable = False
        # Save information as metadata
        self.save_to_metadata(shape_layer, "boxes", boxes)
        self.save_to_metadata(shape_layer, "labels", labels)
        self.save_to_metadata(shape_layer, "colors", colors)
        # Add scores
        if "scores" in y.keys():
            self.save_to_metadata(shape_layer, "scores", scores)
        # Update color.
self.set_colors_of_shapes(shape_layer, self.color_mapping)
return shape_layer
def update_shape_layer(self, shape_layer, y, y_name):
"""Remove all shapes and replace the data and the properties"""
        # Remove all shapes from the layer
        self.select_all_shapes(shape_layer)
        shape_layer.remove_selected()
        boxes = self.get_boxes(y)
        labels = self.get_labels(y)
        colors = self.get_colors(y)
        if "scores" in y.keys():
            scores = self.get_scores(y)
        # Set the current properties
        shape_layer.current_properties["labels"] = labels
        if "scores" in y.keys():
            shape_layer.current_properties["scores"] = scores
        # Add shapes to the layer
        shape_layer.add(boxes)
        # Set the properties correctly
        shape_layer.properties["labels"] = labels
        if "scores" in y.keys():
            shape_layer.properties["scores"] = scores
        # Reset the information stored in the metadata
        self.reset_metadata(shape_layer)
        self.save_to_metadata(shape_layer, "boxes", boxes)
        self.save_to_metadata(shape_layer, "labels", labels)
        self.save_to_metadata(shape_layer, "colors", colors)
        # Add scores
        if "scores" in y.keys():
            self.save_to_metadata(shape_layer, "scores", scores)
        # Update color
        self.set_colors_of_shapes(shape_layer, self.color_mapping)
        # Change the name
shape_layer.name = y_name
def save_to_metadata(self, shape_layer, key, value):
shape_layer.metadata[key] = value
def reset_metadata(self, shape_layer):
shape_layer.metadata = {}
    def check_if_rgb(self, x):
        """Check whether the first dimension of the image is the number of channels, and equals 3."""
        # TODO: RGBA images have 4 channels -> an error is raised
if x.shape[0] == 3:
return True
else:
raise AssertionError(
f"The channel dimension is supposed to be 3 for RGB images. This image has a channel dimension of size {x.shape[0]}"
)
def get_unique_labels(self, shapes_layer):
return set(shapes_layer.metadata["labels"])
    def select_all_shapes(self, shape_layer):
        """Select all shapes within a shape_layer instance."""
shape_layer.selected_data = set(range(shape_layer.nshapes))
    def select_all_shapes_label(self, shape_layer, label):
        """Select all shapes with a given label."""
if label not in self.get_unique_labels(shape_layer):
raise ValueError(
f"Label {label} does not exist. Available labels are {self.get_unique_labels(shape_layer)}!"
)
indices = set(self.get_indices_of_shapes(shape_layer, label))
shape_layer.selected_data = indices
def get_indices_of_shapes(self, shapes_layer, label):
return list(np.argwhere(shapes_layer.properties["labels"] == label).flatten())
    def set_colors_of_shapes(self, shape_layer, color_mapping):
        """Iterate over the unique labels and assign a color according to color_mapping."""
for label in self.get_unique_labels(shape_layer): # get unique labels
color = color_mapping[label] # get color from mapping
self.set_color_of_shapes(shape_layer, label, color)
    def set_color_of_shapes(self, shapes_layer, label, color):
        """Assign a color to every shape with a given label."""
        self.select_all_shapes_label(
            shapes_layer, label
        )  # Select only the correct shapes
        shapes_layer.current_edge_color = (
            color  # Change the color of the selected shapes
        )
def gui_text_properties(self, shape_layer):
container = self.create_gui_text_properties(shape_layer)
self.viewer.window.add_dock_widget(
container, name="text_properties", area="right"
)
def gui_score_slider(self, shape_layer):
if "nms_slider" in self.viewer.window._dock_widgets.keys():
self.remove_gui("nms_slider")
self.shape_layer.events.name.disconnect(
callback=self.shape_layer.events.name.callbacks[0]
)
container = self.create_gui_score_slider(shape_layer)
self.slider = container
self.viewer.window.add_dock_widget(container, name="score_slider", area="right")
def gui_nms_slider(self, shape_layer):
if "score_slider" in self.viewer.window._dock_widgets.keys():
self.remove_gui("score_slider")
self.shape_layer.events.name.disconnect(
callback=self.shape_layer.events.name.callbacks[0]
)
container = self.create_gui_nms_slider(shape_layer)
self.slider = container
self.viewer.window.add_dock_widget(container, name="nms_slider", area="right")
def remove_gui(self, name):
widget = self.viewer.window._dock_widgets[name]
self.viewer.window.remove_dock_widget(widget)
def create_gui_text_properties(self, shape_layer):
TextColor = Combobox(
choices=shape_layer._colors, name="text color", value="white"
)
TextSize = Slider(min=1, max=50, name="text size", value=1)
container = Container(widgets=[TextColor, TextSize])
def change_text_color(event):
            # This changes the text color
            shape_layer.text.color = str(TextColor.value)
        def change_text_size(event):
            # This changes the text size
shape_layer.text.size = int(TextSize.value)
TextColor.changed.connect(change_text_color)
TextSize.changed.connect(change_text_size)
return container
def create_gui_score_slider(self, shape_layer):
slider = FloatSlider(min=0.0, max=1.0, step=0.01, name="Score", value=0.0)
slider_label = Label(name="Score_threshold", value=0.0)
container = Container(widgets=[slider, slider_label])
def change_boxes(event, shape_layer=shape_layer):
            # Remove all shapes
            self.select_all_shapes(shape_layer)
            shape_layer.remove_selected()
            # Create the mask and the new data
            mask = np.where(shape_layer.metadata["scores"] > slider.value)
            new_boxes = np.asarray(shape_layer.metadata["boxes"])[mask]
            new_labels = shape_layer.metadata["labels"][mask]
            new_scores = shape_layer.metadata["scores"][mask]
            # Set the current properties
            shape_layer.current_properties["labels"] = new_labels
            shape_layer.current_properties["scores"] = new_scores
            # Add shapes to the layer
            if new_boxes.size > 0:
                shape_layer.add(list(new_boxes))
            # Set the properties
            shape_layer.properties["labels"] = new_labels
            shape_layer.properties["scores"] = new_scores
            # Update the slider label
            slider_label.value = str(slider.value)
        slider.changed.connect(change_boxes)
        # Trigger the score callback once
        container.Score.value = 0.0
        # Event triggered when the layer name is changed
self.shape_layer.events.name.connect(change_boxes)
return container
def create_gui_nms_slider(self, shape_layer):
slider = FloatSlider(min=0.0, max=1.0, step=0.01, name="NMS", value=0.0)
slider_label = Label(name="IoU_threshold")
container = Container(widgets=[slider, slider_label])
def change_boxes(event, shape_layer=shape_layer):
            # Remove all shapes from the layer
            self.select_all_shapes(shape_layer)
            shape_layer.remove_selected()
            # Create a mask and the new data
            boxes = torch.tensor(
                [
                    make_bbox_napari(box, reverse=True)
                    for box in shape_layer.metadata["boxes"]
                ]
            )
            scores = torch.tensor(shape_layer.metadata["scores"])
            if boxes.size()[0] > 0:
                mask = nms(boxes, scores, slider.value)  # torch.tensor
                mask = (np.array(mask),)
                new_boxes = np.asarray(shape_layer.metadata["boxes"])[mask]
                new_labels = shape_layer.metadata["labels"][mask]
                new_scores = shape_layer.metadata["scores"][mask]
                # Set the current properties
                shape_layer.current_properties["labels"] = new_labels
                shape_layer.current_properties["scores"] = new_scores
                # Add shapes to the layer
                if new_boxes.size > 0:
                    shape_layer.add(list(new_boxes))
                # Set the properties
                shape_layer.properties["labels"] = new_labels
                shape_layer.properties["scores"] = new_scores
                # Store temporary data
                shape_layer.metadata["boxes_nms"] = list(new_boxes)
                shape_layer.metadata["labels_nms"] = new_labels
                shape_layer.metadata["scores_nms"] = new_scores
            # Update the slider label
            slider_label.value = str(slider.value)
        slider.changed.connect(change_boxes)
        # Trigger the NMS callback once
        container.NMS.value = 1.0
        # Event triggered when the shape layer name changes
self.shape_layer.events.name.connect(change_boxes)
return container
def rcnn_transformer(self, sample, transform):
        # Unpack dictionary
x, x_name, y, y_name = (
sample["x"],
sample["x_name"],
sample["y"],
sample["y_name"],
)
x, y = transform([x], [y])
x, y = x.tensors[0], y[0]
return {"x": x, "y": y, "x_name": x_name, "y_name": y_name}
class DatasetViewerSingle(DatasetViewer):
def __init__(
self,
dataset: ObjectDetectionDatasetSingle,
rccn_transform: GeneralizedRCNNTransform = None,
):
self.dataset = dataset
self.index = 0
        # napari viewer instance
        self.viewer = None
        # rccn_transformer
        self.rccn_transform = rccn_transform
        # Current image and shape layer
self.image_layer = None
self.shape_layer = None
def show_sample(self):
        # Get a sample from the dataset
        sample = self.get_sample_dataset(self.index)
        # RCNN-transformer
        if self.rccn_transform is not None:
            sample = self.rcnn_transformer(sample, self.rccn_transform)
        # Convert the sample to numpy on the CPU, in the right format for display
        x, x_name = self.transform_x(sample)
        # Create an image layer
        if self.image_layer not in self.viewer.layers:
            self.image_layer = self.create_image_layer(x, x_name)
        else:
            self.update_image_layer(self.image_layer, x, x_name)
        # Reset the view
self.viewer.reset_view()
def rcnn_transformer(self, sample, transform):
        # Unpack dictionary
x, x_name = sample["x"], sample["x_name"]
x, _ = transform([x])
x, _ = x.tensors[0], _
return {"x": x, "x_name": x_name}
class Annotator(ViewerBase):
def __init__(
self,
image_ids: pathlib.Path,
annotation_ids: pathlib.Path = None,
color_mapping: Dict = {},
):
self.image_ids = image_ids
self.annotation_ids = annotation_ids
self.index = 0
self.color_mapping = color_mapping
        # napari viewer instance
        self.viewer = None
        # Current image and shape layers
        self.image_layer = None
        self.shape_layers = []
        # Initialize annotations
        self.annotations = self.init_annotations()
        # Load annotations from disk
        if self.annotation_ids is not None:
            self.load_annotations()
        # Edge width for the shapes
        self.edge_width = 2.0
        # Current annotation object
self.annotation_object = None
def init_annotations(self):
@dataclass
class AnnotationObject:
name: str
boxes: np.ndarray
labels: np.ndarray
def __bool__(self):
return True if self.boxes.size > 0 else False
return [
AnnotationObject(
name=image_id.stem, boxes=np.array([]), labels=np.array([])
)
for image_id in self.image_ids
]
def increase_index(self):
self.index += 1
if self.index >= len(self.image_ids):
self.index = 0
def decrease_index(self):
self.index -= 1
if self.index < 0:
self.index = len(self.image_ids) - 1
def show_sample(self):
        # Get the image identifier
        image_id = self.get_image_id(self.index)
        # Load the image
        x = self.load_x(image_id)
        # Image transformations
        x = self.transform_x(x)
        # Create or update an image layer
        if self.image_layer not in self.viewer.layers:
            self.image_layer = self.create_image_layer(x, image_id)
        else:
            self.update_image_layer(self.image_layer, x, image_id)
        # Save the annotations to annotation_object (any change will be saved or overwritten)
        self.save_annotations(self.annotation_object)
        # Update the current annotation object
        self.annotation_object = self.get_annotation_object(self.index)
        # Remove all shape layers
        self.remove_all_shape_layers()
        # Create the new shape layers
        self.shape_layers = self.create_shape_layers(self.annotation_object)
        # Reset the view
self.viewer.reset_view()
def get_image_id(self, index):
return self.image_ids[index]
def get_annotation_object(self, index):
return self.annotations[index]
def transform_x(self, x):
        # Re-normalize
x = re_normalize(x)
return x
def load_x(self, image_id):
return imread(image_id)
def load_annotations(self):
        # Generate a list of names; the annotation file must have the same name as the image.
annotation_object_names = [
annotation_object.name for annotation_object in self.annotations
]
        # Iterate over the annotation identifiers
for annotation_id in self.annotation_ids:
annotation_name = annotation_id.stem
index_list = self.get_indices_of_sequence(
annotation_name, annotation_object_names
)
if index_list:
                # Check whether more than one index was found
                idx = index_list[0]  # Get the index value from index_list
                annotation_file = read_json(annotation_id)  # Read the file
                # Store them as np.ndarrays
boxes = | np.array(annotation_file["boxes"]) | numpy.array |
import numpy as np
import pandas as pd
import joblib
import tensorflow as tf
import sys
import functools
import os
import tensorflow.keras.backend as K
from matplotlib import pyplot as plt
# from IPython.display import clear_output
from scipy.stats import gaussian_kde, binned_statistic as binstat
from tensorflow.keras.preprocessing.sequence import pad_sequences
from sklearn.model_selection import ShuffleSplit, GroupShuffleSplit
from sklearn.preprocessing import MinMaxScaler, StandardScaler
from sklearn.metrics import r2_score, mean_squared_error, mean_absolute_error, median_absolute_error
from tensorflow.keras.losses import Loss
from scipy.spatial.distance import jensenshannon as js
class HuberLoss(Loss):
"""
Custom TensorFlow Loss subclass implementing the Huber loss.
"""
def __init__(self, threshold: float = 1):
"""
:param threshold: float
The Huber threshold between L1 and L2 losses.
"""
super().__init__()
self.threshold = threshold
def call(self, y_true, y_pred):
error = y_true - y_pred
is_small_error = tf.abs(error) <= self.threshold
small_error_loss = tf.square(error) / 2
big_error_loss = self.threshold * (tf.abs(error) - (0.5 * self.threshold))
return tf.where(is_small_error, small_error_loss, big_error_loss)
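# Illustrative usage sketch (added for clarity; the tensors below are assumed
# values, not taken from the original pipeline). With threshold=1.0, residuals
# of 0.5, 1.0 and 3.0 give per-element losses 0.125, 0.5 and 2.5, so the
# reduced (mean) loss is ~1.042:
#
#     loss_fn = HuberLoss(threshold=1.0)
#     y_true = tf.constant([0.0, 0.0, 0.0])
#     y_pred = tf.constant([0.5, 1.0, 3.0])
#     print(loss_fn(y_true, y_pred).numpy())  # ~1.0417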
def root_mean_squared_error(y, y_pred, sample_weight=None):
"""
Compute the root mean squared error metric.
"""
value = mean_squared_error(y, y_pred, sample_weight=sample_weight)
return np.sqrt(value)
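# Quick sanity check (illustrative values, not from the original data):
#
#     root_mean_squared_error([0.0, 0.0], [3.0, 4.0])  # -> sqrt(12.5) ~ 3.536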
def process_input_parameters(pars, min_folds_cv=5):
"""
Check the consistency of the input parameters and make modifications if necessary.
:param pars: argparse.Namespace
An argparse namespace object containing the input parameters.
:param min_folds_cv: int
The minimum number of folds required for K-fold cross-validation.
:return: pars, argparse.Namespace
The processed version of the input namespace object.
"""
if len(pars.lcdir) > 1:
assert len(pars.wavebands) == len(pars.lcdir), "The number of items in lcdir must either be 1 or match " \
"the number of items in wavebands."
assert len(pars.wavebands) == len(pars.lcfile_suffices), \
"The number of items in wavebands and lcfile_suffices must match."
if not os.path.isdir(os.path.join(pars.rootdir, pars.outdir)):
os.mkdir(os.path.join(pars.rootdir, pars.outdir))
pars.hparam_grid = np.array(pars.hpars)
# Check if only the CPU is to be used:
if pars.cpu:
os.environ["CUDA_VISIBLE_DEVICES"] = ""
# Join the list elements of pars.subset into a long string:
if pars.subset:
pars.subset = ' '.join(pars.subset)
# Check the number of meta input features:
if pars.meta_input is None:
pars.n_meta = 0
else:
pars.n_meta = len(pars.meta_input)
if pars.nn_type == 'cnn':
pars.n_channels = len(pars.wavebands)
else:
pars.n_channels = 2 * len(pars.wavebands)
if pars.weighing_by_density:
print("Density weighing is ON with cutoff {}".format(pars.weighing_by_density))
else:
print("Density weighing is OFF.")
print("Number of input channels: {}".format(pars.n_channels))
print("Number of meta features: {}".format(pars.n_meta))
if pars.train:
pars.predict = False # We want to train a regression model.
if pars.pick_fold is not None:
for ii in pars.pick_fold:
print(type(ii))
assert isinstance(ii, int) and 0 < ii <= pars.k_fold, \
"pick_fold must be > 0 AND <= k_fold integer"
assert pars.k_fold >= min_folds_cv, \
"pick_fold requires k_fold >= {}".format(min_folds_cv)
pars.refit = False
if not pars.cross_validate:
assert len(pars.hparam_grid) == 1, "Cannot do grid-search of hyper-parameters if cross_validate is False."
pars.refit = True
if pars.explicit_test_frac:
assert pars.refit or pars.ensemble, \
"For the evaluation of the model on the test set, 'refit' or 'ensemble' must be set."
if pars.optimize_lr:
pars.n_epochs = 100
pars.decay = 0.0
pars.save_model = False
pars.cross_validate = False
pars.refit = True
return pars
def read_dataset(filename: str, columns: list = None, subset_expr: str = None, input_feature_names: list = None,
trim_quantiles: list = None, qlo: float = 0.25, qhi: float = 0.75, plothist: bool = False,
histfig: str = "hist.png", dropna_cols: list = None, comment: str = '#', dtype=None):
"""
Loads, trims, and exports dataset to numpy arrays.
:param filename: str
The name of the input file.
:param columns: list of strings
Passed to the usecols parameter of pandas.read_csv()
:param subset_expr: str
Expression for subsetting the input data, passed as the first parameter of pandas.DataFrame.query()
:param input_feature_names: list of strings
An optional subset of the usecols parameter, including the names of the columns to be returned as features.
If None, all columns in usecols will be returned.
:param trim_quantiles: list
An optional subset of the usecols parameter, including the names of the columns to be threshold-rejected
beyond the quantiles specified by qlo and qhi. If None, no quantile-trimming will be performed.
:param qlo: float
Lower quantile for threshold rejection.
:param qhi: float
Upper quantile for threshold rejection.
:param plothist: bool
If True, the histograms of the columns in usecols will be plotted before and, if performed, after quantile trimming.
:param histfig: str
The name of the output histogram figure file if plothist is True.
:param dropna_cols:
:param comment:
:param dtype:
:return:
"""
with open(filename) as f:
header = f.readline()
cols = header.strip('#').split()
df = pd.read_csv(filename, names=cols, header=None, sep="\s+", usecols=columns, comment=comment, dtype=dtype)
if dropna_cols is not None:
df.dropna(inplace=True, subset=dropna_cols)
ndata = len(df)
print(df.head())
print("----------\n{} lines read from {}\n".format(ndata, filename))
df_orig = df
# Apply threshold rejections:
if subset_expr is not None:
df = df.query(subset_expr)
ndata = len(df)
print("{} lines after threshold rejections\n".format(ndata))
# plot histogram for each column in original dataset
if plothist:
fig, ax = plt.subplots(figsize=(20, 10))
fig.clf()
_ = pd.DataFrame.hist(df, bins=int(np.ceil(np.cbrt(ndata) * 2)), figsize=(20, 10), grid=False, color='red',
ax=ax)
plt.savefig(histfig)
# omit data beyond specific quantiles [qlo, qhi]
if trim_quantiles is not None:
dfq = df[trim_quantiles]
quantiles = pd.DataFrame.quantile(dfq, q=[qlo, qhi], axis=0, numeric_only=True, interpolation='linear')
print("Values at [{},{}] quantiles to be applied for data trimming:".format(qlo, qhi))
print(quantiles)
mask = (dfq > dfq.quantile(qlo)) & (dfq < dfq.quantile(qhi))
# print(mask)
mask = mask.all(axis=1)
# print(mask.shape)
df = pd.DataFrame.dropna(df[mask])
ndata = len(df)
print("\n{} lines remained after quantile rejection.\n".format(ndata))
# plot histogram for each column in trimmed dataset
if plothist:
fig, ax = plt.subplots(figsize=(20, 10))
_ = pd.DataFrame.hist(df, bins=int(np.ceil(np.cbrt(ndata) * 2)), figsize=(20, 10), grid=False,
color='green', ax=ax)
fig.savefig("hist_trim.png", format="png")
if input_feature_names is not None:
return df.loc[:, input_feature_names], df_orig
else:
return df, df_orig
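# Minimal usage sketch (hypothetical file and column names, not from the
# original project):
#
#     df, df_orig = read_dataset("metadata.dat",
#                                columns=['id', 'period', 'feh'],
#                                subset_expr="period > 0.2",
#                                trim_quantiles=['feh'], qlo=0.01, qhi=0.99,
#                                dropna_cols=['feh'])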
def read_time_series_for_rnn(name_list, source_dir, nts, input_wavebands, ts_file_suffix, rootdir="",
periods=None, max_phase=1.0, phase_shift=None, nbins=None):
print("Reading time series...", file=sys.stderr)
n_data = len(name_list)
scaler = StandardScaler(copy=True, with_mean=True, with_std=False)
X_list = list()
times_dict = dict()
mags_dict = dict()
phases_dict = dict()
if nbins is not None:
print("Light curves will be binned to max. {0} points in [0, {1:.1f}].".format(nbins, max_phase))
for iband, waveband in enumerate(input_wavebands):
X = np.zeros((n_data, nts, 2)) # Input shape required by an RNN: (batch_size, time_steps, features)
phases = list()
times = list()
mags = list()
if len(source_dir) > 1:
directory = source_dir[iband]
else:
directory = source_dir[0]
for ii, name in enumerate(name_list):
print('Reading data for {}\r'.format(name), end="", file=sys.stderr)
pp, mm = np.genfromtxt(os.path.join(rootdir, directory, name + ts_file_suffix[iband]),
unpack=True, comments='#')
phasemask = (pp < max_phase)
pp = pp[phasemask]
mm = mm[phasemask]
if phase_shift is not None:
pp = get_phases(1.0, pp, shift=phase_shift, all_positive=True)
inds = np.argsort(pp)
pp = pp[inds]
mm = mm[inds]
if nbins is not None:
pp, mm = binlc(pp, mm, nbins=nbins, max_y=max_phase)
if periods is not None:
tt = pp * periods[ii]
else:
tt = pp
# here we only subtract the mean:
mm = scaler.fit_transform(mm.reshape(-1, 1)).flatten()
times.append(tt)
mags.append(mm)
phases.append(pp)
times_padded = pad_sequences(times, maxlen=nts, dtype='float64', padding='post', truncating='post', value=-1)
mags_padded = pad_sequences(mags, maxlen=nts, dtype='float64', padding='post', truncating='post', value=-1)
X[:, :, 0] = times_padded
X[:, :, 1] = mags_padded
X_list.append(X)
times_dict[waveband] = times
mags_dict[waveband] = mags
phases_dict[waveband] = phases
# Create final data matrix for the time series:
X = np.concatenate(X_list, axis=2)
print("")
return X, times_dict, mags_dict, phases_dict
def read_time_series_for_cnn(name_list, source_dir, nts, input_wavebands, ts_file_suffix, nuse=1,
rootdir="", n_aug=None):
nmags = int(nts / nuse)
n_data = len(name_list)
if n_aug is not None:
assert isinstance(n_aug, int) and n_aug > 0, \
"n_aug must be a positive integer"
dict_x_ts = dict()
for waveband in input_wavebands:
dict_x_ts[waveband] = np.zeros((n_data, nmags))
if n_aug is not None:
dict_x_ts[waveband] = np.zeros((n_data * n_aug, nmags))
groups = np.zeros((n_data * n_aug))
dict_x_ts_scaled = dict()
print("Reading time series...", file=sys.stderr)
for ii, name in enumerate(name_list):
print('Reading data for {}\r'.format(name), end="", file=sys.stderr)
for iband, waveband in enumerate(input_wavebands):
if len(source_dir) > 1:
directory = source_dir[iband]
else:
directory = source_dir[0]
if n_aug is None:
phases, timeseries = np.genfromtxt(os.path.join(directory, name + ts_file_suffix[iband]),
unpack=True, comments='#')
phases = phases[0:nts]
timeseries = timeseries[0:nts]
dict_x_ts[waveband][ii][:] = timeseries[nuse - 1::nuse]
groups = None
else:
tsinput = np.genfromtxt(os.path.join(directory, name + ts_file_suffix[iband]),
unpack=False, comments='#')
# check if there are n_aug+1 columns in the data matrix
assert tsinput.shape[1] == n_aug + 1, \
"data matrix in " + os.path.join(directory, name + ts_file_suffix[iband]) + " has wrong shape"
phases = tsinput[0:nts, 0]
for jj in range(n_aug):
timeseries = tsinput[0:nts, jj + 1]
dict_x_ts[waveband][jj + ii * n_aug][:] = timeseries[nuse - 1::nuse]
groups[jj + ii * n_aug] = ii
phases = phases[nuse - 1::nuse]
# Scale the time series to the [0,1] range
scaler = MinMaxScaler(copy=True, feature_range=(0, 1))
ts_list = list()
for ii, waveband in enumerate(input_wavebands):
scaler.fit(dict_x_ts[waveband].T)
dict_x_ts_scaled[waveband] = (scaler.transform(dict_x_ts[waveband].T)).T
ts_list.append(np.expand_dims(dict_x_ts_scaled[waveband], axis=2))
# Create final data matrix for the time series:
X = np.concatenate(ts_list, axis=2)
print("")
return X, dict_x_ts, dict_x_ts_scaled, phases, groups
def cross_validate(model, folds: list, x_list: list or tuple, y,
model_kwargs: dict = {}, compile_kwargs: dict = {},
initial_weights: list = None,
sample_weight_fit=None, sample_weight_eval=None, ids=None,
indices_to_scale: list or tuple = None, scaler=None,
n_epochs: int = 1, batch_size: int = None, shuffle=True, verbose: int = 0,
callbacks: list = [], metrics: list or tuple = None,
log_training=True, log_prefix='', pick_fold: list or tuple = None,
save_data=True, rootdir='.', filename_train='train.dat', filename_val='val.dat',
strategy=None, n_devices=1, validation_freq=1, seed=1):
# Initialize variables:
histories = list()
model_weights = list()
scalers_folds = list()
Y_train_collected = np.array([])
Y_val_collected = np.array([])
Y_train_pred_collected = np.array([])
Y_val_pred_collected = np.array([])
fitting_weights_train_collected = np.array([])
fitting_weights_val_collected = np.array([])
eval_weights_train_collected = np.array([])
eval_weights_val_collected = np.array([])
ids_train_collected = np.array([])
ids_val_collected = np.array([])
numcv_t = np.array([])
numcv_v = np.array([])
# callbacks.append(PrintLearningRate())
if ids is None:
# create IDs by simply numbering the data
ids = np.linspace(1, y.shape[0], y.shape[0])
# -*- coding: utf-8 -*-
"""
<NAME> and <NAME>
AM205 Final Project Code
plots red dots for a range of x values and u zero
plots blue dots for one iteration forward from the red initial conditions
"""
import numpy as np
import matplotlib.pyplot as plt
import scipy as sp
import scipy.optimize as opt
# Determine initial conditions
mu = 0.01
C = 3.2
sample = 100
#number of crossings
tr = 1
uptime = 1000
dt = 0.01
h = 10**(-2)
# define dz/dt components for odeint solver
def pend(z,t):
x,y,u,v = z
dzdt = [u,
        v,
        v + 0.5*(2*(x-mu) + ((mu-1)*2*x)/(x**2+y**2)**(3.0/2) + (2*mu*(1-x))/((x-1)**2 + y**2)**(3.0/2)),
        -u + 0.5*(2*y + ((mu-1)*2*y)/(x**2+y**2)**(3.0/2) + (-2*mu*y)/((x-1)**2 + y**2)**(3.0/2))]
return dzdt
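# Usage sketch (not part of the original script): `pend` has the (z, t) signature
# expected by scipy's odeint, so a trajectory can be obtained with, e.g.:
#
#     from scipy.integrate import odeint
#     t = np.arange(0.0, uptime, dt)
#     z0 = [0.5, 0.0, 0.0, 0.1]   # hypothetical initial state [x, y, u, v]
#     traj = odeint(pend, z0, t)  # traj[:, 0], traj[:, 1] are x(t), y(t)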
# function used in determining set of initial conditions
def f(x):
return (x-mu)**2 + 2*(1-mu)/np.abs(x)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Jan 22 10:05:24 2020
@author: tungutokyo
"""
import joblib
import pickle
import pandas as pd
import numpy as np
import urllib
import requests
import bs4
from sklearn.feature_extraction.text import TfidfVectorizer, CountVectorizer
import MeCab
from gensim.models import word2vec
from gensim.models import Doc2Vec
from gensim.models.doc2vec import TaggedDocument
from tqdm import tqdm, tqdm_pandas, tqdm_notebook
import matplotlib.pyplot as plt
import seaborn as sns
import itertools
from scipy import interp
from sklearn.model_selection import train_test_split, cross_val_score
from sklearn.preprocessing import LabelBinarizer
from sklearn.utils.extmath import safe_sparse_dot
from scipy.special import logsumexp
from sklearn.metrics.pairwise import cosine_similarity
import warnings
warnings.filterwarnings("ignore")
class NaiveBayes:
def __init__(self, alpha=0.01):
path_to_artifacts = "../../research/"
self.alpha = alpha
def get_news(self, link):
title = []
thearticle = []
#print(link)
paragraphtext = []
url = link
page = requests.get(url)
soup = bs4.BeautifulSoup(page.text, "html.parser")
atitle = soup.find(class_="article_header_text").find("h1")
thetitle = atitle.get_text()
articletext = soup.find_all("p")
for paragraph in articletext:
text = paragraph.get_text()
paragraphtext.append(text)
title.append(thetitle)
thearticle.append(paragraphtext)
myarticle = [" ".join(article) for article in thearticle]
data = {
"Title": title,
"Article": myarticle,
"PageLink": link}
news = pd.DataFrame(data=data)
cols = ["Title", "Article", "PageLink"]
news = news[cols]
return news
def preprocessing(self, input_data):
df = input_data.reset_index(drop=True)
df["Content_Parsed_1"] = df["Article"].str.replace("キーワードで気になるニュースを絞りこもう 「いいね」、フォローをしておすすめの記事をチェックしよう。 グノシーについて 公式SNS 関連サイト アプリをダウンロード グノシー | 情報を世界中の人に最適に届ける Copyright © Gunosy Inc. All rights reserved.", '')
def get_wakati_text(text):
tagger = MeCab.Tagger("-Owakati")
wakati_text = tagger.parse(text).strip()
return wakati_text
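# Example of the wakati (space-separated) output, assuming a standard MeCab
# dictionary (illustrative, not from the original data):
#
#     get_wakati_text("すもももももももものうち")  # -> "すもも も もも も もも の うち"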
nrows = len(df)
wakati_text_list = []
for row in range(0, nrows):
text = df.loc[row]["Content_Parsed_1"]
wakati_text_list.append(get_wakati_text(text))
df["wakati_text"] = wakati_text_list
self.df_pred = df
with open("News_dataset.pickle", "rb") as data:
self.df_train = pickle.load(data)
self.df_train = self.df_train.reset_index(drop=True).drop(columns = ["News_length"])
with open("Updated_news.pickle", "rb") as data:
self.df_pre_recommend = pickle.load(data)
self.df_pre_recommend = self.df_pre_recommend.reset_index(drop=True).drop(columns = ["News_length"])
self.df_train["Content_Parsed_1"] = self.df_train["Article"].str.replace("キーワードで気になるニュースを絞りこもう 「いいね」、フォローをしておすすめの記事をチェックしよう。 グノシーについて 公式SNS 関連サイト アプリをダウンロード グノシー | 情報を世界中の人に最適に届ける Copyright © Gunosy Inc. All rights reserved.", '')
nrows = len(self.df_train)
wakati_text_list = []
for row in range(0, nrows):
text = self.df_train.loc[row]["Content_Parsed_1"]
wakati_text_list.append(get_wakati_text(text))
self.df_train["wakati_text"] = wakati_text_list
self.df_pre_recommend["Content_Parsed_1"] = self.df_pre_recommend["Article"].str.replace("キーワードで気になるニュースを絞りこもう 「いいね」、フォローをしておすすめの記事をチェックしよう。 グノシーについて 公式SNS 関連サイト アプリをダウンロード グノシー | 情報を世界中の人に最適に届ける Copyright © Gunosy Inc. All rights reserved.", '')
nrows = len(self.df_pre_recommend)
wakati_text_list = []
for row in range(0, nrows):
text = self.df_pre_recommend.loc[row]["Content_Parsed_1"]
wakati_text_list.append(get_wakati_text(text))
self.df_pre_recommend["wakati_text"] = wakati_text_list
df = pd.concat([df, self.df_train]).reset_index(drop=True)
vectorizer = TfidfVectorizer(use_idf = True, token_pattern=u'(?u)\\b\\w+\\b')
X = vectorizer.fit_transform(df.wakati_text.values)
X = X.toarray()
X_pred = X[0].reshape(1,-1)
X = np.delete(X, 0, axis=0)
df = df.drop(df.index[0])
y = df["Category"].apply(lambda x: 0
if x == "エンタメ" else 1
if x == "スポーツ" else 2
if x == "グルメ" else 3
if x == "海外" else 4
if x == "おもしろ" else 5
if x == "国内" else 6
if x == "IT・科学" else 7)
return X, y, X_pred
"""
Reference:
https://nlp.stanford.edu/IR-book/html/htmledition/naive-bayes-text-classification-1.html
"""
def count(self, X, Y):
"""Count and smooth feature occurrences.
feature_count_: the number of occurrences of each term in the training documents of each class
class_count_: the number of training documents encountered for each class
"""
self.feature_count_ += safe_sparse_dot(Y.T, X)
self.class_count_ += Y.sum(axis=0)
def update_feature_log_distribution(self, alpha):
"""Apply smoothing to raw counts and recompute log probabilities
Equation 119:
log P^(t|c) = log(T_ct + alpha) - log (sum(T_ct' + alpha))
"""
smoothed_fc = self.feature_count_ + alpha
smoothed_cc = smoothed_fc.sum(axis=1)
self.feature_log_prob_ = (np.log(smoothed_fc) -
np.log(smoothed_cc.reshape(-1, 1)))
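# Worked illustration of Equation 119 (assumed counts, not from real data):
# with alpha = 0.01 and raw term counts T_c = [3, 0, 1] for one class, the
# smoothed counts are [3.01, 0.01, 1.01], their sum is 4.03, and
# log P^(t|c) = log([3.01, 0.01, 1.01]) - log(4.03) ~ [-0.29, -6.00, -1.38].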
def joint_log_likelihood(self, X):
"""Calculate the posterior log probability of the samples X
Equation 115:
log P^(c) + sum(log P^(t|c))
"""
return (safe_sparse_dot(X, self.feature_log_prob_.T) +
self.class_log_prior_)
def update_class_log_distribution(self):
""" Equation 116:
log P^(c) = log(Nc) - log(N)
Nc: the number of documents in class c
N: the total number of documents
"""
n_classes = len(self.classes_)
with warnings.catch_warnings():
warnings.simplefilter("ignore", RuntimeWarning)
log_class_count = np.log(self.class_count_)
# empirical prior, with sample_weight taken into account
self.class_log_prior_ = (log_class_count -
np.log(self.class_count_.sum()))
def starting_values(self, n_effective_classes, n_features):
self.class_count_ = np.zeros(n_effective_classes, dtype=np.float64)
self.feature_count_ = np.zeros((n_effective_classes, n_features),
dtype=np.float64)
def estimate_predict(self, X, y, X_test):
_, n_features = X.shape
self.n_features_ = n_features
labelbin = LabelBinarizer()
Y = labelbin.fit_transform(y)
self.classes_ = labelbin.classes_
if Y.shape[1] == 1:
Y = np.concatenate((1 - Y, Y), axis=1)
n_effective_classes = Y.shape[1]
self.starting_values(n_effective_classes, n_features)
self.count(X, Y)
alpha = 0.01
# The maximum of posteriori (MAP)
self.update_feature_log_distribution(alpha)
self.update_class_log_distribution()
jll = self.joint_log_likelihood(X_test)
predict = self.classes_[np.argmax(jll, axis=1)]
log_prob_x = logsumexp(jll, axis=1)
predict_log_prob = jll - np.atleast_2d(log_prob_x)
import numpy as np
from at import *
from at.load import load_mat
from matplotlib import pyplot as plt
import matplotlib.pyplot as plt
import at.plot
import numpy as np
from pylab import *
import pandas as pd
import csv
from random import random
def plot_closedOrbit(ring, refpts):
elements_indexes = get_refpts(ring, refpts)
lindata0, tune, chrom, lindata = ring.linopt(get_chrom=True, refpts=elements_indexes)
closed_orbitx = lindata['closed_orbit'][:, 0]
closed_orbity = lindata['closed_orbit'][:, 2]
s_pos = lindata['s_pos']
closed_orbit = lindata['closed_orbit']
beta_x= lindata['beta'][:, 0]
beta_y= lindata['beta'][:, 1]
dx = lindata['dispersion'][:, 0]
dy = lindata['dispersion'][:, 2]
plt.plot(s_pos, closed_orbitx)
# Label for x-axis
plt.xlabel("s_pos")
# Label for y-axis
plt.ylabel("closed_orbit x")
# for display
i = 0
S_pos2 = []
plt.title("Closed orbit x")
plt.show()
plt.plot(s_pos, closed_orbity)
# Label for x-axis
plt.xlabel("s_pos")
# Label for y-axis
plt.ylabel("closed_orbit y")
# for display
i = 0
S_pos2 = []
plt.title("Closed orbit y")
plt.show()
def correctionType(alpha1,alpha2, alpha3):
if alpha1 == 1:
type = "optics correction"
if alpha2 == 1:
type = "dispersion correction"
if alpha3 == 1:
type = "optics and dispersion correction"
print("This code performs: ", type)
#return type
def func(j, mylist):
# dedup, preserving order (dict is insertion-ordered as a language guarantee as of 3.7):
deduped = list(dict.fromkeys(mylist))
# Slice off all but the part you care about:
return deduped[::j]
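# Example (illustrative, not from the original script): func(2, [3, 1, 3, 2, 1, 5])
# first dedups to [3, 1, 2, 5] while preserving order, then keeps every 2nd
# element -> [3, 2].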
def defineMatrices_w_eta(W, alpha1, alpha2,alpha3, C0x, C0y, C0xy, C0yx, Cxx_err, Cyy_err, Cxy_err, Cyx_err, dCx, dCy, dCxy,dCyx):
Nk = len(dCx) # number of free parameters
Nm = len(dCx) # number of measurements
print('NK:', Nk)
print('Nm:', Nm)
Ax = np.zeros([Nk, Nk])
Ay = np.zeros([Nk, Nk])
Axy = np.zeros([Nk, Nk])
Ayx = np.zeros([Nk, Nk])
A = np.zeros([4 * Nk, Nk])
##
Bx = np.zeros([Nk, 1])
By = np.zeros([Nk, 1])
Bxy = np.zeros([Nk, 1])
Byx = np.zeros([Nk, 1])
B = np.zeros([4 * Nk, 1])
##
Dx = (Cxx_err[:, :] - C0x[:, :] )#- error_variance) ### dk ?
Dy = (Cyy_err[:, :] - C0y[:, :] )
Dxy = (Cxy_err[:, :] - C0xy[:, :])
Dyx = (Cyx_err[:, :] - C0yx[:, :] )
##
for i in range(Nk): ## i represents each quad
# print('done A:', 100.* i ,'%')
for j in range(Nk):
Ax[i, j] = np.sum(np.dot(np.dot(dCx[i][0: -2, :],W*alpha1), dCx[j][0: -2, :].T)) + np.sum(np.dot(np.dot(dCx[i][ -2 ::, :],W*alpha2), dCx[j][ -2 ::, :].T)) + np.sum(np.dot(np.dot(dCx[i],W*alpha3), dCx[j].T))
Ay[i, j] = np.sum(np.dot(np.dot(dCy[i][0: -2, :],W*alpha1), dCy[j][0: -2, :].T)) + np.sum(np.dot(np.dot(dCy[i][ -2 ::, :],W*alpha2), dCy[j][ -2 ::, :].T))+ np.sum(np.dot(np.dot(dCy[i],W*alpha3), dCy[j].T))
Axy[i, j] = np.sum(np.dot(np.dot(dCxy[i][0: -2, :],W*alpha1), dCxy[j][0: -2, :].T)) + np.sum(np.dot(np.dot(dCxy[i][ -2 ::, :],W*alpha2), dCxy[j][ -2 ::, :].T))+ np.sum(np.dot(np.dot(dCxy[i],W*alpha3), dCxy[j].T))
Ayx[i, j] = np.sum(np.dot(np.dot(dCyx[i][0: -2, :],W*alpha1), dCyx[j][0: -2, :].T)) + np.sum(np.dot(np.dot(dCyx[i][ -2 ::, :],W*alpha2), dCyx[j][ -2 ::, :].T))+ np.sum(np.dot(np.dot(dCyx[i],W*alpha3), dCyx[j].T))
A[i, :] = Ax[i, :]
A[i + Nk, :] = Ay[i, :]
A[i + 2 * Nk, :] = Axy[i, :]
A[i + 3 * Nk, :] = Ayx[i, :]
##
for i in range(Nk):
Bx[i] = np.sum(np.dot(np.dot(dCx[i][0: -2, :],W*alpha1), Dx[0: -2, :].T))+ np.sum(np.dot(np.dot(dCx[i][ -2 ::, :],W*alpha2), Dx[ -2 ::, :].T)) + np.sum(np.dot(np.dot(dCx[i],W*alpha3), Dx.T))
By[i] = np.sum(np.dot(np.dot(dCy[i][0: -2, :],W*alpha1), Dy[0: -2, :].T)) + np.sum(np.dot(np.dot(dCy[i][ -2 ::, :],W*alpha2)
import GPy
import numpy as np
import pytest
from sklearn.gaussian_process import GaussianProcessRegressor
from sklearn.gaussian_process.kernels import Matern
from bopy.benchmark_functions import forrester
from bopy.exceptions import NotFittedError
from bopy.surrogate import GPyGPSurrogate, ScipyGPSurrogate
n_samples = 10
@pytest.fixture(scope="module", autouse=True)
def x():
return np.linspace(0, 1, n_samples).reshape(-1, 1)
@pytest.fixture(scope="module", autouse=True)
def y(x):
return forrester(x)
def scipy_gp_surrogate():
return ScipyGPSurrogate(
gp=GaussianProcessRegressor(kernel=Matern(nu=1.5), alpha=1e-5, normalize_y=True)
)
def gpy_gp_surrogate():
def gp_initializer(x, y):
return GPy.models.GPRegression(
x, y, kernel=GPy.kern.RBF(input_dim=1), noise_var=1e-5, normalizer=True
)
return GPyGPSurrogate(gp_initializer=gp_initializer)
@pytest.fixture(
scope="module",
autouse=True,
params=[scipy_gp_surrogate(), gpy_gp_surrogate()],
ids=["scipy_gp", "gpy_gp"],
)
def surrogate(request):
return request.param
@pytest.fixture(scope="class")
def trained_surrogate(surrogate, x, y):
surrogate.fit(x, y)
return surrogate
class TestArgumentsToFit:
def test_x_must_contain_at_least_one_sample(self, surrogate):
with pytest.raises(ValueError, match="`x` must contain at least one sample"):
surrogate.fit(x=np.array([]), y=np.array([1.0]))
def test_y_must_contain_at_least_one_sample(self, surrogate):
with pytest.raises(ValueError, match="`y` must contain at least one sample"):
surrogate.fit(x=np.array([[1.0]]), y=np.array([]))
def test_x_and_y_must_contain_the_same_number_of_samples(self, surrogate):
with pytest.raises(
ValueError, match="`x` and `y` must have the same number of samples"
):
surrogate.fit(x=np.array([[1.0]]), y=np.array([1.0, 1.0]))
def test_x_must_be_2d(self, surrogate):
with pytest.raises(ValueError, match="`x` must be 2D"):
surrogate.fit(x=np.array([[[1.0]]]), y=np.array([1.0]))
def test_y_must_be_1d(self, surrogate):
with pytest.raises(ValueError, match="`y` must be 1D"):
surrogate.fit(x=np.array([[1.0]]), y=np.array([[1.0]]))
class TestBeforeFitting:
def test_calling_predict_raises_not_fitted_error(self, surrogate, x):
with pytest.raises(NotFittedError, match="must be fitted first"):
surrogate.predict(x)
class TestArgumentsToPredictAfterFitting:
def test_x_must_contain_at_least_one_sample(self, trained_surrogate):
with pytest.raises(ValueError, match="`x` must contain at least one sample"):
trained_surrogate.predict(x=np.array([]))
def test_x_must_be_2d(self, trained_surrogate):
with pytest.raises(ValueError, match="`x` must be 2D"):
trained_surrogate.predict(x=np.array([1.0]))
def test_x_must_have_the_same_number_of_dimensions_as_the_training_data(
self, trained_surrogate
):
with pytest.raises(
ValueError,
match="`x` must have the same number of dimensions as the training data",
):
trained_surrogate.predict(x=np.array([[1.0, 1.0]]))
class TestAfterPredicting:
@pytest.fixture(scope="class", autouse=True)
def predictions(self, trained_surrogate, x):
return trained_surrogate.predict(x)
@pytest.fixture(scope="class", autouse=True)
def predicted_mean(self, predictions):
return predictions[0]
@pytest.fixture(scope="class", autouse=True)
def predicted_var(self, predictions):
return predictions[1]
def test_predicted_mean_is_the_correct_shape(self, predicted_mean):
assert predicted_mean.shape == (n_samples,)
def test_predicted_var_is_the_correct_shape(self, predicted_var):
assert predicted_var.shape == (n_samples, n_samples)
def test_reference_to_x_is_stored(self, trained_surrogate, x):
assert np.array_equal(trained_surrogate.x, x)
# -*- coding: utf-8 -*-
#
import copy
import os
import string
import tempfile
import numpy
import meshio
# In general:
# Use values with an infinite decimal representation to test precision.
tri_mesh = meshio.Mesh(
numpy.array([[0.0, 0.0, 0.0], [1.0, 0.0, 0.0], [1.0, 1.0, 0.0], [0.0, 1.0, 0.0]])
/ 3,
{"triangle": numpy.array([[0, 1, 2], [0, 2, 3]])},
)
triangle6_mesh = meshio.Mesh(
numpy.array(
[
[0.0, 0.0, 0.0],
[1.0, 0.0, 0.0],
[1.0, 1.0, 0.0],
[0.5, 0.25, 0.0],
[1.25, 0.5, 0.0],
[0.25, 0.75, 0.0],
[2.0, 1.0, 0.0],
[1.5, 1.25, 0.0],
[1.75, 0.25, 0.0],
]
)
/ 3.0,
{"triangle6": numpy.array([[0, 1, 2, 3, 4, 5], [1, 6, 2, 8, 7, 4]])},
)
quad_mesh = meshio.Mesh(
numpy.array(
[
[0.0, 0.0, 0.0],
[1.0, 0.0, 0.0],
[2.0, 0.0, 0.0],
[2.0, 1.0, 0.0],
[1.0, 1.0, 0.0],
[0.0, 1.0, 0.0],
]
)
/ 3.0,
{"quad": numpy.array([[0, 1, 4, 5], [1, 2, 3, 4]])
import io
import contextlib
import warnings
import numpy as np
import scipy as sp
from copy import deepcopy
from sklearn.base import clone
from sklearn.utils.validation import check_is_fitted
from sklearn.base import BaseEstimator, TransformerMixin
from sklearn.utils.metaestimators import if_delegate_has_method
from joblib import Parallel, delayed
from hyperopt import fmin, tpe
from .utils import ParameterSampler, _check_param, _check_boosting
from .utils import _set_categorical_indexes, _get_categorical_support
from .utils import _feature_importances, _shap_importances
class _BoostSearch(BaseEstimator):
"""Base class for BoostSearch meta-estimator.
Warning: This class should not be used directly. Use derived classes
instead.
"""
def __init__(self):
pass
def _validate_param_grid(self, fit_params):
"""Private method to validate fitting parameters."""
if not isinstance(self.param_grid, dict):
raise ValueError("Pass param_grid in dict format.")
self._param_grid = self.param_grid.copy()
for p_k, p_v in self._param_grid.items():
self._param_grid[p_k] = _check_param(p_v)
if 'eval_set' not in fit_params:
raise ValueError(
"When tuning parameters, at least "
"a evaluation set is required.")
self._eval_score = np.argmax if self.greater_is_better else np.argmin
self._score_sign = -1 if self.greater_is_better else 1
rs = ParameterSampler(
n_iter=self.n_iter,
param_distributions=self._param_grid,
random_state=self.sampling_seed
)
self._param_combi, self._tuning_type = rs.sample()
self._trial_id = 1
if self.verbose > 0:
n_trials = self.n_iter if self._tuning_type == 'hyperopt' \
else len(self._param_combi)
print("\n{} trials detected for {}\n".format(
n_trials, tuple(self.param_grid.keys())))
def _fit(self, X, y, fit_params, params=None):
"""Private method to fit a single boosting model and extract results."""
model = self._build_model(params)
if isinstance(model, _BoostSelector):
model.fit(X=X, y=y, **fit_params)
else:
with contextlib.redirect_stdout(io.StringIO()):
model.fit(X=X, y=y, **fit_params)
results = {'params': params, 'status': 'ok'}
if isinstance(model, _BoostSelector):
results['booster'] = model.estimator_
results['model'] = model
else:
results['booster'] = model
results['model'] = None
if 'eval_set' not in fit_params:
return results
if self.boost_type_ == 'XGB':
# w/ eval_set and w/ early_stopping_rounds
if hasattr(results['booster'], 'best_score'):
results['iterations'] = results['booster'].best_iteration
# w/ eval_set and w/o early_stopping_rounds
else:
valid_id = list(results['booster'].evals_result_.keys())[-1]
eval_metric = list(results['booster'].evals_result_[valid_id])[-1]
results['iterations'] = \
len(results['booster'].evals_result_[valid_id][eval_metric])
else:
# w/ eval_set and w/ early_stopping_rounds
if results['booster'].best_iteration_ is not None:
results['iterations'] = results['booster'].best_iteration_
# w/ eval_set and w/o early_stopping_rounds
else:
valid_id = list(results['booster'].evals_result_.keys())[-1]
eval_metric = list(results['booster'].evals_result_[valid_id])[-1]
results['iterations'] = \
len(results['booster'].evals_result_[valid_id][eval_metric])
if self.boost_type_ == 'XGB':
# w/ eval_set and w/ early_stopping_rounds
if hasattr(results['booster'], 'best_score'):
results['loss'] = results['booster'].best_score
# w/ eval_set and w/o early_stopping_rounds
else:
valid_id = list(results['booster'].evals_result_.keys())[-1]
eval_metric = list(results['booster'].evals_result_[valid_id])[-1]
results['loss'] = \
results['booster'].evals_result_[valid_id][eval_metric][-1]
else:
valid_id = list(results['booster'].best_score_.keys())[-1]
eval_metric = list(results['booster'].best_score_[valid_id])[-1]
results['loss'] = results['booster'].best_score_[valid_id][eval_metric]
if params is not None:
if self.verbose > 0:
msg = "trial: {} ### iterations: {} ### eval_score: {}".format(
str(self._trial_id).zfill(4),
str(results['iterations']).zfill(5),
round(results['loss'], 5)
)
print(msg)
self._trial_id += 1
results['loss'] *= self._score_sign
return results
def fit(self, X, y, trials=None, **fit_params):
"""Fit the provided boosting algorithm while searching the best subset
of features (according to the selected strategy) and choosing the best
parameters configuration (if provided).
It takes the same arguments available in the estimator fit.
Parameters
----------
X : array-like of shape (n_samples, n_features)
The training input samples.
y : array-like of shape (n_samples,)
Target values.
trials : hyperopt.Trials() object, default=None
A hyperopt trials object, used to store intermediate results for all
optimization runs. Effective (and required) only when hyperopt
parameter searching is computed.
**fit_params : Additional fitting arguments.
Returns
-------
self : object
"""
self.boost_type_ = _check_boosting(self.estimator)
if self.param_grid is None:
results = self._fit(X, y, fit_params)
for v in vars(results['model']):
if v.endswith("_") and not v.startswith("__"):
setattr(self, str(v), getattr(results['model'], str(v)))
else:
self._validate_param_grid(fit_params)
if self._tuning_type == 'hyperopt':
if trials is None:
raise ValueError(
"trials must be not None when using hyperopt."
)
search = fmin(
fn=lambda p: self._fit(
params=p, X=X, y=y, fit_params=fit_params
),
space=self._param_combi, algo=tpe.suggest,
max_evals=self.n_iter, trials=trials,
rstate=np.random.RandomState(self.sampling_seed),
show_progressbar=False, verbose=0
)
all_results = trials.results
else:
all_results = Parallel(
n_jobs=self.n_jobs, verbose=self.verbose * int(bool(self.n_jobs))
)(delayed(self._fit)(X, y, fit_params, params)
for params in self._param_combi)
# extract results from parallel loops
self.trials_, self.iterations_, self.scores_, models = [], [], [], []
for job_res in all_results:
self.trials_.append(job_res['params'])
self.iterations_.append(job_res['iterations'])
self.scores_.append(self._score_sign * job_res['loss'])
if isinstance(job_res['model'], _BoostSelector):
models.append(job_res['model'])
else:
models.append(job_res['booster'])
# get the best
id_best = self._eval_score(self.scores_)
self.best_params_ = self.trials_[id_best]
self.best_iter_ = self.iterations_[id_best]
self.best_score_ = self.scores_[id_best]
self.estimator_ = models[id_best]
for v in vars(models[id_best]):
if v.endswith("_") and not v.startswith("__"):
setattr(self, str(v), getattr(models[id_best], str(v)))
return self
def predict(self, X, **predict_params):
"""Predict X.
Parameters
----------
X : array-like of shape (n_samples, n_features)
Samples.
**predict_params : Additional predict arguments.
Returns
-------
pred : ndarray of shape (n_samples,)
The predicted values.
"""
check_is_fitted(self)
if hasattr(self, 'transform'):
X = self.transform(X)
return self.estimator_.predict(X, **predict_params)
@if_delegate_has_method(delegate='estimator')
def predict_proba(self, X, **predict_params):
"""Predict X probabilities.
Parameters
----------
X : array-like of shape (n_samples, n_features)
Samples.
**predict_params : Additional predict arguments.
Returns
-------
pred : ndarray of shape (n_samples, n_classes)
The predicted values.
"""
check_is_fitted(self)
if hasattr(self, 'transform'):
X = self.transform(X)
return self.estimator_.predict_proba(X, **predict_params)
def score(self, X, y, sample_weight=None):
"""Return the score on the given test data and labels.
Parameters
----------
X : array-like of shape (n_samples, n_features)
Test samples.
y : array-like of shape (n_samples,)
True values for X.
sample_weight : array-like of shape (n_samples,), default=None
Sample weights.
Returns
-------
score : float
Accuracy for classification, R2 for regression.
"""
check_is_fitted(self)
if hasattr(self, 'transform'):
X = self.transform(X)
return self.estimator_.score(X, y, sample_weight=sample_weight)
class _BoostSelector(BaseEstimator, TransformerMixin):
"""Base class for feature selection meta-estimator.
Warning: This class should not be used directly. Use derived classes
instead.
"""
def __init__(self):
pass
def transform(self, X):
"""Reduces the input X to the features selected by Boruta.
Parameters
----------
X : array-like of shape (n_samples, n_features)
Samples.
Returns
-------
X : array-like of shape (n_samples, n_features_)
The input samples with only the selected features by Boruta.
"""
check_is_fitted(self)
shapes = np.shape(X)
if len(shapes) != 2:
raise ValueError("X must be 2D.")
if shapes[1] != self.support_.shape[0]:
raise ValueError(
"Expected {} features, received {}.".format(
self.support_.shape[0], shapes[1]))
if isinstance(X, np.ndarray):
return X[:, self.support_]
elif hasattr(X, 'loc'):
return X.loc[:, self.support_]
else:
raise ValueError("Data type not understood.")
class _Boruta(_BoostSelector):
"""Base class for BoostBoruta meta-estimator.
Warning: This class should not be used directly. Use derived classes
instead.
Notes
-----
The code for the Boruta algorithm is inspired and improved from:
https://github.com/scikit-learn-contrib/boruta_py
"""
def __init__(self,
estimator, *,
perc=100,
alpha=0.05,
max_iter=100,
early_stopping_boruta_rounds=None,
importance_type='feature_importances',
train_importance=True,
verbose=0):
self.estimator = estimator
self.perc = perc
self.alpha = alpha
self.max_iter = max_iter
self.early_stopping_boruta_rounds = early_stopping_boruta_rounds
self.importance_type = importance_type
self.train_importance = train_importance
self.verbose = verbose
def _create_X(self, X, feat_id_real):
"""Private method to add shadow features to the original ones. """
if isinstance(X, np.ndarray):
X_real = X[:, feat_id_real].copy()
X_sha = X_real.copy()
X_sha = np.apply_along_axis(self._random_state.permutation, 0, X_sha)
X = np.hstack((X_real, X_sha))
elif hasattr(X, 'iloc'):
X_real = X.iloc[:, feat_id_real].copy()
X_sha = X_real.copy()
X_sha = X_sha.apply(self._random_state.permutation)
X_sha = X_sha.astype(X_real.dtypes)
X = X_real.join(X_sha, rsuffix='_SHA')
else:
raise ValueError("Data type not understood.")
return X
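# Illustration (assumed shapes, not from the original tests): if X has 3 real
# features and feat_id_real = [0, 1, 2], the returned matrix has 6 columns --
# the originals followed by row-permuted "shadow" copies (suffixed '_SHA' for
# DataFrames) that serve as the importance baseline in the Boruta test below.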
def _check_fit_params(self, fit_params, feat_id_real=None):
"""Private method to validate and check fit_params."""
_fit_params = deepcopy(fit_params)
estimator = clone(self.estimator)
# add here possible estimator checks in each iteration
_fit_params = _set_categorical_indexes(
self.support_, self._cat_support, _fit_params, duplicate=True)
if feat_id_real is None: # final model fit
if 'eval_set' in _fit_params:
_fit_params['eval_set'] = list(map(lambda x: (
self.transform(x[0]), x[1]
), _fit_params['eval_set']))
else:
if 'eval_set' in _fit_params: # iterative model fit
_fit_params['eval_set'] = list(map(lambda x: (
self._create_X(x[0], feat_id_real), x[1]
), _fit_params['eval_set']))
if 'feature_name' in _fit_params: # LGB
_fit_params['feature_name'] = 'auto'
if 'feature_weights' in _fit_params:  # XGB
warnings.warn(
"feature_weights is not supported when selecting features. "
"It's automatically set to None.")
_fit_params['feature_weights'] = None
return _fit_params, estimator
def _do_tests(self, dec_reg, hit_reg, iter_id):
"""Private method to operate Bonferroni corrections on the feature
selections."""
active_features = np.where(dec_reg >= 0)[0]
hits = hit_reg[active_features]
# get uncorrected p values based on hit_reg
to_accept_ps = sp.stats.binom.sf(hits - 1, iter_id, .5).flatten()
to_reject_ps = sp.stats.binom.cdf(hits, iter_id, .5).flatten()
# Bonferroni correction with the total n_features in each iteration
to_accept = to_accept_ps <= self.alpha / float(len(dec_reg))
to_reject = to_reject_ps <= self.alpha / float(len(dec_reg))
# find features which are 0 and have been rejected or accepted
to_accept = np.where((dec_reg[active_features] == 0) * to_accept)[0]
to_reject = np.where((dec_reg[active_features] == 0) * to_reject)[0]
# updating dec_reg
dec_reg[active_features[to_accept]] = 1
dec_reg[active_features[to_reject]] = -1
return dec_reg
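# Worked illustration (assumed numbers, not from a real run): after iter_id = 20
# iterations, a feature with hit = 18 wins over the shadow threshold has an
# uncorrected acceptance p-value sp.stats.binom.sf(17, 20, .5) ~ 2.0e-4; with
# 100 candidate features the Bonferroni level is 0.05 / 100 = 5e-4, so the
# feature is accepted (dec_reg set to 1).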
def fit(self, X, y, **fit_params):
"""Fit the Boruta algorithm to automatically tune
the number of selected features."""
self.boost_type_ = _check_boosting(self.estimator)
if self.max_iter < 1:
raise ValueError('max_iter should be an integer >0.')
if self.perc <= 0 or self.perc > 100:
raise ValueError('The percentile should be between 0 and 100.')
if self.alpha <= 0 or self.alpha > 1:
raise ValueError('alpha should be between 0 and 1.')
if self.early_stopping_boruta_rounds is None:
es_boruta_rounds = self.max_iter
else:
if self.early_stopping_boruta_rounds < 1:
raise ValueError(
'early_stopping_boruta_rounds should be an integer >0.')
es_boruta_rounds = self.early_stopping_boruta_rounds
importances = ['feature_importances', 'shap_importances']
if self.importance_type not in importances:
raise ValueError(
"importance_type must be one of {}. Get '{}'".format(
importances, self.importance_type))
if self.importance_type == 'shap_importances':
if not self.train_importance and not 'eval_set' in fit_params:
raise ValueError(
"When train_importance is set to False, using "
"shap_importances, pass at least a eval_set.")
eval_importance = not self.train_importance and 'eval_set' in fit_params
shapes = np.shape(X)
if len(shapes) != 2:
raise ValueError("X must be 2D.")
n_features = shapes[1]
# create mask for user-defined categorical features
self._cat_support = _get_categorical_support(n_features, fit_params)
# holds the decision about each feature:
# default (0); accepted (1); rejected (-1)
dec_reg = np.zeros(n_features, dtype=int)
dec_history = np.zeros((self.max_iter, n_features), dtype=int)
# counts how many times a given feature was more important than
# the best of the shadow features
hit_reg = np.zeros(n_features, dtype=int)
# record the history of the iterations
imp_history = np.zeros(n_features, dtype=float)
sha_max_history = []
for i in range(self.max_iter):
if (dec_reg != 0).all():
if self.verbose > 1:
print("All Features analyzed. Boruta stop!")
break
if self.verbose > 1:
print('Iteration: {} / {}'.format(i + 1, self.max_iter))
self._random_state = np.random.RandomState(i + 1000)
# add shadow attributes, shuffle and train estimator
self.support_ = dec_reg >= 0
feat_id_real = np.where(self.support_)[0]
n_real = feat_id_real.shape[0]
_fit_params, estimator = self._check_fit_params(fit_params, feat_id_real)
estimator.set_params(random_state=i + 1000)
_X = self._create_X(X, feat_id_real)
with contextlib.redirect_stdout(io.StringIO()):
estimator.fit(_X, y, **_fit_params)
# get coefs
if self.importance_type == 'feature_importances':
coefs = _feature_importances(estimator)
else:
if eval_importance:
coefs = _shap_importances(
estimator, _fit_params['eval_set'][-1][0])
else:
coefs = _shap_importances(estimator, _X)
# separate importances of real and shadow features
imp_sha = coefs[n_real:]
imp_real = np.zeros(n_features) * np.nan
imp_real[feat_id_real] = coefs[:n_real]
# get the threshold of shadow importances used for rejection
imp_sha_max = np.percentile(imp_sha, self.perc)
# record importance history
sha_max_history.append(imp_sha_max)
imp_history = np.vstack((imp_history, imp_real))
# register which feature is more imp than the max of shadows
hit_reg[np.where(imp_real[~np.isnan(imp_real)] > imp_sha_max)[0]] += 1
# check if a feature is doing better than expected by chance
dec_reg = self._do_tests(dec_reg, hit_reg, i + 1)
dec_history[i] = dec_reg
es_id = i - es_boruta_rounds
if es_id >= 0:
if np.equal(dec_history[es_id:(i + 1)], dec_reg).all():
if self.verbose > 0:
print("Boruta early stopping at iteration {}".format(i + 1))
break
confirmed = np.where(dec_reg == 1)[0]
tentative = np.where(dec_reg == 0)
import gc
import numpy as np
import pandas as pd
import os
from sklearn.feature_extraction.text import TfidfVectorizer
from sklearn.metrics import roc_auc_score
from sklearn.model_selection import RepeatedKFold
from sklearn.preprocessing import LabelEncoder
from datetime import datetime
from tqdm import tqdm
import lightgbm as lgb
# Load Data
dtype = {
'id': str,
'teacher_id': str,
'teacher_prefix': str,
'school_state': str,
'project_submitted_datetime': str,
'project_grade_category': str,
'project_subject_categories': str,
'project_subject_subcategories': str,
'project_title': str,
'project_essay_1': str,
'project_essay_2': str,
'project_essay_3': str,
'project_essay_4': str,
'project_resource_summary': str,
'teacher_number_of_previously_posted_projects': int,
'project_is_approved': np.uint8,
}
# Write code that limits the rows until I've sorted out the kinks
data_dir = "F:/Nerdy Stuff/Kaggle/DonorsChoose"
sub_path = "F:/Nerdy Stuff/Kaggle submissions/DonorChoose"
train = pd.read_csv(os.path.join(data_dir, 'data/train_stem.csv'),
low_memory=True)
test = pd.read_csv(os.path.join(data_dir, 'data/test_stem.csv'),
low_memory=True)
id_test = test['id'].values
# Extract features
def extract_features(df):
df['project_title_len'] = df['project_title'].apply(lambda x: len(str(x)))
df['project_essay_1_len'] = df['project_essay_1'].apply(lambda x: len(str(x)))
df['project_essay_2_len'] = df['project_essay_2'].apply(lambda x: len(str(x)))
df['project_essay_3_len'] = df['project_essay_3'].apply(lambda x: len(str(x)))
df['project_essay_4_len'] = df['project_essay_4'].apply(lambda x: len(str(x)))
df['project_resource_summary_len'] = df['project_resource_summary'].apply(lambda x: len(str(x)))
df['project_title_wc'] = df['project_title'].apply(lambda x: len(str(x).split(' ')))
df['project_essay_1_wc'] = df['project_essay_1'].apply(lambda x: len(str(x).split(' ')))
df['project_essay_2_wc'] = df['project_essay_2'].apply(lambda x: len(str(x).split(' ')))
df['project_essay_3_wc'] = df['project_essay_3'].apply(lambda x: len(str(x).split(' ')))
df['project_essay_4_wc'] = df['project_essay_4'].apply(lambda x: len(str(x).split(' ')))
df['project_resource_summary_wc'] = df['project_resource_summary'].apply(lambda x: len(str(x).split(' ')))
extract_features(train)
extract_features(test)
train.drop([
'project_essay_1',
'project_essay_2',
'project_essay_3',
'project_essay_4'], axis=1, inplace=True)
test.drop([
'project_essay_1',
'project_essay_2',
'project_essay_3',
'project_essay_4'], axis=1, inplace=True)
# Recode missing values: after stopword removal some titles end up with no content
print("Recoding missing values once NLP preprocessing done. Might want to check that")
train.loc[train['project_title'].isnull() == True, 'project_title'] = 'No values once NLP preprocessing is done'
test.loc[test['project_title'].isnull() == True, 'project_title'] = 'No values once NLP preprocessing is done'
train.loc[train['project_essay'].isnull() == True, 'project_essay'] = 'No values once NLP preprocessing is done'
test.loc[test['project_essay'].isnull() == True, 'project_essay'] = 'No values once NLP preprocessing is done'
train.loc[train['project_resource_summary'].isnull() == True, 'project_resource_summary'] = 'No values once NLP preprocessing is done'
test.loc[test['project_resource_summary'].isnull() == True, 'project_resource_summary'] = 'No values once NLP preprocessing is done'
train.loc[train['description_ttl'].isnull() == True, 'description_ttl'] = 'No values once NLP preprocessing is done'
test.loc[test['description_ttl'].isnull() == True, 'description_ttl'] = 'No values once NLP preprocessing is done'
gc.collect()
# Preprocess columns with label encoder
print('Label Encoder...')
cols = [
'teacher_id',
'teacher_prefix',
'school_state',
'project_grade_category',
'project_subject_categories',
'project_subject_subcategories'
]
df_all = pd.concat([train, test], axis=0)
for c in tqdm(cols):
le = LabelEncoder()
le.fit(df_all[c].astype(str))
train[c] = le.transform(train[c].astype(str))
test[c] = le.transform(test[c].astype(str))
del le
gc.collect()
print('Done.')
# Preprocess timestamp
print('Preprocessing timestamp...')
def process_timestamp(df):
df['project_submitted_datetime'] = pd.to_datetime(df['project_submitted_datetime'])
df['year'] = df['project_submitted_datetime'].apply(lambda x: x.year)
df['month'] = df['project_submitted_datetime'].apply(lambda x: x.month)
df['day'] = df['project_submitted_datetime'].apply(lambda x: x.day)
df['day_of_week'] = df['project_submitted_datetime'].apply(lambda x: x.dayofweek)
df['hour'] = df['project_submitted_datetime'].apply(lambda x: x.hour)
df['minute'] = df['project_submitted_datetime'].apply(lambda x: x.minute)
df['project_submitted_datetime'] = df['project_submitted_datetime'].values.astype(np.int64)
process_timestamp(train)
process_timestamp(test)
print('Done.')
# Preprocess text
print('Preprocessing text...')
cols = [
'project_title',
'project_essay',
'project_resource_summary',
'description_ttl'
]
n_features = [
400,
4040,
400,
400
]
for c_i, c in tqdm(enumerate(cols)):
print("TFIDF for %s" % (c))
tfidf = TfidfVectorizer(
max_features=n_features[c_i],
norm='l2',
)
tfidf.fit(df_all[c])
tfidf_train = np.array(tfidf.transform(train[c]).toarray(), dtype=np.float16)
tfidf_test = np.array(tfidf.transform(test[c]).toarray(), dtype=np.float16)
for i in range(n_features[c_i]):
train[c + '_tfidf_' + str(i)] = tfidf_train[:, i]
test[c + '_tfidf_' + str(i)] = tfidf_test[:, i]
del tfidf, tfidf_train, tfidf_test
gc.collect()
print('Done.')
gc.collect()
# Prepare data
cols_to_drop = [
'Unnamed: 0'
, 'id'
, 'teacher_id'
, 'project_title'
, 'project_essay'
, 'project_resource_summary'
, 'project_is_approved'
, 'description_ttl'
]
X = train.drop(cols_to_drop, axis=1, errors='ignore')
y = train['project_is_approved']
X_test = test.drop(cols_to_drop, axis=1, errors='ignore')
id_test = test['id'].values
feature_names = list(X.columns)
print(X.shape, X_test.shape)
# del train, test
gc.collect()
# Build the model
cnt = 0
p_buf = []
n_splits = 5
n_repeats = 1
kf = RepeatedKFold(
n_splits=n_splits,
n_repeats=n_repeats,
random_state=0)
auc_buf = []
num_rows = 60000
X_train_test = X.iloc[0:num_rows, :]
y_train_test = y.iloc[0:num_rows]
prob_ests = []
y_test = []
prb = np.array(prob_ests[0])
y_tst = np.asarray(y_test[0], np.int32)
prb.dtype
y_tst.dtype
prb.shape
y_tst.shape
prb_ser = pd.Series(prb)
roc_auc_score(np.asarray(y_tst[0:9000], np.int32), prb[0:9000])
import matplotlib.pyplot as plt
pd.Series(prb[0:9000]).dtype
for train_index, valid_index in kf.split(X_train_test):
print('Fold {}/{}'.format(cnt + 1, n_splits))
params = {
'boosting_type': 'gbdt',
'objective': 'binary',
'metric': 'auc',
'max_depth': 14,
'num_leaves': 31,
'learning_rate': 0.025,
'feature_fraction': 0.85,
'bagging_fraction': 0.85,
'bagging_freq': 5,
'verbose': 0,
'num_threads': 1,
'lambda_l2': 1.0,
'min_gain_to_split': 0,
}
lgb_train = lgb.Dataset(
X_train_test.loc[train_index],
y_train_test.loc[train_index],
feature_name=feature_names,
)
lgb_train.raw_data = None
lgb_valid = lgb.Dataset(
X_train_test.loc[valid_index],
y_train_test.loc[valid_index],
)
lgb_valid.raw_data = None
model = lgb.train(
params,
lgb_train,
# num_boost_round=10000,
num_boost_round=100,
valid_sets=[lgb_train, lgb_valid],
early_stopping_rounds=100,
verbose_eval=100,
)
if cnt == 0:
importance = model.feature_importance()
model_fnames = model.feature_name()
tuples = sorted(zip(model_fnames, importance), key=lambda x: x[1])[::-1]
tuples = [x for x in tuples if x[1] > 0]
print('Important features:')
for i in range(60):
if i < len(tuples):
print(tuples[i])
else:
break
del importance, model_fnames, tuples
p = model.predict(X.loc[valid_index], num_iteration=model.best_iteration)
print(type(p))
print(p[0:5])
print(type(X))
print(type(y))
print(max(p))
prob_ests.append(p)
y_test.append(y.loc[valid_index])
auc = roc_auc_score(y.loc[valid_index], p)
auc = round(auc, 4)
print('{} AUC: {}'.format(str(cnt), str(auc)))
p = model.predict(X_test, num_iteration=model.best_iteration)
if len(p_buf) == 0:
p_buf = np.array(p, dtype=np.float16)
import os
from gym import error, spaces
from gym.utils import seeding
import numpy as np
from gym.envs.flex import flex_env
import pygame as pg
import itertools
from pygame.locals import *
from OpenGL.GL import *
from OpenGL.GLU import *
from scipy.spatial.distance import cdist
from scipy.spatial.transform import Rotation as R
try:
import bindings as pyFlex
except ImportError as e:
raise error.DependencyNotInstalled(
"{}. (HINT: PyFlex Binding is not installed correctly)".format(e))
class PlasticFlippingEnv(flex_env.FlexEnv):
def __init__(self):
self.resolution = 32
self.direct_info_dim = 13
obs_size = self.resolution * self.resolution *1 + self.direct_info_dim
self.frame_skip = 10
self.mapHalfExtent = 4
self.mapPartitionSize = 3
self.idxPool = np.array([x for x in itertools.product(np.arange(self.mapPartitionSize) - int(
self.mapPartitionSize / 2), np.arange(self.mapPartitionSize) - int(self.mapPartitionSize / 2))])
self.numInitClusters = 1
self.randomCluster = True
self.clusterDim = np.array([5,2,5])
action_bound = np.array([[-10, -10, -10, -np.pi / 2], [
10, 10, 10, np.pi / 2]])
obs_high = np.ones(obs_size) * np.inf
obs_low = -obs_high
observation_bound = np.array([obs_low, obs_high])
flex_env.FlexEnv.__init__(self, self.frame_skip, obs_size, observation_bound, action_bound, scene=2, viewer=0)
self.metadata = {
'render.modes': ['human', 'rgb_array'],
'video.frames_per_second': int(np.round(1.0 / self.dt))
}
self.action_scale = (action_bound[1] - action_bound[0]) / 2
self.barDim = np.array([1.5, 2.0, 0.8])
# self.goal_gradients = np.zeros((self.numInstances,self.resolution,self.resolution))
self.initClusterparam = np.zeros(
(self.numInstances, 6 * self.numInitClusters))
self.rolloutCnt = 0
self.stage = np.ones(self.numInstances)
self.rolloutRet = np.zeros(self.numInstances)
self.currCurriculum = 0
self.rwdBuffer = [[0, 0, 0] for _ in range(100)]
print("============================================Flipping================================================")
def angle_to_rot_matrix(self, angles):
rot_vec = np.ones((self.numInstances, 2, 2))
rot_vec[:, 0, 0] = np.cos(angles)
rot_vec[:, 0, 1] = -np.sin(angles)
rot_vec[:, 1, 0] = np.sin(angles)
rot_vec[:, 1, 1] = np.cos(angles)
return rot_vec
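# Quick check (illustrative): for an angle of 0 each 2x2 block is the identity,
# and for pi/2 it is [[0, -1], [1, 0]], i.e. a counter-clockwise rotation by
# 90 degrees, matching the cos/sin layout above.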
def _step(self, action):
action = action * self.action_scale
prev_bar_state, prev_part_state, prev_part_heights,prev_part_vel = self.get_state()
rot_mat = self.angle_to_rot_matrix(action[:, 3])
transformed_action = np.zeros((self.numInstances, 6))
for i in range(action.shape[0]):
bar_rot = R.from_euler('x',prev_bar_state[i,1,0])
action_trans = bar_rot.apply(action[i,0:3])
transformed_action[i, 0:3] = action_trans + prev_bar_state[i, 0]
flex_action = np.zeros((self.numInstances, 7))
flex_action[:, 0] = transformed_action[:, 0]
flex_action[:, 1] = transformed_action[:, 1]
flex_action[:, 2] = transformed_action[:, 2]
flex_action[:, 3] = prev_bar_state[:, 1, 0] + action[:, 3]
flex_action[:, 4] = 0
flex_action[:, 5] = 0
flex_action[:, 6] = 0
prev_height_diff = np.min(prev_part_heights,axis=1)-prev_bar_state[:,0,1]
prev_com_xz = np.mean(prev_part_state,axis=1)
# Simulation
done = self.do_simulation(flex_action, self.frame_skip)
curr_bar_state, curr_part_state, curr_part_heights,curr_part_vels = self.get_state()
curr_com_xz = np.mean(curr_part_state,axis=1)
obs = self._get_obs()
height_diff = np.min(curr_part_heights,axis=1)-curr_bar_state[:,0,1]
curr_total_heat = np.zeros(self.numInstances)
curr_total_heat_cnt = np.zeros(self.numInstances)
ang_vels = np.zeros(self.numInstances)
ang_vels_full = np.zeros((self.numInstances,3))
ang_vels_res = np.zeros(self.numInstances)
for i in range(self.numInstances):
height = height_diff[i]
curr_part_vel = curr_part_vels[i]
bar_rot = R.from_euler('x',curr_bar_state[i,1,0])
currParts = np.concatenate([curr_part_state[i,:,0,np.newaxis],curr_part_heights[i,:,np.newaxis],curr_part_state[i,:,1,np.newaxis]],axis=1)
rel_pos = currParts-curr_bar_state[i,0]
trans_pos = bar_rot.inv().apply(rel_pos)
ang_vel = self.get_angular_vel(currParts,curr_part_vel)
# w = np.mean(rel_pos,axis=0)[1] if np.mean(rel_pos,axis=0)[1]>0.5 else 0
w = 1 if np.mean(rel_pos,axis=0)[1]>0.5 else 0
# if(i==0):
# print(np.mean(rel_pos,axis=0)[1])
# print("Vel mag", np.mean(np.linalg.norm(curr_part_vel,axis=1)))
ang_vels_full[i] = 5*ang_vel*w
ang_vel_proj =np.dot(ang_vel,np.array([1,0,0]))*w
ang_vel_res = np.linalg.norm(ang_vel - ang_vel_proj*np.array([1,0,0]))
ang_vels[i] = np.clip(4*(ang_vel_proj),-1,1)
# ang_vels[i] = -4*(ang_vel_proj)
ang_vels_res[i] = (ang_vel_res)
# Heavy penalty on low particle heights
# Clipped ang vel val
# Only height reward
self.set_aux_info(ang_vels_full)
height_diff[height_diff>0] = 0.1+height_diff[height_diff>0]*10
height_diff[height_diff<0] *= 0.1
rewards = 0.1*0*height_diff+ang_vels
# print(ang_vels[0])
# if self.currCurriculum == 1:
# rewards -=-ang_vels_res
self.rolloutRet += rewards
info = {
# 'Total Reward': rewards[0],
'Height' : 0.1*height_diff[0],
'ang_vel': ang_vels[0],
# 'com_diff': com_diff[0]
}
reward_decomp = [0,0,0]
if (len(self.rwdBuffer) >= 100):
self.rwdBuffer.pop(0)
self.rwdBuffer.append(reward_decomp)
return obs, rewards, done, info
def _get_obs(self):
bar_states, part_states, part_heights,part_vels = self.get_state()
obs_list = []
for i in range(self.numInstances):
stage = self.stage[i]
part_state = part_states[i]
valid_idx = (part_state[:, 0] > -self.mapHalfExtent) & (part_state[:, 0] < self.mapHalfExtent) & (
part_state[:, 1] > -self.mapHalfExtent) & (part_state[:, 1] < self.mapHalfExtent)
part_state = part_state[valid_idx]
part_height = part_heights[i]
part_height = part_height[valid_idx]
part_vel = part_vels[i]
part_vel = part_vel[valid_idx]
bar_state = bar_states[i]
bar_y_rot_vec = np.array([np.cos(bar_state[1, 1]), np.sin(bar_state[1, 1])])
# bar_rot = np.zeros((2, 2))
# bar_rot[0, 0] = bar_y_rot_vec[0]
# bar_rot[0, 1] = -bar_y_rot_vec[1]
# bar_rot[1, 0] = bar_y_rot_vec[1]
# bar_rot[1, 1] = bar_y_rot_vec[0]
# density = self.get_particle_density(
# part_state, bar_state, bar_rot, normalized=True)
# height_map = self.get_mean_height_map(part_state, bar_state, bar_rot, part_height)
part_pos_xyz = np.concatenate([part_state[:,0,np.newaxis],part_height[:,np.newaxis],part_state[:,1,np.newaxis]],axis=1)
height_map = self.get_mean_height_map(part_pos_xyz, bar_state)
# if(i==0):
# print(np.max(heightz))
ang_vel = self.get_angular_vel(part_pos_xyz,part_vel) #3
bar_pos = bar_state[0] # 3
bar_ang_x = np.array([np.cos(bar_state[1, 0]), np.sin(bar_state[1, 0])]) # 2
bar_vel = bar_state[2] # 3
bar_ang_vel_x = np.array([np.cos(bar_state[3, 0]), np.sin(bar_state[3, 0])]) # 2
# if(i==0):
# print(part_pos_xyz)
# print(part_vel)
# print(ang_vel)
bar_info = np.concatenate([bar_pos, bar_ang_x, bar_vel, bar_ang_vel_x,bar_vel]) # note: bar_vel appears twice (3+2+3+2+3 = 13 values)
obs = np.concatenate(
[bar_info, height_map.flatten()
])
obs_list.append(obs)
return np.array(obs_list)
def get_particle_density(self, particles, bar_state, rot, normalized=True, width=2.5):
if (particles.shape[0] == 0):
return np.zeros((self.resolution, self.resolution))
particles -= bar_state[0, (0, 2)]
particles = np.matmul(particles, rot.transpose())
particles = np.clip(particles, -self.mapHalfExtent, self.mapHalfExtent)
H = self.get_density(particles, self.resolution,
width, self.mapHalfExtent)
if normalized:
# H = H ** (1.0 / 2)
H = H / (200)
H = np.clip(H, 0, 1)
return H
def get_angular_vel(self,part_pos,part_vel):
if(part_pos.shape[0]==0):
return np.array([0,0,0])
return self.get_angular_vel_flex(part_pos,part_vel)
def get_mean_height_map(self, particles, bar_state, normalized=True, width=2.5):
if (particles.shape[0] == 0):
return np.zeros((self.resolution, self.resolution))
bar_euler = bar_state[1]
bar_rot = R.from_euler('x',bar_euler[0])
rel_pos = particles-bar_state[0]
trans_pos = bar_rot.inv().apply(rel_pos)
trans_pos = trans_pos[(trans_pos[:,0]>-self.barDim[1])&(trans_pos[:,0]<self.barDim[1])&(trans_pos[:,2]>-self.barDim[1])&(trans_pos[:,2]<self.barDim[1])&(trans_pos[:,1]>0)]
# trans_pos[:,(0,2)] = np.clip(trans_pos[:,(0,2)], -self.barDim[1], self.barDim[1])
H = self.get_height_map(trans_pos[:,(0,2)], trans_pos[:,1], self.resolution, width, self.mapHalfExtent)
# rel_pos = particles-np.array([bar_state[0,0],0,bar_state[0,2]])
# trans_pos = bar_rot.inv().apply(rel_pos)
# trans_pos = trans_pos[(trans_pos[:,0]>-self.barDim[1])&(trans_pos[:,0]<self.barDim[1])&(trans_pos[:,2]>-self.barDim[1])&(trans_pos[:,2]<self.barDim[1])&(trans_pos[:,1]>0)]
# trans_pos[:,(0,2)] = np.clip(trans_pos[:,(0,2)], -self.barDim[1], self.barDim[1])
# H = self.get_height_map(rel_pos[:,(0,2)], rel_pos[:,1], self.resolution, width, self.mapHalfExtent)
return H
def get_state(self):
full_state = flex_env.FlexEnv.get_state(self)
# Per-instance layout: rows 0-3 hold the bar state (position, euler angles,
# velocity, angular velocity), followed by numPart particle positions and
# numPart particle velocities.
numPart = (full_state.shape[1]-4)//2
part_state = full_state[:, 4:4+numPart, (0, 2)] # particle (x, z) positions
part_vel = full_state[:, 4+numPart:4+2*numPart, :]
bar_state = full_state[:, :4, :]
part_heights = full_state[:, 4:4+numPart, 1] # particle heights (y)
return bar_state, part_state, part_heights, part_vel
def _reset(self):
self.rwdBuffer = [[0, 0, 0] for _ in range(100)]
print("Return at current rollout: ", self.rolloutRet)
print("Mean Return at current rollout: ", np.mean(self.rolloutRet))
print("Current Curriculum: ",self.currCurriculum)
if(np.mean(self.rolloutRet)>100):
self.currCurriculum = 1
self.rolloutRet = np.zeros(self.numInstances)
if self.randomCluster:
self.idxPool = np.array([[0, 0]])
'''
'''
import os
import sys
import h5py
import numpy as np
from scipy.stats import chi2
np.seterr(divide='ignore', invalid='ignore')
# -- abcpmc --
import abcpmc
from abcpmc import mpi_util
# -- galpopfm --
from . import dustfm as dustFM
from . import measure_obs as measureObs
dat_dir = os.environ['GALPOPFM_DIR']
def distance_metric(x_obs, x_model, method='chi2', x_err=None):
''' distance metric between forward model m(theta) and observations
notes
-----
* simple L2 norm between the 3D histogram of [Rmag, Balmer, FUV-NUV]
'''
if x_err is None:
x_err = [1. for _x in x_obs]
if method == 'chi2': # chi-squared
rho = [np.sum((_obs - _mod)**2/_err**2)
for _obs, _mod, _err in zip(x_obs, x_model, x_err)]
elif method == 'L2': # L2 norm
rho = [np.sum((_obs - _mod)**2)
for _obs, _mod, _err in zip(x_obs, x_model, x_err)]
elif method == 'L1': # L1 norm
rho = [np.sum(np.abs(_obs - _mod))
for _obs, _mod, _err in zip(x_obs, x_model, x_err)]
else:
raise NotImplementedError
return rho
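# Minimal usage sketch (hypothetical inputs): distances are computed per summary
# statistic and returned as a list, e.g.
#   x_obs = sumstat_obs(statistic='2d')
#   x_mod = sumstat_model(theta, sed=sed, statistic='2d')
#   rho = distance_metric(x_obs, x_mod, method='L2')
# Here `theta` and `sed` stand in for the ABC parameter vector and the SED dictionary.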
def sumstat_obs(statistic='2d', return_bins=False):
''' summary statistics for SDSS observations: the 3D histogram of
[M_r, G-R, FUV - NUV].
notes
-----
* 09/22/2020: observation summary statistics updated to Jeremy's SDSS
catalog (centrals *and* satellites) with NSA absolute magnitudes
* see `nb/observables.ipynb` to see exactly how the summary statistic is
calculated.
'''
if statistic == '1d':
r_edges, gr_edges, fn_edges, x_gr, x_fn, _, _ = np.load(os.path.join(dat_dir, 'obs',
'tinker.Mr_20.Mr.GR.FUVNUV.npy'),
allow_pickle=True)
dgr = gr_edges[1] - gr_edges[0]
nbar = dgr * np.sum(x_gr)
x_obs = [nbar, x_gr, x_fn]
elif statistic == '2d':
r_edges, gr_edges, fn_edges, x_gr, x_fn, _, _ = np.load(os.path.join(dat_dir, 'obs',
'tinker.Mr_20.Mr_GR.Mr_FUVNUV.npy'),
allow_pickle=True)
dr = r_edges[1] - r_edges[0]
dgr = gr_edges[1] - gr_edges[0]
nbar = dr * dgr * np.sum(x_gr) # no trailing comma: keep nbar a scalar, consistent with the other branches
x_obs = [nbar, x_gr, x_fn]
elif statistic == '3d':
r_edges, gr_edges, fn_edges, _x_obs, _ = np.load(os.path.join(dat_dir, 'obs',
'tinker.Mr_20.Mr_GR_FUVNUV.npy'),
allow_pickle=True)
dr = r_edges[1] - r_edges[0]
dgr = gr_edges[1] - gr_edges[0]
dfn = fn_edges[1] - fn_edges[0]
nbar = dr * dgr * dfn * np.sum(_x_obs)
x_obs = [nbar, _x_obs]
if return_bins:
return r_edges, gr_edges, fn_edges, x_obs
return x_obs
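# Note: for the '2d' statistic the returned list is [nbar, x_gr, x_fn], i.e. the
# number density plus the (M_r, G-R) and (M_r, FUV-NUV) histograms; pass
# return_bins=True to also recover the bin edges, e.g.
#   r_edges, gr_edges, fn_edges, x_obs = sumstat_obs(statistic='2d', return_bins=True)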
def sumstat_model(theta, sed=None, dem='slab_calzetti', f_downsample=1.,
statistic='2d', noise=True, seed=None, return_datavector=False,
sfr0_prescription='adhoc'):
''' calculate summary statistics for forward model m(theta)
:param theta:
array of input parameters
:param sed:
dictionary with SEDs of **central** galaxies
:param dem:
string specifying the dust empirical model
:param f_downsample:
if f_downsample > 1., then the SED dictionary is downsampled.
:param sfr0_prescription:
prescription for dealing with SFR=0 galaxies
notes
-----
* 09/22/2020: simple noise model implemented
* 4/22/2020: extra_data kwarg added. This is to pass pre-sampled
observables for SFR = 0 galaxies
'''
# don't touch these values! they are set to agree with the binning of
# obersvable
nbins = [8, 400, 200]
ranges = [(20, 24), (-5., 20.), (-5, 45.)]
dRmag = 0.5
dGR = 0.0625
dfuvnuv = 0.25
# SFR=0 galaxies
sfr0 = (sed['logsfr.inst'] == -999)
if sfr0_prescription == 'adhoc':
raise ValueError
#R_mag_sfr0, G_R_sfr0, FUV_NUV_sfr0 = _observable_zeroSFR(
# sed['wave'],
# sed['sed_noneb'][sfr0,:])
elif sfr0_prescription == 'sfrmin':
logsfr_min = sed['logsfr.inst'][~sfr0].min() # minimum SFR
print(logsfr_min)
sed['logsfr.inst'][sfr0] = logsfr_min
else:
raise NotImplementedError
sed_dusty = dustFM.Attenuate(
theta,
sed['wave'],
sed['sed_noneb'],
sed['sed_onlyneb'],
sed['logmstar'],
sed['logsfr.inst'],
dem=dem)
# observational measurements
F_mag = measureObs.AbsMag_sed(sed['wave'], sed_dusty, band='galex_fuv')
N_mag = measureObs.AbsMag_sed(sed['wave'], sed_dusty, band='galex_nuv')
G_mag = measureObs.AbsMag_sed(sed['wave'], sed_dusty, band='g_sdss')
R_mag = measureObs.AbsMag_sed(sed['wave'], sed_dusty, band='r_sdss')
# apply FUV and NUV cut
uv_cut = (F_mag < -13.5) & (N_mag < -14)
F_mag = F_mag[uv_cut]
N_mag = N_mag[uv_cut]
G_mag = G_mag[uv_cut]
R_mag = R_mag[uv_cut]
# calculate color
FUV_NUV = F_mag - N_mag
G_R = G_mag - R_mag
if sfr0_prescription == 'adhoc':
# append sampled SFR=0 observables to data vector
R_mag = np.concatenate([R_mag, R_mag_sfr0])
G_R = np.concatenate([G_R, G_R_sfr0])
FUV_NUV = np.concatenate([FUV_NUV, FUV_NUV_sfr0])
n_gal = len(R_mag)
if noise:
if seed is not None:
np.random.seed(seed)
# noise model (simplest model)
sig_R = chi2.rvs(3, loc=0.02, scale=0.00003, size=n_gal)
sig_FN = chi2.rvs(2, loc=0.05, scale=0.05, size=n_gal)
sig_GR = chi2.rvs(3, size=n_gal) * (0.00001 * (R_mag + 20.1) + 0.00005)\
+ (0.000025 * (R_mag + 20.1) + 0.02835)
R_mag += np.random.normal(size=n_gal) * sig_R
FUV_NUV += np.random.normal(size=n_gal) * sig_FN
G_R += np.random.normal(size=n_gal) * sig_GR
data_vector = np.array([-1.*R_mag, G_R, FUV_NUV])
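# Sketch of how these pieces would combine in an ABC distance evaluation (names hedged;
# the binning of `data_vector` into the summary histograms is truncated above):
#   x_obs = sumstat_obs(statistic='2d')
#   x_mod = sumstat_model(theta, sed=sed, dem='slab_calzetti', statistic='2d')
#   rho = distance_metric(x_obs, x_mod, method='L2')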
# Copyright 2020 DeepLearningResearch
#
# Copyright 2017 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# This file has been modified by DeepLearningResearch for the development of DEAL.
"""Make datasets and save specified directory.
Downloads datasets using scikit datasets and can also parse csv file
to save into pickle format.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from io import BytesIO
import os
import pickle
try:
from StringIO import StringIO
except ImportError:
from io import StringIO
import tarfile
from urllib.request import urlopen
import keras.backend as K
from keras.datasets import cifar10
from keras.datasets import cifar100
from keras.datasets import mnist
import numpy as np
import pandas as pd
import sklearn.datasets.rcv1
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.feature_extraction.text import TfidfTransformer
from absl import app
from absl import flags
from tensorflow import gfile
# Flags to specify save directory and data set to be downloaded.
flags.DEFINE_string('save_dir', '../../data/',
'Where to save outputs')
flags.DEFINE_string('datasets', 'mnist_keras',
'Which datasets to download, comma separated.')
FLAGS = flags.FLAGS
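# Example invocation (script name hypothetical), using the comma-separated dataset flag:
#   python make_datasets.py --datasets=mnist_keras,cifar10_keras --save_dir=../../data/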
class Dataset(object):
def __init__(self, X, y):
self.data = X
self.target = y
def get_keras_data(dataname):
"""Get datasets using keras API and return as a Dataset object."""
if dataname == 'cifar10_keras':
train, test = cifar10.load_data()
elif dataname == 'cifar100_coarse_keras':
train, test = cifar100.load_data('coarse')
elif dataname == 'cifar100_keras':
train, test = cifar100.load_data()
elif dataname == 'mnist_keras':
train, test = mnist.load_data()
else:
raise NotImplementedError('dataset not supported')
X = np.concatenate((train[0], test[0]))
y = np.concatenate((train[1], test[1]))
if dataname == 'mnist_keras':
# Add extra dimension for channel
num_rows = X.shape[1]
num_cols = X.shape[2]
X = X.reshape(X.shape[0], 1, num_rows, num_cols)
if K.image_data_format() == 'channels_last':
X = X.transpose(0, 2, 3, 1)
y = y.flatten()
data = Dataset(X, y)
return data
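# Usage sketch: get_keras_data('mnist_keras') returns a Dataset whose .data holds the
# concatenated train+test images reshaped to (70000, 28, 28, 1) for channels_last
# (or (70000, 1, 28, 28) for channels_first), and whose .target is the flattened labels.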
def get_cifar10():
"""Get CIFAR-10 dataset from source dir.
Slightly redundant with keras function to get cifar10 but this returns
in flat format instead of keras numpy image tensor.
"""
url = 'http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'
def download_file(url):
#req = urllib2.Request(url)
#response = urllib2.urlopen(req)
response = urlopen(url) # keep the file-like response object; it is read in chunks below
return response
response = download_file(url)
tmpfile = BytesIO()
while True:
# Download a piece of the file from the connection
s = response.read(16384)
# Once the entire file has been downloaded, tarfile returns b''
# (the empty bytes) which is a falsey value
if not s:
break
# Otherwise, write the piece of the file to the temporary file.
tmpfile.write(s)
response.close()
tmpfile.seek(0)
tar_dir = tarfile.open(mode='r:gz', fileobj=tmpfile)
X = None
y = None
for member in tar_dir.getnames():
if '_batch' in member:
filestream = tar_dir.extractfile(member).read()
batch = pickle.load(BytesIO(filestream), encoding='latin1') # batches were pickled under Python 2, so decode with latin1
if X is None:
X = np.array(batch['data'], dtype=np.uint8)
y = np.array(batch['labels'])
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Dec 17 14:32:28 2019
@author: tcandela
"""
"""
Libraries to read nesting beach features and to compute initial positions
"""
# =============================================================================
# IMPORTS
# =============================================================================
from mpl_toolkits.basemap import Basemap
from mpl_toolkits.axes_grid.inset_locator import inset_axes
from matplotlib import gridspec
import netCDF4 as nc
import numpy as np
import matplotlib.pyplot as plt
import datetime as dt
import time
import os
from os.path import isfile, join
import cv2
from matplotlib.colors import LinearSegmentedColormap
from matplotlib import cm
import random
#Personal librairies
import librumeau as brum
import netCDF_lib as ncl
import turtle_lib as tul
# =============================================================================
# FUNCTIONS
# =============================================================================
def complete_release_map(infile, path, gridfile, lonlat1D, nturtles, xmin, xmax, ymin, ymax, lat_space, lon_space):
figname = 'fig/Release_Map.png'
#Zone to plot in minimap
xmin_sl = -180
xmax_sl = 180
ymin_sl = -80
ymax_sl = 85
#yticks parameters
ymax_pos = 11.5
dy = 0.1
#To annotate country names and change map scale, go to plot code#
#Loading initial positions
initfile = open(infile,'r')
x_init, y_init, t_init = np.loadtxt(initfile,usecols=(0,1,3),unpack=True)
x_init, y_init, t_init = x_init[:nturtles], y_init[:nturtles], t_init[:nturtles]
print('\nInitial positions loaded')
#Loading grid file
grid = nc.Dataset(gridfile)
if lonlat1D == True:
lon_mat = np.squeeze(grid['glamt'])
lat_mat = np.squeeze(grid['gphit'])
else:
lon_mat = np.squeeze(grid['glamt'])[0,:]
lat_mat = np.squeeze(grid['gphit'])[:,0]
print('\nGrid file loaded')
#Convert grid point into lon/lat
lon, lat = brum.grid_to_geo(x_init, y_init, lon_mat, lat_mat)
for i in np.arange(len(lon)):
if lon[i] > 180:
lon[i] = lon[i]-360
# PLOT CODE
#Figure parameters
fig = plt.figure(figsize=(7/2.54,5/2.54))
ax_pos = fig.add_subplot(111)
ax_pos_small = inset_axes(ax_pos, width="50%", height="25%", loc='upper right')
ax_pos.set_yticks(np.arange(ymin,ymax_pos,dy))
ax_pos.spines['right'].set_linewidth(0.5)
ax_pos.spines['left'].set_linewidth(0.5)
ax_pos.spines['bottom'].set_linewidth(0.5)
ax_pos.spines['top'].set_linewidth(0.5)
#Plot map
m = Basemap(ax=ax_pos, projection='merc',lon_0=5,lat_0=90.0,llcrnrlon=xmin,urcrnrlon=xmax,llcrnrlat=ymin,urcrnrlat=ymax,resolution='h')
m.fillcontinents(color='0.65',alpha=1, lake_color='w')
m.drawcoastlines(color='0.3',linewidth=0.2)
m.drawcountries(color='w',linewidth=0.1)
m.drawparallels(np.arange(ymin,ymax_pos,lat_space), labels=[1,0,0,0], fontsize=5, linewidth=0.1)
m.drawmeridians(np.arange(xmin,xmax,lon_space), labels=[0,0,0,1], fontsize=5, linewidth=0.1)
#Plot annotations
m.drawmapscale(-55.5, 4.25,-55.5,4.25, length=75,fontsize=4.5, barstyle='simple',labelstyle='simple')
ax_pos.annotate(u'French Guiana', xy=(0.60, 0.25), fontsize=5,fontweight='normal',xycoords='axes fraction' ,horizontalalignment='right', verticalalignment='center',color='k')
ax_pos.annotate(u'Suriname', xy=(0.25, 0.4), fontsize=5,fontweight='normal',xycoords='axes fraction' ,horizontalalignment='right', verticalalignment='center',color='k')
#PLot turtles
lon_m,lat_m = m(lon,lat)
m.scatter(lon_m,lat_m,color='b', marker="o",edgecolors='None',s = 0.1)#,alpha=0.8,marker=".")
#Plot minimap
m_pt = Basemap(ax=ax_pos_small, projection='cyl',lon_0=-180,lat_0=90.0,llcrnrlon=xmin_sl,urcrnrlon=xmax_sl,llcrnrlat=ymin_sl,urcrnrlat=ymax_sl,resolution='l')
m_pt.fillcontinents(color='0.65',alpha=1, lake_color='w')
m_pt.scatter([-53.85],[6.1], color='b', marker="o", edgecolors='None', s=4,zorder=12)
#Save figure
plt.subplots_adjust(left=0.1, right=0.95, bottom=0.02, top=0.98)
plt.savefig(path+figname,dpi = 300)
print('\nMission accomplished! (plot saved at ' + path + figname + ')\n')
def plot_habitat(ax,hab_mode, gridfile, numday,latlim,lonlim, SCL, To, food_max,dmin,dmax,param,data_lists,current_date, lonmin, lonmax, log=False) :
""" Plot habitat on map."""
# Read Temp end Mnk data.
lonmax = max(lonlim)
lat = param['lat_phy']
lon = param['lon_phy']
temp = param['T_var']
U_var = param['U_var']
V_var = param['V_var']
food_path = param['food_dir'] + '/'
#Feeding habitat
if hab_mode == 'npp' or hab_mode == 'tot':
PP = ncl.interpolate_vgpm(current_date, param)
if hab_mode == 'tot':
PP = PP[::-1,:] #reverse lat
PP = PP[119:,:] #remove first 10 degrees !!!!!! the indices must be chosen so that the grids match, but there may be a half-cell offset; the resolution must be the same
Food_hab = tul.food_hab(PP,food_max)
#
if hab_mode == 'npp':
lat = param['lat_food']
lon = param['lon_food']
latlon = ncl.read_nc(gridfile,[lat,lon])
latmat = np.asarray(latlon[lat])
lonmat = np.asarray(latlon[lon])
#Temperature habitat
if hab_mode == 'temp' or hab_mode == 'tot':
T_files = data_lists[2]
current_T_file = T_files[numday]
T_dict = ncl.read_nc(current_T_file, [lat,lon,temp])
T = np.squeeze(T_dict[temp])
T_hab = tul.t_hab(T,SCL,To,param['species'])
latmat = np.asarray(T_dict[lat])
lonmat = np.asarray(T_dict[lon])
#Ocean currents
if hab_mode == 'current':
U_files = data_lists[0]
current_U_file = U_files[numday]
U_dict = ncl.read_nc(current_U_file, [lat, lon, U_var])
#
V_files = data_lists[1]
current_V_file = V_files[numday]
V_dict = ncl.read_nc(current_V_file, [V_var])
#
latmat = np.asarray(U_dict[lat])
lonmat = np.asarray(U_dict[lon])
#
U = np.squeeze(U_dict[U_var])
V = np.squeeze(V_dict[V_var])
norm = np.sqrt((U**2)+(V**2))
if hab_mode == 'npp':
hab = Food_hab
legend = u"Foraging Habitat suitability index"
cmap = 'pink_r'
levels = np.arange(0.,1.1,0.1)
ticks = levels
elif hab_mode == 'temp':
hab = T_hab
legend = u"Thermal Habitat suitability index"
cmap = 'pink_r'
levels = np.arange(0.,1.1,0.1)
ticks = levels
elif hab_mode == 'tot':
hab = T_hab*Food_hab
legend = u"Habitat suitability index"
cmap = 'pink_r'
levels = np.arange(0,1.1,0.1)
ticks = levels
elif hab_mode == 'current':
hab = norm
legend = u'Current velocity [m/s]'
cmap = 'pink_r'
levels = np.arange(0,2.1,0.1)
ticks = np.arange(0,2.2,0.2)
hab = np.where(hab>levels[-1], levels[-1], hab)
if lonmax > 180 and lonmin < 180: # manage date line change
idx1 = np.where(lonmat < 0)[0]
idx2 = np.where(lonmat >= 0)[0]
hab0 = hab.copy()
hab[:, idx1] = hab0[:, idx2]
hab[:, idx2] = hab0[:, idx1]
lonmat[idx1] += 360
lonmat0 = lonmat.copy()
lonmat[idx1] = lonmat0[idx2]
lonmat[idx2] = lonmat0[idx1]
# Plot
#im = ax.contourf(lonmat,latmat,hab,levels,cmap=cmap, alpha = 0.9,zorder=0)
im = ax.pcolormesh(lonmat,latmat,hab,cmap=cmap, alpha = 0.9,zorder=0,vmin=0,vmax=2)#vmin=levels[0],vmax=levels[-1]
cbar = plt.colorbar(im, orientation='horizontal',pad = 0.1, shrink=0.87, ticks = ticks)#, shrink=0.9)#, shrink=0.45, pad=0.03, fraction=0.25)
cbar.ax.tick_params(labelsize=12)
cbar.set_label(legend, labelpad=5, size=16)
#cbar.outline.set_linewidth(0.5)
#cbar.ax.xaxis.set_tick_params(width=0.5)
def display_fig(frame_title=''):
c=1
fig=plt.figure(num=1,figsize=(11*c,6.21*c), facecolor='w', edgecolor='w')
# Display frame title
ax=fig.add_subplot(111)
#cax=plt.axes([0.85, 0.1, 0.075, 0.8])
#cax = fig.add_axes([0.85, 0.09, 0.045, 0.8])
ax.text(0.15, 1.06,frame_title, ha='center',va='center', transform=ax.transAxes,fontweight = 'bold', color='k',fontsize=16,)
'''
cb_ax = fig.add_axes([0.05, 0.09, 0.045, 0.8])
cb = pl.colorbar(im,cax=cb_ax,orientation='vertical')
cb.solids.set(alpha=1)
cb.ax.tick_params(labelsize=12)
cb.set_label(u"habitat suitability index", labelpad=3, size=12)
cb.outline.set_linewidth(0.5)
cb.ax.xaxis.set_tick_params(width=0.5)
'''
return ax#,cax
def display_colorbar(f,im, ax_cb, label):
cb=f.colorbar(im,cax=ax_cb,orientation='horizontal')
cb.solids.set(alpha=1)
cb.ax.tick_params(labelsize=8)
cb.set_label(label, labelpad=1, size=8)
cb.outline.set_linewidth(0.5)
cb.ax.xaxis.set_tick_params(width=0.5)
def display_tracks(ax, lat='NA',lon='NA',dates='NA',ms=0.00,col='b', marker= 'o',alpha=0.5,label=None) :
""" """
ax.scatter(lon, lat, marker=marker,s=ms, edgecolor='none',c=col, alpha=alpha, zorder=100,label=label)
return ax
def plot_map(ax, latmin, latmax, lonmin, lonmax,value=0.6,res=0.25,alpha=1, lon_space=20,lat_space=10) :
""" Plot continents. """
map=Basemap(ax=ax,llcrnrlon=lonmin,llcrnrlat=latmin,urcrnrlon=lonmax,urcrnrlat=latmax,projection='cyl',resolution='l')
#Get meridians and parallels spaces
def getTicks(lmin, lmax, step):
lmaxabs = (int(max(abs(lmax), abs(lmin)))/step+1)*step
return np.intersect1d(np.arange(lmin+1, lmax), np.arange(-lmaxabs, lmaxabs, step))
#Draw parallels & meridians
map.drawparallels(np.arange(latmin, latmax, lat_space),labels=[1,0,0,0], fontsize=8,zorder=0.2,linewidth=0.1)
map.drawmeridians(np.arange(lonmin, lonmax, lon_space),labels=[0,0,0,1], fontsize=8,zorder=0.2,linewidth=0.1)
map.drawcountries(color='k',linewidth=0.01,zorder=0.3)
map.drawcoastlines(color='grey',linewidth=0.2,zorder=0.3)
map.fillcontinents(color='0.35')
return ax
def getTicks(lmin, lmax, step):
lmaxabs = (int(max(abs(lmax), abs(lmin)))/step+1)*step
return np.intersect1d(np.arange(lmin+1, lmax), np.arange(-lmaxabs, lmaxabs, step))
def show_start_point(ax, lat,lon) :
""" """
ax.plot((np.mean(lon[0,:]),),(np.mean(lat[0,:]),),markerfacecolor='w',
markeredgecolor='k',marker='o',ms=6,mew=0.3,zorder=999)
def plot_animation_frames(gridfile, dico,hab_mode,To,lethargy,coef_SMR,start_day,end_day,h,latlim,lonlim, save_path, param, data_lists, last_turtle, mortality, group, nb_cat, colors, hourly=False, dpi=100):
""" Plot animation frames with turtles positions and approximate habitat. """
species = param['species']
nturtles = param['nturtles'] - 1 if last_turtle == -1 else last_turtle
time_extra = param['time_extrapolation']
#
latmin = min(latlim)
latmax = max(latlim)
lonmin = min(lonlim)
lonmax = max(lonlim)
if hourly:
delta = 24 # nb of positions per datafile
else:
delta = 1 # daily
dmin = 80.
dmax = 200.
lat = dico['traj_lat'][:,:last_turtle]
lon = dico['traj_lon'][:,:last_turtle]
init_t = dico['init_t'][:last_turtle]
traj_time = dico['traj_time'][:,:last_turtle]
group = group[:last_turtle]
#
lon[lon>=180] -= 360 #needed ?
if hab_mode != 'void' and mortality:
temp = dico['traj_temp'][start_day:end_day,:last_turtle]
date_death = tul.find_date_death(nturtles,temp,To,coef_SMR,lethargy,init_t, end_day-start_day)
date_start_physfile = dt.datetime(param['ystart'],1,1)
date_start_physfile_entier= date_start_physfile.toordinal()
if hab_mode != 'void' and mortality:
date_death_entier = date_death + date_start_physfile_entier
month_names = ['Jan.','Feb.','Mar.','Apr.','May','Jun.','Jul.','Aug.','Sep.','Oct.','Nov.','Dec.']
#
SCL = tul.compute_SCL_VGBF(param['SCL0'], species, start_day)
for step in range(start_day,end_day,h):
print('\n')
print(step, 'of', end_day-h)
days_since_ref = init_t.min() + step/delta # nb of days since ref (1st January ystart)
current_date = date_start_physfile + dt.timedelta(days_since_ref)
if param['time_periodic']:
days_since_ref = init_t.min() + (days_since_ref - init_t.min()) % param['time_periodic']
file_date = date_start_physfile + dt.timedelta(days_since_ref) # datafile date
date_today_entier = file_date.toordinal() # datafile date used for comparision
numday = int(days_since_ref - init_t.min() + 0.5) # nb of days since first turtle release, +0.5 because datafiles are at 12:00
# Frame title
title = current_date.strftime("%d %B %Y, %H:%M")
print(title)
print('numday',numday)
#
newlat,newlon,date_mat = ncl.age_to_date(traj_time,init_t,lat,lon)
#
ax = display_fig(frame_title=title)
# Display habitat.
if hab_mode != 'void':
# Compute the parameters related to active swimming and habitat
SCL = tul.compute_SCL_VGBF(SCL, species, h) #increment SCL of h days
food_max = tul.compute_Fmax(step+start_day,species,SCL,param['P0'])
plot_habitat(ax, hab_mode, gridfile, numday, [latmin, latmax], [lonmin,lonmax], SCL, To, food_max, dmin, dmax, param, data_lists,file_date, lonmin, lonmax)
# Find alive and dead turtles
# Blue dots : alive turtles
# Black dots: dead turtles
# Dead turtles are removed from the animation 90 days after they died
if hab_mode != 'void' and mortality and len(group)==0:
index_dead_at_date = np.where((date_death_entier<=date_today_entier)&(date_death_entier+90>date_today_entier)) #+90 > dead disappear after 90 days
index_alive_at_date = np.where(date_death_entier>date_today_entier)
if hab_mode == 'void' and mortality and len(group)==0:
index_dead_at_date=[]
index_alive_at_date=np.arange(lat.shape[1])
# Display position (scatter)
if mortality and len(group) == 0:
display_tracks(ax, lat=newlat[step,index_dead_at_date],lon=newlon[step,index_dead_at_date],ms=11,col='k', marker = 'o',alpha=0.6)
display_tracks(ax, lat=newlat[step,index_alive_at_date],lon=newlon[step,index_alive_at_date],ms=11,col='#1f78b4', marker = 'o',alpha=0.6)
elif len(group)==0:
display_tracks(ax, lat=newlat[step,:],lon=newlon[step,:],ms=11,col='#1f78b4',alpha=0.6)
if hab_mode != 'void' and len(group) > 0:
for cat in np.arange(nb_cat):
if mortality:
index_dead_at_date = np.where((date_death_entier <= date_today_entier) & (date_death_entier + 90 > date_today_entier) & (group == cat)) #+90 > dead disappear after 90 days
index_alive_at_date = np.where((date_death_entier > date_today_entier) & (group == cat))
display_tracks(ax, lat=newlat[step,index_dead_at_date], lon=newlon[step,index_dead_at_date], ms=5, col=colors[cat], marker = 'x', alpha=0.6)
display_tracks(ax, lat=newlat[step,index_alive_at_date], lon=newlon[step,index_alive_at_date], ms=5, col=colors[cat], marker = 'o', alpha=0.6)
else:
idx = np.where(group == cat)
display_tracks(ax, lat=newlat[step,idx], lon=newlon[step,idx], ms=5, col=colors[cat], marker = 'o', alpha=0.6)
# Plot starting point
#show_start_point(ax, lat,lon)
lon_space = (lonmax - lonmin)/7
lat_space = (latmax - latmin)/7
# Display map.
#plot_map(ax, latmin, latmax, lonmin, lonmax, lon_space,lat_space)
plt.xlim([lonmin,lonmax])
plt.ylim([latmin,latmax])
#save figure
m = str(("%04d") %step)
plt.savefig(save_path + 'frame_' + m + '.png', bbox_inches='tight', dpi=dpi)
plt.close()
def plot_animation_frames_tuned(gridfile, dico,hab_mode,To,lethargy,coef_SMR,start_day,end_day,h,latlim,lonlim,save_path, param, data_lists, last_turtle, mortality = True, dpi=100):
"""
Plot animation frames for 1 turtle with a dt < 24h (for example 24 dt / day)
Also plot 4 points at a distance grad_dx to see where gradients are computed
Might work with several turtles
"""
#Tuned parameters
delta = 24 #24 positions for 1 data file (dt = 1h)
grad = False #to plot points where gradient is computed
deg = 111195 #1degree = 111,195 km approx
grad_dx = param['grad_dx']
#
species = param['species']
nturtles = param['nturtles'] - 1 if last_turtle == -1 else last_turtle
#
latmin = min(latlim)
latmax = max(latlim)
lonmin = min(lonlim)
lonmax = max(lonlim)
dmin = 80.
dmax = 200.
lat = dico['traj_lat'][:,:last_turtle]
lon = dico['traj_lon'][:,:last_turtle]
init_t = dico['init_t'][:last_turtle]
traj_time = dico['traj_time'][:,:last_turtle]
date_start_physfile = dt.datetime(param['ystart'],1,1) # may need to be adjusted
month_names = ['Jan.','Feb.','Mar.','Apr.','May','Jun.','Jul.','Aug.','Sep.','Oct.','Nov.','Dec.']
#
SCL = param['SCL0'] + tul.age_to_SCL(start_day,species) #not exact if (SCL0 is not hatchling SCL and start_day > 0)
for step in range(0,end_day,h): #here not days but time_steps
print('\n')
print(step, 'of', end_day-h)
days_since_ref = int(init_t.min()) + 1 + step//delta #increment days each delta time steps
date_title = date_start_physfile + dt.timedelta(days_since_ref)
date = date_start_physfile + dt.timedelta(days_since_ref)
# Frame title
m = '00'
month = month_names[date_title.month-1]
day = str(("%02d") %date_title.day)
year = str(date_title.year)
title ='| '+day+' '+month+' '+year+' |'
print(' ',title)
print('File date : ',date.strftime("%d-%m-%Y"))
#
newlat,newlon,date_mat = ncl.age_to_date(traj_time,init_t,lat,lon)
#
ax = display_fig(frame_title=title)
# Display habitat.
if hab_mode != 'void':
# Compute the parameters related to active swimming and habitat
SCL = tul.compute_SCL_VGBF(SCL, species, 1)
food_max = tul.compute_Fmax(step+start_day,species,SCL,param['P0'])
numday = days_since_ref - int(init_t.min())
plot_habitat(ax, hab_mode, gridfile, numday, [latmin, latmax], [lonmin,lonmax], SCL, To, food_max, dmin, dmax, param, data_lists,date)
print(numday)
display_tracks(ax, lat=newlat[step,:],lon=newlon[step,:],ms=11,col='#1f78b4',alpha=0.6)
#For gradients points
if grad:
dx_lon = grad_dx / (deg * np.cos(newlat[step,0] * np.pi / 180))
dx_lat = grad_dx / deg
display_tracks(ax, lat=newlat[step,:]-dx_lat,lon=newlon[step,:],ms=11,col='k',alpha=0.6)
display_tracks(ax, lat=newlat[step,:]+dx_lat,lon=newlon[step,:],ms=11,col='k',alpha=0.6)
display_tracks(ax, lat=newlat[step,:],lon=newlon[step,:]-dx_lon,ms=11,col='k',alpha=0.6)
display_tracks(ax, lat=newlat[step,:],lon=newlon[step,:]+dx_lon,ms=11,col='k',alpha=0.6)
# Plot starting point
show_start_point(ax, lat,lon)
lon_space = (lonmax - lonmin)/7
lat_space = (latmax - latmin)/7
# Display map.
plot_map(ax, latmin, latmax, lonmin, lonmax, lon_space,lat_space)
plt.xlim([lonmin,lonmax])
plt.ylim([latmin,latmax])
#save figure
m = str(("%04d") %step)
plt.savefig(save_path + 'frame_' + m + '.png', bbox_inches='tight', dpi=dpi)
plt.close()
def convert_frames_to_video(pathIn, pathOut, fps):
print('\n')
print('****************************************************')
print("Converting frames to video...")
print('****************************************************')
print('\n')
frame_array = []
files = [f for f in os.listdir(pathIn) if (isfile(join(pathIn, f)) and os.path.splitext(join(pathIn, f))[-1] == '.png')]
#for sorting the file names properly
#only png files should be in the directory
try:
files.sort(key=lambda x: int(x[6:10])) # works if names follow the frame_****.png pattern
except:
files = sorted(files)
for i in range(len(files)):
time.sleep(0.01)
filename = pathIn + files[i]
#reading each files
img = cv2.imread(filename)
height, width, layers = img.shape
size = (width,height)
print(filename)
#inserting the frames into an image array
frame_array.append(img)
out = cv2.VideoWriter(pathOut,cv2.VideoWriter_fourcc('M','J','P','G'), fps, size)
for i in range(len(frame_array)):
# writing to a image array
out.write(frame_array[i])
out.release()
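# Usage sketch (paths hypothetical): stitch the saved animation frames into a 12 fps video:
#   convert_frames_to_video('output/fig/frames/', 'output/fig/animation.avi', fps=12)
# The MJPG fourcc is used, so an .avi container is a safe choice for pathOut.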
def compute_presence_map(dx,x,y,xmin,xmax,ymin,ymax,seuil_numpos=30000,lat_space=5,lon_space=20):
x = np.array(x)
x[x>=xmax]-=360
y = np.array(y)
x_ravel = x.ravel()
y_ravel = y.ravel()
x_ravel[np.where(y_ravel==0)] = 0
y_ravel[np.where(x_ravel==0)] = 0
x_ravel_nonzero = np.delete(x_ravel,np.where(x_ravel==0))
y_ravel_nonzero = np.delete(y_ravel,np.where(y_ravel==0))
x_ravel_nonone = np.delete(x_ravel_nonzero,np.where(x_ravel_nonzero==1.0))
y_ravel_nonone = np.delete(y_ravel_nonzero,np.where(y_ravel_nonzero==1.0))
x_ravel_nonone= np.append(x_ravel_nonone,0)
x_ravel_nonone = np.append(x_ravel_nonone,410)
y_ravel_nonone = np.append(y_ravel_nonone,-90)
'''
A short script to plot the outputs of the SSP Mutual Information sampling
It currently assumes that the data are stored in the format:
/<path-to>/<test-function-name>/<selection-agent>
where <selection-agent> is one of {gp-mi,ssp-mi}
'''
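# Example layout (hypothetical paths): results/branin/gp-mi/ and results/branin/ssp-mi/,
# each holding pytry trial files with 'regret' and 'avg_regret' columns.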
import numpy as np
import numpy.matlib as matlib
import pandas as pd
import pytry
import matplotlib.pyplot as plt
import matplotlib as mpl
mpl.use('pgf')
mpl.rcParams.update({
'pgf.texsystem': 'pdflatex',
'font.family': 'serif',
'text.usetex': True,
'pgf.rcfonts': False,
'pdf.fonttype': 42,
'ps.fonttype': 42,
'figure.autolayout': True
})
from argparse import ArgumentParser
import best
def get_data(data_frame):
regret = np.vstack(data_frame['regret'].values)
avg_regret = np.vstack(data_frame['avg_regret'].values)
"""Tests for the `pyabc.sumstat` module."""
import os
import tempfile
import numpy as np
import pandas as pd
import pytest
import pyabc
from pyabc.predictor import LinearPredictor
from pyabc.sumstat import (
GMMSubsetter,
IdentitySumstat,
IdSubsetter,
PredictorSumstat,
)
from pyabc.util import EventIxs, dict2arr, dict2arrlabels
def test_dict2arr():
"""Test conversion of dicts to arrays."""
dct = {
"s0": pd.DataFrame({"a": [0, 1], "b": [2, 3]}),
"s1": np.array([4, 5]),
"s2": 6,
}
keys = ["s0", "s1", "s2"]
arr = dict2arr(dct, keys=keys)
assert (arr == np.array([0, 2, 1, 3, 4, 5, 6])).all()
labels = dict2arrlabels(dct, keys=keys)
assert len(labels) == len(arr)
assert labels == [
"s0:a:0",
"s0:b:0",
"s0:a:1",
"s0:b:1",
"s1:0",
"s1:1",
"s2",
]
with pytest.raises(TypeError):
dict2arr({"s0": "alice"}, keys=["s0"])
with pytest.raises(TypeError):
dict2arrlabels({"s0": "alice"}, keys=["s0"])
@pytest.fixture(params=[None, [lambda x: x, lambda x: x ** 2]])
def trafos(request):
"""Data transformations."""
return request.param
def test_identity_sumstat(trafos):
"""Test the IdentitySumstat."""
sumstat = IdentitySumstat(trafos=trafos)
x0 = {'s0': 1.0, 's1': 42.0}
sumstat.initialize(
t=0, get_sample=lambda: pyabc.population.Sample(), x_0=x0, total_sims=0
)
assert not sumstat.requires_calibration()
assert not sumstat.is_adaptive()
if trafos is None:
assert (sumstat({'s1': 7.0, 's0': 3.0}) == np.array([3.0, 7.0])).all()
assert len(sumstat.get_ids()) == 2
else:
assert (
sumstat({'s1': 7.0, 's0': 3.0}) == np.array([3.0, 7.0, 9.0, 49.0])
).all()
assert len(sumstat.get_ids()) == 4
def test_event_ixs():
"""Test fit index construction."""
ixs = EventIxs(ts=1, sims=10)
assert not ixs.act(t=0, total_sims=0)
assert ixs.act(t=1, total_sims=0)
assert ixs.act(t=0, total_sims=20)
ixs = EventIxs(ts={np.inf})
assert ixs.act(t=0, total_sims=0)
assert ixs.act(t=7, total_sims=0)
ixs = EventIxs(sims={10, 20})
assert not ixs.act(t=0, total_sims=5)
assert ixs.act(t=0, total_sims=15)
assert not ixs.act(t=0, total_sims=16)
assert ixs.act(t=0, total_sims=20)
ixs = EventIxs(from_t=5)
assert (
not ixs.act(t=4, total_sims=50)
and ixs.act(t=5, total_sims=50)
and ixs.act(t=20, total_sims=50)
)
ixs = EventIxs(from_sims=10)
assert (
not ixs.act(t=4, total_sims=9)
and ixs.act(t=4, total_sims=10)
and ixs.act(t=4, total_sims=20)
)
def test_pre():
"""Test chaining of summary statistics."""
sumstat = IdentitySumstat(
trafos=[lambda x: x ** 2],
pre=IdentitySumstat(trafos=[lambda x: x, lambda x: x ** 2]),
)
assert not sumstat.requires_calibration()
assert not sumstat.is_adaptive()
sumstat.configure_sampler(pyabc.SingleCoreSampler())
x0 = {'s0': 1.0, 's1': 42.0}
sumstat.initialize(
t=0, get_sample=lambda: pyabc.population.Sample(), x_0=x0, total_sims=0
)
assert (
sumstat({'s1': 7.0, 's0': 3.0}) == np.array([3.0, 7.0, 9.0, 49.0])
).all()
import numpy as np, random
#from RLKeras import ReplayMemory
class Game(object):
Delta = 0.1
NActions = 2
StateDim = 2
Moves = np.array(
[
(1.0, 0.0),
(0.0, 1.0),
(-1.0, 0.0),
(0.0, -1.0)
]
) * Delta
def init(self, n):
# returns random initial states
states = self.randomStates(n)
return states
def step(self, states, actions):
n = len(states)
states1 = states.copy()
done = np.zeros((n,), dtype=bool) # builtin bool; np.bool is deprecated
rewards = np.zeros((n,))
for i, (state, action) in enumerate(zip(states, actions)):
states1[i,:] += self.Moves[action]
x, y = states1[i,:]
final = x > 1.0 or y > 1.0
if final:
z = x if y > 1.0 else y
r = 1-2*z
done[i] = True
rewards[i] = r
return states1, rewards, done
def randomStates(self, n):
return np.random.random((n, 2))
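# Minimal rollout sketch (assumes an external policy supplying integer actions):
#   game = Game()
#   states = game.init(32)
#   for _ in range(100):
#       actions = np.random.randint(0, len(Game.Moves), size=len(states))
#       states, rewards, done = game.step(states, actions)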