import numpy as np
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
import timeit
import time
import datetime
from tqdm import tqdm
def activation(input_array,function='sigmoid'):
if function =='sigmoid':
return 1/(1 + np.exp(-input_array))
np.random.seed(8888)
x1 = np.linspace(-5,5, 400)
x2 = np.linspace(-5,5, 400)
np.random.shuffle(x1)
np.random.shuffle(x2)
d = x1**2 + x2**2
# Normalize d 0.2~0.8
d_max = np.max(d)
d_min = np.min(d)
d = (d-d_min)/(d_max-d_min)*(0.8-0.2)+0.2
#---------------- Input data ------------------------------
num_in = 2
#---------------- Hidden Layer 1 ---------------------
num_L1 = 10
bias_L1 = np.random.uniform(-0.5,0.5,[num_L1,1])#10 1
w_L1 = np.random.uniform(-0.5,0.5,[num_in,num_L1])#2 10
#---------------- Output -----------------------------
num_out = 1
bias_out = np.random.uniform(-0.5,0.5,[num_out,1])# 1 1
w_out = np.random.uniform(-0.5,0.5,[num_L1,num_out])# 10 1
#---------------- Parameter --------------------------
eta = 0.01
mom = 0.9
epoch = 250000
Eav_train = np.zeros([epoch])
Eav_test = np.zeros([epoch])
dw_out = temp1 = np.zeros([num_L1,num_out]) #10 1
dbias_out = temp2 = np.zeros([num_out,1])#1 1
dw_L1 = temp3 = np.zeros([num_in,num_L1])#2 10
dbias_L1 = temp4 = np.zeros([num_L1,1])# 10 1
#---------------- Training ----------------------------
t0 = timeit.default_timer()
now = datetime.datetime.now().strftime("%Y-%m-%d_%H-%M-%S")
pbar = tqdm(total =epoch)
for i in range(epoch):
#--------------- Feed Forward -------------------
e = np.zeros([300])
E_train = np.zeros([300])
for j in range(300):
#X = np.array([x1[j],x2[j]]).reshape(2,1)# 2 1
X = np.array([x1[j],x2[j]]).reshape(2,1)# 2 1
L1 = activation(np.dot(np.transpose(w_L1),X) + bias_L1,'sigmoid')#10 1
out = activation(np.dot(np.transpose(L1),w_out) + bias_out,'sigmoid')#1 1
#--------------- Back Propagation-----------------
e[j] = (d[j]-out) #1 1
E_train[j] = 0.5 * e[j]**2
locg_k = e[j] * (out*(1-out))# 1 1
temp2 = temp2 + mom * dbias_out + eta * locg_k * 1 #1 1
temp1 = temp1 + mom * dw_out + eta * locg_k * L1 #10 1
locg_j = L1*(1-L1) * locg_k * w_out# 10 1
temp4 = temp4 + mom * dbias_L1 + eta * locg_j * 1 # 10 1
temp3 = temp3 + mom * dw_L1 + eta * np.dot(X,np.transpose(locg_j))#2 10
dbias_out = temp2/300
dw_out = temp1/300
dbias_L1 = temp4/300
dw_L1 = temp3/300
temp1 = np.zeros([num_L1,num_out]) #10 1
temp2 = np.zeros([num_out,1])#1 1
temp3 = np.zeros([num_in,num_L1])#2 10
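# A small standalone check (illustrative only, not part of the training loop
# above): for the logistic sigmoid used in activation(), the derivative obeys
# d(sigmoid)/dx = sigmoid*(1-sigmoid), which is exactly the out*(1-out) factor
# used in the local gradients locg_k and locg_j.
_x = np.linspace(-3.0, 3.0, 7)
_s = 1.0/(1.0 + np.exp(-_x))
_h = 1e-6
_numeric = (1.0/(1.0 + np.exp(-(_x + _h))) - _s)/_h  # forward difference
assert np.allclose(_numeric, _s*(1.0 - _s), atol=1e-5)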
# -*- coding: utf-8 -*-
import io
import numpy as np
from numpy.testing import assert_array_equal
import pytest
from pytoshop import codecs
from pytoshop import enums
@pytest.mark.parametrize("depth", (8, 16))
def test_zip_with_prediction(depth):
np.random.seed(0)
dtype = codecs.color_depth_dtype_map[depth]
x = np.random.randint(0, (2**depth) - 1, size=(255, 256), dtype=dtype)
fd = io.BytesIO()
codecs.compress_image(
fd, x, enums.Compression.zip_prediction, (255, 256), 1, depth, 1)
y = codecs.decompress_image(
fd.getvalue(), enums.Compression.zip_prediction, (255, 256), depth, 1)
assert_array_equal(x, y)
@pytest.mark.parametrize("depth", (1, 8, 16, 32))
def test_zip(depth):
np.random.seed(0)
dtype = codecs.color_depth_dtype_map[depth]
x = np.random.randint(0, (2**depth) - 1, size=(255, 256), dtype=dtype)
fd = io.BytesIO()
codecs.compress_image(
fd, x, enums.Compression.zip, (255, 256), 1, depth, 1)
y = codecs.decompress_image(
fd.getvalue(), enums.Compression.zip, (255, 256), depth, 1)
assert_array_equal(x, y)
@pytest.mark.parametrize("depth", (8, 16, 32))
@pytest.mark.parametrize("version", (1, 2))
def test_rle(depth, version):
np.random.seed(0)
dtype = codecs.color_depth_dtype_map[depth]
x = np.random.randint(0, (2**depth) - 1, size=(255, 256), dtype=dtype)
fd = io.BytesIO()
codecs.compress_image(
fd, x, enums.Compression.rle, (255, 256), 1, depth, version)
y = codecs.decompress_image(
fd.getvalue(), enums.Compression.rle, (255, 256), depth, version)
assert_array_equal(x, y)
@pytest.mark.parametrize("depth", (1, 8, 16, 32))
def test_raw_constant(depth):
if depth == 1:
value = 1
else:
value = 42
dtype = codecs.color_depth_dtype_map[depth]
x = np.ones((255, 256), dtype=dtype) * value
fd = io.BytesIO()
codecs.compress_image(
fd, value, enums.Compression.raw, (255, 256), 1, depth, 1)
y = codecs.decompress_image(
fd.getvalue(), enums.Compression.raw, (255, 256), depth, 1)
assert_array_equal(x, y)
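# A minimal round-trip sketch for the tests above (assumption: it reuses the
# compress_image/decompress_image call signatures exactly as the tests do; the
# helper name `roundtrip` is ours, not part of pytoshop). It relies on the
# io/np/codecs/enums imports at the top of this file.
def roundtrip(x, compression, depth, version=1):
    """Compress `x` to an in-memory buffer, then decompress and return it."""
    fd = io.BytesIO()
    codecs.compress_image(fd, x, compression, x.shape, 1, depth, version)
    return codecs.decompress_image(
        fd.getvalue(), compression, x.shape, depth, version)
# Example (hypothetical values): an 8-bit random image should survive RLE
# compression unchanged.
# img = np.random.randint(0, 255, size=(255, 256), dtype=np.uint8)
# assert_array_equal(roundtrip(img, enums.Compression.rle, 8), img)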
#!/usr/bin/env python
import os, sys, cv2, json, pickle
import math, PIL, cairo
import copy, random, re
from copy import deepcopy
import numpy as np
import os.path as osp
from time import time
from datetime import datetime
from nltk.tokenize import word_tokenize
from nltk.corpus import stopwords
from collections import Counter
import matplotlib.pyplot as plt
from abstract_config import get_config
import torch, torchtext
import torch.nn as nn
###########################################################
## Directory
###########################################################
this_dir = osp.dirname(__file__)
def maybe_create(dir_path):
if not osp.exists(dir_path):
os.makedirs(dir_path)
def prepare_directories(config):
postfix = datetime.now().strftime("%m%d_%H%M%S")
model_name = '{}_{}'.format(config.exp_name, postfix)
config.model_name = model_name
config.model_dir = osp.join(config.log_dir, model_name)
maybe_create(config.model_dir)
def pickle_load(path):
with open(path, 'rb') as fid:
data_ = pickle.load(fid)
return data_
def pickle_save(path, data):
with open(path, 'wb') as fid:
pickle.dump(data, fid, pickle.HIGHEST_PROTOCOL)
def json_load(path):
with open(path, 'r') as fid:
data_ = json.load(fid)
return data_
def json_save(path, data):
with open(path, 'w') as fid:
json.dump(data, fid, indent=4, sort_keys=True)
###########################################################
## Discretization
###########################################################
class LocationMap(object):
def __init__(self, config):
self.cfg = config
self.cols, self.col_step = \
np.linspace(config.margin, config.image_size[0]-config.margin,
num=config.grid_size[0],
endpoint=True, retstep=True, dtype=float)
self.rows, self.row_step = \
np.linspace(config.margin, config.image_size[1]-config.margin,
num=config.grid_size[1],
endpoint=True, retstep=True, dtype=float)
Xs, Ys = np.meshgrid(self.cols, self.rows)
self.coords = np.vstack((Xs.flatten(), Ys.flatten())).transpose()
def index2coord(self, index):
return self.coords[index].copy()
def indices2coords(self, indices):
return self.coords[indices].copy()
def coord2index(self, coord):
col_idx = int(float(coord[0] - self.cfg.margin)/self.col_step + 0.5)
row_idx = int(float(coord[1] - self.cfg.margin)/self.row_step + 0.5)
col_idx = max(0, min(col_idx, self.cfg.grid_size[0]-1))
row_idx = max(0, min(row_idx, self.cfg.grid_size[1]-1))
return row_idx * self.cfg.grid_size[0] + col_idx
def coords2indices(self, coords):
grids = (coords - self.cfg.margin)/np.array([self.col_step, self.row_step]).reshape((1,2)).astype(float)
grids = (grids + 0.5).astype(int)
grids[:, 0] = np.maximum(0, np.minimum(grids[:, 0], self.cfg.grid_size[0]-1))
grids[:, 1] = np.maximum(0, np.minimum(grids[:, 1], self.cfg.grid_size[1]-1))
return grids[:, 1] * self.cfg.grid_size[0] + grids[:, 0]
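# A minimal usage sketch for LocationMap (assumption: the config only needs the
# margin / image_size / grid_size attributes read above; the SimpleNamespace
# below is a hypothetical stand-in for the real config object).
from types import SimpleNamespace
_cfg = SimpleNamespace(margin=10, image_size=(224, 224), grid_size=(7, 7))
_loc_map = LocationMap(_cfg)
_idx = _loc_map.coord2index((100.0, 50.0))   # index of the nearest grid cell
_coord = _loc_map.index2coord(_idx)          # centre coordinate of that cell
# coords2indices / indices2coords are the vectorised counterparts, e.g.
# _loc_map.coords2indices(np.array([[100.0, 50.0], [20.0, 200.0]]))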
###########################################################
## Vocabulary
###########################################################
import string
punctuation_table = str.maketrans('', '', string.punctuation)
stop_words = set(stopwords.words('english'))
# print('stop_words: ', stop_words)
def further_token_process(tokens):
tokens = [w.translate(punctuation_table) for w in tokens]
tokens = [w for w in tokens if w.isalpha()]
tokens = [w for w in tokens if not w in stop_words]
return tokens
def atoi(text):
return int(text) if text.isdigit() else text
def natural_keys(text):
'''
alist.sort(key=natural_keys) sorts in human order
http://nedbatchelder.com/blog/200712/human_sorting.html
(See Toothy's implementation in the comments)
'''
return [ atoi(c) for c in re.split(r'(\d+)', text) ]
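# A small usage sketch: natural_keys yields human ("natural") ordering for
# strings that embed numbers, unlike plain lexicographic sorting.
_files = ['img10.png', 'img2.png', 'img1.png']
assert sorted(_files, key=natural_keys) == ['img1.png', 'img2.png', 'img10.png']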
class Vocab(object):
def __init__(self, name):
self.name = name
self.word2index = {}
self.word2count = {}
self.index2word = []
for idx, word in enumerate(['<pad>', '<sos>', '<eos>']):
self.word2index[word] = idx
self.index2word.append(word)
self.word2count[word] = 1
self.n_words = 3
self.glovec = torchtext.vocab.GloVe(cache=osp.join(this_dir, '..', 'data', 'caches'))
def get_glovec(self):
vectors = []
self.word2vector = {}
for i in range(len(self.index2word)):
w = self.index2word[i]
v_th = self.glovec[w].squeeze()
v_np = v_th.numpy()
vectors.append(v_th)
self.word2vector[w] = v_np
self.vectors = torch.stack(vectors, 0)
del self.glovec
self.glovec = None
def load(self, path):
with open(path, 'r') as fp:
vocab_info = json.loads(fp.read())
self.word2index = vocab_info['word2index']
self.word2count = vocab_info['word2count']
self.index2word = vocab_info['index2word']
self.n_words = len(self.index2word)
def save(self, path):
vocab_info = {}
vocab_info['word2index'] = self.word2index
vocab_info['word2count'] = self.word2count
vocab_info['index2word'] = self.index2word
with open(path, 'w') as fp:
json.dump(vocab_info, fp, indent=4, sort_keys=True)
def addSentence(self, sentence):
tokens = word_tokenize(sentence.lower())
tokens = further_token_process(tokens)
for word in tokens:
self.addWord(word)
def addWord(self, word):
if word not in self.word2index:
self.word2index[word] = self.n_words
self.word2count[word] = 1
self.index2word.append(word)
self.n_words += 1
else:
self.word2count[word] += 1
def filter_words(self, max_size=None, min_freq=1):
counter = Counter(self.word2count)
# rm special tokens before sorting
counter['<pad>'] = 0; counter['<sos>'] = 0; counter['<eos>'] = 0
# sort by frequency, then alphabetically
words_and_frequencies = sorted(counter.items(), key=lambda tup: tup[0])
words_and_frequencies.sort(key=lambda tup: tup[1], reverse=True)
# reset
self.index2word = []
self.word2index = {}
self.n_words = 0
for idx, word in enumerate(['<pad>', '<sos>', '<eos>']):
self.word2index[word] = idx
self.index2word.append(word)
self.n_words += 1
for word, freq in words_and_frequencies:
if freq < min_freq or len(self.index2word) == max_size:
break
self.index2word.append(word)
self.word2index[word] = self.n_words
self.n_words += 1
counter['<pad>'] = 1; counter['<sos>'] = 1; counter['<eos>'] = 1
self.word2count = dict(counter)
def word_to_index(self, w):
return self.word2index.get(w, -1)
###########################################################
## Pytorch
###########################################################
class Flatten(nn.Module):
def forward(self, input):
return input.view(input.size(0), -1)
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return nn.Conv2d(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
def weights_init(m):
classname = m.__class__.__name__
if classname.find('Conv') != -1:
m.weight.data.normal_(0.0, 0.02)
elif classname.find('BatchNorm') != -1:
m.weight.data.normal_(1.0, 0.02)
m.bias.data.fill_(0)
def get_n_params(model):
pp=0
for p in list(model.parameters()):
nn=1
for s in list(p.size()):
nn = nn*s
pp += nn
return pp
def indices2onehots(indices, out_dim):
bsize, slen = indices.size()
inds = indices.view(bsize, slen, 1)
onehots = torch.zeros(bsize, slen, out_dim).float()
onehots.scatter_(-1, inds, 1.0)
return onehots.float()
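# A minimal usage sketch for indices2onehots: each integer index along the last
# axis becomes a one-hot vector of length out_dim (values are illustrative).
_idx = torch.tensor([[0, 2], [1, 1]])        # shape (batch=2, seq_len=2)
_onehots = indices2onehots(_idx, out_dim=3)  # shape (2, 2, 3)
assert _onehots[0, 1].tolist() == [0.0, 0.0, 1.0]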
###########################################################
## Data
###########################################################
def normalize(input_img, mean=None, std=None):
if (mean is None) or (std is None):
mean = np.array([0.485, 0.456, 0.406]).reshape((1,1,3))
std = np.array([0.229, 0.224, 0.225]).reshape((1,1,3))
# [0, 255] --> [0, 1]
img_np = input_img.astype(np.float32)/255.0
# BGR --> RGB
img_np = img_np[:, :, ::-1].copy()
# Normalize
img_np = (img_np - mean)/std
# H x W x C --> C x H x W
img_np = img_np.transpose((2, 0, 1))
return img_np
def unnormalize(input_img, mean=None, std=None):
if (mean is None) or (std is None):
mean = np.array([0.485, 0.456, 0.406]).reshape((1,1,3))
std = np.array([0.229, 0.224, 0.225]).reshape((1,1,3))
# C x H x W --> H x W x C
img_np = input_img.transpose((1, 2, 0))
# Unnormalize
img_np = img_np * std + mean
# RGB --> BGR
img_np = img_np[:, :, ::-1].copy()
# [0, 1] --> [0, 255]
img_np = (255.0 * img_np).astype(int)
img_np = np.maximum(0, img_np)
img_np = np.minimum(255, img_np)
img_np = img_np.astype(np.uint8)
return img_np
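# A small round-trip sketch: normalize maps a uint8 BGR image (H, W, C) to a
# normalised float (C, H, W) array, and unnormalize approximately inverts it
# (exact up to the integer truncation in the final cast).
_img = np.random.randint(0, 256, size=(4, 4, 3), dtype=np.uint8)
_chw = normalize(_img)    # (3, 4, 4) float array, RGB, mean/std normalised
_back = unnormalize(_chw) # (4, 4, 3) uint8 array, BGR again
assert _back.shape == _img.shape
assert np.max(np.abs(_back.astype(int) - _img.astype(int))) <= 1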
class image_normalize(object):
def __init__(self, field):
self.mean = np.array([0.485, 0.456, 0.406]).reshape((1,1,3))
self.std = np.array([0.229, 0.224, 0.225]).reshape((1,1,3))
self.field = field
def __call__(self, sample):
raws = sample[self.field]
imgs = []
for i in range(len(raws)):
img = normalize(raws[i], self.mean, self.std)
imgs.append(img)
imgs = np.stack(imgs, 0)
sample[self.field] = imgs
return sample
def img_to_tensor(input_imgs, mean=None, std=None):
imgs_np = []
for i in range(len(input_imgs)):
img_np = normalize(input_imgs[i], mean, std)
imgs_np.append(img_np)
imgs_np = np.stack(imgs_np, 0)
# to pytorch
imgs_th = torch.from_numpy(imgs_np).float()
return imgs_th
def tensor_to_img(input_imgs_th, mean=None, std=None):
imgs_np = []
for i in range(len(input_imgs_th)):
img_np = input_imgs_th[i].cpu().data.numpy()
img_np = unnormalize(img_np, mean, std)
imgs_np.append(img_np)
imgs_np = np.stack(imgs_np, 0)
return imgs_np
###########################################################
## Visualization
###########################################################
def surface_to_image(surface):
# get numpy data from cairo surface
pimg = PIL.Image.frombuffer("RGBA",
(surface.get_width(), surface.get_height()),
surface.get_data(), "raw", "RGBA", 0, 1)
frame = np.array(pimg)[:,:,:-1]
return frame
###########################################################
## Evaluation
###########################################################
def bb_iou(A, B):
eps = 1e-8
A_area = float(A[2] - A[0]) * (A[3] - A[1])
B_area = float(B[2] - B[0]) * (B[3] - B[1])
minx = max(A[0], B[0]); miny = max(A[1], B[1])
maxx = min(A[2], B[2]); maxy = min(A[3], B[3])
w = max(0, maxx - minx)
h = max(0, maxy - miny)
I_area = w * h
return I_area/(A_area + B_area - I_area + eps)
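# A tiny worked example for bb_iou with boxes in (xmin, ymin, xmax, ymax) form:
# intersection = 5 * 10 = 50, union = 100 + 100 - 50 = 150, so IoU = 1/3.
_A = [0, 0, 10, 10]
_B = [5, 0, 15, 10]
assert abs(bb_iou(_A, _B) - 50.0/150.0) < 1e-6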
def gaussian2d(x, y, sigmas):
v = (x - y)/np.array(sigmas)
return np.exp(-0.5 * np.sum(v * v))
def batch_gaussian1d(x, y, sigma):
v = (x - y)/sigma
return np.exp(-0.5 * np.sum(v * v, -1))
###########################################################
## Bounding box
###########################################################
def clip_xyxy(box, width, height):
box[0] = max(0, box[0])
box[1] = max(0, box[1])
box[2] = min(box[2], width-1)
box[3] = min(box[3], height-1)
return box.astype(np.int32)
def clip_xyxys(boxes, width, height):
boxes[:, 0] = np.maximum(boxes[:, 0], 0)
boxes[:, 1] = np.maximum(boxes[:, 1], 0)
boxes[:, 2] = np.minimum(boxes[:, 2], width - 1)
boxes[:, 3] = np.minimum(boxes[:, 3], height - 1)
return boxes.astype(np.int32)
def xywh_to_xyxy(box, width, height):
x = box[0]; y = box[1]
w = box[2]; h = box[3]
xmin = x - 0.5 * w + 1
xmax = x + 0.5 * w
ymin = y - 0.5 * h + 1
ymax = y + 0.5 * h
xyxy = np.array([xmin, ymin, xmax, ymax])
return clip_xyxy(xyxy, width, height)
def xywhs_to_xyxys(boxes, width, height):
x = boxes[:, 0]; y = boxes[:, 1]
w = boxes[:, 2]; h = boxes[:, 3]
xmin = x - 0.5 * w + 1.0
xmax = x + 0.5 * w
ymin = y - 0.5 * h + 1.0
ymax = y + 0.5 * h
xyxy = np.vstack((xmin, ymin, xmax, ymax)).transpose()
return clip_xyxys(xyxy, width, height)
def normalized_xywhs_to_xyxys(boxes):
x = boxes[:, 0]; y = boxes[:, 1]
w = boxes[:, 2]; h = boxes[:, 3]
xmin = x - 0.5 * w
xmax = x + 0.5 * w
ymin = y - 0.5 * h
ymax = y + 0.5 * h
xyxy = np.vstack((xmin, ymin, xmax, ymax)).transpose()
xyxy[:, 0] = np.maximum(xyxy[:, 0], 0.0)
xyxy[:, 1] = np.maximum(xyxy[:, 1], 0.0)
xyxy[:, 2] = np.minimum(xyxy[:, 2], 1.0)
xyxy[:, 3] = np.minimum(xyxy[:, 3], 1.0)
return xyxy
def xyxy_to_xywh(box):
x = 0.5 * (box[0] + box[2])
y = 0.5 * (box[1] + box[3])
w = box[2] - box[0] + 1.0
h = box[3] - box[1] + 1.0
return np.array([x, y, w, h])
def xyxys_to_xywhs(boxes):
x = 0.5 * (boxes[:, 0] + boxes[:, 2])
y = 0.5 * (boxes[:, 1] + boxes[:, 3])
w = boxes[:, 2] - boxes[:, 0] + 1.0
h = boxes[:, 3] - boxes[:, 1] + 1.0
return np.vstack((x, y, w, h)).transpose()
###########################################################
## Visualization
###########################################################
def paint_box(ctx, color, box):
x = box[0]; y = box[1]
w = box[2] - box[0] + 1
h = box[3] - box[1] + 1
ctx.set_source_rgb(color[0], color[1], color[2])
ctx.set_line_width(10)
ctx.rectangle(x, y, w, h)
ctx.stroke()
# ctx.set_operator(cairo.OPERATOR_ADD)
# ctx.fill()
def paint_txt(ctx, txt, box):
font_option = cairo.FontOptions()
font_option.set_antialias(cairo.Antialias.SUBPIXEL)
ctx.set_source_rgb(0, 0, 0)
ctx.set_font_options(font_option)
ctx.select_font_face("Purisa", cairo.FONT_SLANT_ITALIC, cairo.FONT_WEIGHT_BOLD)
ctx.set_font_size(60)
# ctx.set_operator(cairo.OPERATOR_ADD)
x = box[0]; y = box[1] + 50
w = box[2] - box[0] + 1
h = box[3] - box[1] + 1
ctx.move_to(x, y)
ctx.show_text(txt)
def create_squared_image(img, pad_value=None):
if pad_value is None:
pad_value = np.array([255,255,255])
width = img.shape[1]
height = img.shape[0]
max_dim = np.maximum(width, height)
offset_x = 0 #int(0.5 * (max_dim - width))
offset_y = max_dim - height #int(0.5 * (max_dim - height))
output_img = pad_value.reshape(1, 1, img.shape[-1]) * \
np.ones((max_dim, max_dim, img.shape[-1]))
output_img[offset_y : offset_y + height, \
offset_x : offset_x + width, :] = img
return output_img.astype(np.uint8), offset_x, offset_y
def create_colormap(num_colors):
dz = np.arange(1, num_colors+1)
norm = plt.Normalize()
colors = plt.cm.jet(norm(dz))
return colors[:,:3]
###########################################################
## Logging
###########################################################
def log_scores(infos, path):
log_info = {}
unigram_P = infos.unigram_P()
log_info['unigram_P'] = [np.mean(unigram_P), np.std(unigram_P), np.amin(unigram_P), np.amax(unigram_P)]
import numpy as np
class ESN():
def __init__(self, n_inputs: int, n_outputs: int, n_reservoir: int = 500,
input_scale=1, feedback_scale=1, spectral_radius=0.95,
teacher_forcing: bool = True, sparsity=0, noise=0.001,
bias=0.01, ridge=10**-10, rng=np.random.default_rng()):
"""
An implementation of Echo State Network.
The specification of the network mainly follows Lu et al (2017), while
the leakage rate is fixed to be zero.
See https://aip.scitation.org/doi/10.1063/1.4979665 for more details.
:param n_inputs: number of input dimensions
:param n_outputs: number of output (teacher) dimensions
:param n_reservoir: number of reservoir nodes
:param input_scale: scale of input weights
:param feedback_scale: scale of feedback weights
:param spectral_radius: spectral radius of the recurrent weight matrix
:param teacher_forcing: whether to feed the output (teacher) back to the network
:param sparsity: proportion of recurrent weights set to zero
:param noise: scale of noise in the network dynamics
:param bias: bias constant in activation function
:param ridge: ridge regression parameter
:param rng: random generator
"""
self.n_inputs = n_inputs
self.n_outputs = n_outputs
self.n_reservoir = n_reservoir
self.input_scale = input_scale
self.feedback_scale = feedback_scale
self.spectral_radius = spectral_radius
self.teacher_forcing = teacher_forcing
self.sparsity = sparsity
self.noise = noise
self.bias = bias
self.ridge = ridge
self.rng = rng
self._initweights()
def _initweights(self):
"""
Initialize the adjacency matrix of the reservoir network and the input weight matrix
"""
# the adjacency matrix, beginning with a random matrix in range [-0.5, 0.5):
A = self.rng.random((self.n_reservoir, self.n_reservoir)) - 0.5
# zero out a proportion `sparsity` of the connections:
A[self.rng.random(A.shape) < self.sparsity] = 0
# compute the spectral radius of these weights:
radius = np.max(np.abs(np.linalg.eigvals(A)))
# rescale them to reach the requested spectral radius:
self.A = A * (self.spectral_radius / radius)
# generate a random input weight matrix:
self.W_in = (self.rng.random((self.n_reservoir, self.n_inputs
)) * 2 - 1)*self.input_scale
# generate a random feedback weight matrix:
if self.teacher_forcing:
self.W_feedb = (self.rng.random((self.n_reservoir, self.n_outputs
)) * 2 - 1)*self.feedback_scale
return
def _update(self, current_state, input_pattern, teacher_pattern):
"""
performs one update step.
i.e., computes the next network state by applying the adjacency matrix
to the last state and the input/feedback weight matrix to an input/teacher
"""
preactivation = (np.dot(self.A, current_state)
+ np.dot(self.W_in, input_pattern))+self.bias
if self.teacher_forcing:
preactivation += np.dot(self.W_feedb, teacher_pattern)
return (np.tanh(preactivation)
+ self.noise * (self.rng.random(self.n_reservoir) - 0.5))
def fit(self, inputs, teachers):
"""
Collect the network's reaction to training data and train the output weights.
:param inputs: array of dimensions (steps * n_inputs)
:param teachers: array of dimension (steps * n_outputs)
"""
# detect and correct possible errors:
if len(teachers) != (steps := len(inputs)):
raise ValueError("teacher and input do not match")
if inputs.ndim < 2:
inputs = np.expand_dims(inputs, 1)
if inputs.shape[1] != self.n_inputs:
raise ValueError("incorrect input dimension")
if teachers.ndim < 2:
teachers = np.expand_dims(teachers, 1)
if teachers.shape[1] != self.n_outputs:
raise ValueError("incorrect teacher dimension")
# pre-allocate memory for network states:
states = np.zeros((steps, self.n_reservoir))
# let the network evolve according to inputs:
for n in range(steps-1):
states[n+1] = self._update(states[n], inputs[n+1], teachers[n])
# remember the last state for later:
self.laststate = states[-1]
self.lastoutput = teachers[-1]
# disregard the first few states:
transient = min(int(steps / 10), 300)
states = states[transient:]
teachers = teachers[transient:]
# learn the weights, i.e. solve output layer quantities W_out and c
# that make the reservoir output approximate the teacher sequence:
states_mean = np.mean(states, axis=0)
teachers_mean = np.mean(teachers, axis=0)
states_delta = states-states_mean
teachers_delta = teachers-teachers_mean
Id = np.eye(self.n_reservoir)
self.W_out = teachers_delta.T.dot(states_delta).dot(
np.linalg.inv((states_delta.T).dot(states_delta)+self.ridge*Id))
self.c = teachers_mean-self.W_out.dot(states_mean)
self.measure_error(states, teachers)
return
def measure_error(self, states, teachers):
outputs = np.squeeze(np.dot(states, self.W_out.T) + self.c)
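# A standalone sketch (numpy only, illustrative) of the spectral-radius
# rescaling performed in _initweights above: multiplying the recurrent matrix
# by (target_radius / current_radius) gives it the requested spectral radius.
_rng = np.random.default_rng(0)
_A = _rng.random((100, 100)) - 0.5
_radius = np.max(np.abs(np.linalg.eigvals(_A)))
_A_scaled = _A * (0.95/_radius)
assert abs(np.max(np.abs(np.linalg.eigvals(_A_scaled))) - 0.95) < 1e-8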
r"""Tests for parallel implementation of triangulations."""
import nose.tools as nt
import numpy as np
import os
import time
from cgal4py import _use_multiprocessing
from cgal4py import parallel, delaunay
from cgal4py.domain_decomp import GenericTree
from cgal4py.tests.test_cgal4py import make_points, make_test, MyTestCase
if _use_multiprocessing:
import multiprocessing as mp
from mpi4py import MPI
import ctypes
np.random.seed(10)
@nt.nottest
def lines_load_test(npts, ndim, periodic=False):
lines = [
"from cgal4py.tests.test_cgal4py import make_points",
"pts, le, re = make_points({}, {})".format(npts, ndim),
"load_dict = dict(pts=pts, left_edge=le, right_edge=re,",
" periodic={})".format(periodic)]
return lines
class TestGetMPIType(MyTestCase):
def setup_param(self):
self._func = parallel._get_mpi_type
self.param_equal = [(MPI.INT, ['i'], {}),
(MPI.LONG, ['l'], {}),
(MPI.FLOAT, ['f'], {}),
(MPI.DOUBLE, ['d'], {})]
self.param_raises = [(ValueError, ['m'], {})]
class TestWriteMPIScript(MyTestCase):
def setup_param(self):
self._func = parallel.write_mpi_script
fname = 'test_mpi_script.py'
read_lines = lines_load_test(10, 2)
self.param_runs = [
((fname, read_lines, 'triangulate'), {}),
((fname, read_lines, 'triangulate'), dict(use_double=True)),
((fname, read_lines, 'triangulate'), dict(use_buffer=True)),
((fname, read_lines, 'triangulate'), dict(profile=True))]
self._fname = fname
self._read_lines = read_lines
def check_runs(self, args, kwargs):
self.func(*args, **kwargs)
assert(os.path.isfile(args[0]))
os.remove(args[0])
def test_overwrite(self):
self.func(self._fname, self._read_lines, 'volumes')
t0 = os.path.getmtime(self._fname)
time.sleep(1)
self.func(self._fname, self._read_lines, 'volumes', overwrite=False)
t1 = os.path.getmtime(self._fname)
nt.eq_(t0, t1)
time.sleep(1)
self.func(self._fname, self._read_lines, 'volumes', overwrite=True)
t2 = os.path.getmtime(self._fname)
nt.assert_not_equal(t1, t2)
os.remove(self._fname)
class TestParallelLeaf(MyTestCase):
def setup_param(self):
self._func = parallel.ParallelLeaf
self.param_runs = [
((0, 2), {}),
((0, 3), {}),
# ((0, 4), {}),
((0, 2), {'periodic':True}),
((0, 3), {'periodic':True}),
# ((0, 4), {'periodic':True}),
]
def check_runs(self, args, kwargs):
pts, tree = make_test(*args, **kwargs)
left_edges = np.vstack([leaf.left_edge for leaf in tree.leaves])
right_edges = np.vstack([leaf.right_edge for leaf in tree.leaves])
for leaf in tree.leaves:
pleaf = self._func(leaf, left_edges, right_edges)
def check_tessellate(self, args, kwargs):
pts, tree = make_test(*args, **kwargs)
left_edges = np.vstack([leaf.left_edge for leaf in tree.leaves])
right_edges = np.vstack([leaf.right_edge for leaf in tree.leaves])
#
# Copyright (c) 2021 The Markovflow Contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Module containing the integration tests for the `SparsePowerExpectationPropagation` class."""
import numpy as np
import pytest
import tensorflow as tf
from gpflow.likelihoods import Gaussian
from markovflow.kernels import Matern12
from markovflow.likelihoods import PEPGaussian, PEPScalarLikelihood
from markovflow.models import (
GaussianProcessRegression,
SparseCVIGaussianProcess,
SparsePowerExpectationPropagation,
)
from tests.tools.generate_random_objects import generate_random_time_observations
OUT_DIM = 1
LENGTH_SCALE = 2.0
VARIANCE = 2.25
NUM_DATA = 2
batch_shape = ()
output_dim = 1
@pytest.fixture(name="spep_gpr_optim_setup")
def _spep_gpr_optim_setup():
"""
Creates a GPR model and a matched Sparse PEP model (z=x),
and optimize the later (single step)
"""
time_points, observations, kernel, variance = _setup()
chol_obs_covariance = tf.eye(output_dim, dtype=tf.float64) * tf.sqrt(variance)
input_data = (time_points, observations)
inducing_points = time_points + 1e-10
gpr = GaussianProcessRegression(
kernel=kernel,
input_data=input_data,
chol_obs_covariance=chol_obs_covariance,
mean_function=None,
)
likelihood = Gaussian(variance=variance)
sep = SparsePowerExpectationPropagation(
kernel=kernel,
inducing_points=inducing_points,
likelihood=PEPScalarLikelihood(likelihood),
learning_rate=0.1,
alpha=1.0,
)
scvi = SparseCVIGaussianProcess(
kernel=kernel, inducing_points=inducing_points, likelihood=likelihood, learning_rate=1.0,
)
# do not train any hyper-parameters for these tests
for t in likelihood.trainable_variables + kernel.trainable_variables:
t._trainable = False
# update sites -> optimal
scvi.update_sites(input_data)
sep.nat1.assign(scvi.nat1.numpy())
sep.nat2.assign(scvi.nat2.numpy())
return sep, gpr, input_data
def _setup():
""" Data, kernel and likelihood setup """
time_points, observations = generate_random_time_observations(
obs_dim=output_dim, num_data=NUM_DATA, batch_shape=batch_shape
)
time_points = tf.constant(time_points)
observations = tf.constant(observations)
kernel = Matern12(lengthscale=LENGTH_SCALE, variance=VARIANCE, output_dim=output_dim)
observation_noise = 1.0
variance = tf.constant(observation_noise, dtype=tf.float64)
return time_points, observations, kernel, variance
def test_optimal_sites(with_tf_random_seed, spep_gpr_optim_setup):
"""Test that the optimal value of the exact sites match the true sites """
spep, gpr, data = spep_gpr_optim_setup
spep.learning_rate = 1.0
spep.alpha = 1.0
spep.update_sites(data)
sd = spep.kernel.state_dim
# for z = x, the sites are 2 sd x 2 sd but half empty
# one part must match the GPR site
spep_nat1 = spep.nat1.numpy()[:-1, sd:]
spep_nat2 = spep.nat2.numpy()[:-1, sd:, sd:]
spep_log_norm = spep.log_norm.numpy()[:-1]
spep_energy = spep.energy(data).numpy()
# manually compute the optimal sites
s2 = gpr._chol_obs_covariance.numpy() ** 2
gpr_nat1 = gpr.observations / s2
gpr_nat2 = -0.5 / s2 * np.ones_like(spep_nat2)
gpr_log_norm = -0.5 * gpr.observations.numpy() ** 2 / s2 - 0.5 * np.log(2.0 * np.pi * s2)
import unittest
import qteasy as qt
import pandas as pd
from pandas import Timestamp
import numpy as np
from numpy import int64
import itertools
import datetime
from qteasy.utilfuncs import list_to_str_format, regulate_date_format, time_str_format, str_to_list
from qteasy.utilfuncs import maybe_trade_day, is_market_trade_day, prev_trade_day, next_trade_day, prev_market_trade_day
from qteasy.utilfuncs import next_market_trade_day
from qteasy.space import Space, Axis, space_around_centre, ResultPool
from qteasy.core import apply_loop
from qteasy.built_in import SelectingFinanceIndicator
from qteasy.history import stack_dataframes
from qteasy.tsfuncs import income, indicators, name_change, get_bar
from qteasy.tsfuncs import stock_basic, trade_calendar, new_share, get_index
from qteasy.tsfuncs import balance, cashflow, top_list, index_indicators, composite
from qteasy.tsfuncs import future_basic, future_daily, options_basic, options_daily
from qteasy.tsfuncs import fund_basic, fund_net_value, index_basic
from qteasy.evaluate import eval_alpha, eval_benchmark, eval_beta, eval_fv
from qteasy.evaluate import eval_info_ratio, eval_max_drawdown, eval_sharp
from qteasy.evaluate import eval_volatility
from qteasy.tafuncs import bbands, dema, ema, ht, kama, ma, mama, mavp, mid_point
from qteasy.tafuncs import mid_price, sar, sarext, sma, t3, tema, trima, wma, adx, adxr
from qteasy.tafuncs import apo, bop, cci, cmo, dx, macd, macdext, aroon, aroonosc
from qteasy.tafuncs import macdfix, mfi, minus_di, minus_dm, mom, plus_di, plus_dm
from qteasy.tafuncs import ppo, roc, rocp, rocr, rocr100, rsi, stoch, stochf, stochrsi
from qteasy.tafuncs import trix, ultosc, willr, ad, adosc, obv, atr, natr, trange
from qteasy.tafuncs import avgprice, medprice, typprice, wclprice, ht_dcperiod
from qteasy.tafuncs import ht_dcphase, ht_phasor, ht_sine, ht_trendmode, cdl2crows
from qteasy.tafuncs import cdl3blackcrows, cdl3inside, cdl3linestrike, cdl3outside
from qteasy.tafuncs import cdl3starsinsouth, cdl3whitesoldiers, cdlabandonedbaby
from qteasy.tafuncs import cdladvanceblock, cdlbelthold, cdlbreakaway, cdlclosingmarubozu
from qteasy.tafuncs import cdlconcealbabyswall, cdlcounterattack, cdldarkcloudcover
from qteasy.tafuncs import cdldoji, cdldojistar, cdldragonflydoji, cdlengulfing
from qteasy.tafuncs import cdleveningdojistar, cdleveningstar, cdlgapsidesidewhite
from qteasy.tafuncs import cdlgravestonedoji, cdlhammer, cdlhangingman, cdlharami
from qteasy.tafuncs import cdlharamicross, cdlhighwave, cdlhikkake, cdlhikkakemod
from qteasy.tafuncs import cdlhomingpigeon, cdlidentical3crows, cdlinneck
from qteasy.tafuncs import cdlinvertedhammer, cdlkicking, cdlkickingbylength
from qteasy.tafuncs import cdlladderbottom, cdllongleggeddoji, cdllongline, cdlmarubozu
from qteasy.tafuncs import cdlmatchinglow, cdlmathold, cdlmorningdojistar, cdlmorningstar
from qteasy.tafuncs import cdlonneck, cdlpiercing, cdlrickshawman, cdlrisefall3methods
from qteasy.tafuncs import cdlseparatinglines, cdlshootingstar, cdlshortline, cdlspinningtop
from qteasy.tafuncs import cdlstalledpattern, cdlsticksandwich, cdltakuri, cdltasukigap
from qteasy.tafuncs import cdlthrusting, cdltristar, cdlunique3river, cdlupsidegap2crows
from qteasy.tafuncs import cdlxsidegap3methods, beta, correl, linearreg, linearreg_angle
from qteasy.tafuncs import linearreg_intercept, linearreg_slope, stddev, tsf, var, acos
from qteasy.tafuncs import asin, atan, ceil, cos, cosh, exp, floor, ln, log10, sin, sinh
from qteasy.tafuncs import sqrt, tan, tanh, add, div, max, maxindex, min, minindex, minmax
from qteasy.tafuncs import minmaxindex, mult, sub, sum
from qteasy.history import get_financial_report_type_raw_data, get_price_type_raw_data
from qteasy.database import DataSource
from qteasy._arg_validators import _parse_string_kwargs, _valid_qt_kwargs
class TestCost(unittest.TestCase):
def setUp(self):
self.amounts = np.array([10000, 20000, 10000])
self.op = np.array([0, 1, -0.33333333])
self.prices = np.array([10, 20, 10])
self.r = qt.Cost()
def test_rate_creation(self):
print('testing rates objects\n')
self.assertIsInstance(self.r, qt.Cost, 'Type should be Rate')
def test_rate_operations(self):
self.assertEqual(self.r['buy_fix'], 0.0, 'Item got is incorrect')
self.assertEqual(self.r['sell_fix'], 0.0, 'Item got is wrong')
self.assertEqual(self.r['buy_rate'], 0.003, 'Item got is incorrect')
self.assertEqual(self.r['sell_rate'], 0.001, 'Item got is incorrect')
self.assertEqual(self.r['buy_min'], 5., 'Item got is incorrect')
self.assertEqual(self.r['sell_min'], 0.0, 'Item got is incorrect')
self.assertEqual(self.r['slipage'], 0.0, 'Item got is incorrect')
self.assertEqual(np.allclose(self.r(self.amounts), [0.003, 0.003, 0.003]), True, 'fee calculation wrong')
def test_rate_fee(self):
self.r.buy_rate = 0.003
self.r.sell_rate = 0.001
self.r.buy_fix = 0
self.r.sell_fix = 0
self.r.buy_min = 0
self.r.sell_min = 0
self.r.slipage = 0
print('\nSell result with fixed rate = 0.001 and moq = 0:')
print(self.r.get_selling_result(self.prices, self.op, self.amounts))
test_rate_fee_result = self.r.get_selling_result(self.prices, self.op, self.amounts)
self.assertIs(np.allclose(test_rate_fee_result[0], [0., 0., -3333.3333]), True, 'result incorrect')
self.assertAlmostEqual(test_rate_fee_result[1], 33299.999667, msg='result incorrect')
self.assertAlmostEqual(test_rate_fee_result[2], 33.333332999999996, msg='result incorrect')
print('\nSell result with fixed rate = 0.001 and moq = 1:')
print(self.r.get_selling_result(self.prices, self.op, self.amounts, 1))
test_rate_fee_result = self.r.get_selling_result(self.prices, self.op, self.amounts, 1)
self.assertIs(np.allclose(test_rate_fee_result[0], [0., 0., -3333]), True, 'result incorrect')
self.assertAlmostEqual(test_rate_fee_result[1], 33296.67, msg='result incorrect')
self.assertAlmostEqual(test_rate_fee_result[2], 33.33, msg='result incorrect')
print('\nSell result with fixed rate = 0.001 and moq = 100:')
print(self.r.get_selling_result(self.prices, self.op, self.amounts, 100))
test_rate_fee_result = self.r.get_selling_result(self.prices, self.op, self.amounts, 100)
self.assertIs(np.allclose(test_rate_fee_result[0], [0., 0., -3300]), True, 'result incorrect')
self.assertAlmostEqual(test_rate_fee_result[1], 32967.0, msg='result incorrect')
self.assertAlmostEqual(test_rate_fee_result[2], 33, msg='result incorrect')
print('\nPurchase result with fixed rate = 0.003 and moq = 0:')
print(self.r.get_purchase_result(self.prices, self.op, self.amounts, 0))
test_rate_fee_result = self.r.get_purchase_result(self.prices, self.op, self.amounts, 0)
self.assertIs(np.allclose(test_rate_fee_result[0], [0., 997.00897308, 0.]), True, 'result incorrect')
self.assertAlmostEqual(test_rate_fee_result[1], -20000.0, msg='result incorrect')
self.assertAlmostEqual(test_rate_fee_result[2], 59.82053838484547, msg='result incorrect')
print('\nPurchase result with fixed rate = 0.003 and moq = 1:')
print(self.r.get_purchase_result(self.prices, self.op, self.amounts, 1))
test_rate_fee_result = self.r.get_purchase_result(self.prices, self.op, self.amounts, 1)
self.assertIs(np.allclose(test_rate_fee_result[0], [0., 997., 0.]), True, 'result incorrect')
self.assertAlmostEqual(test_rate_fee_result[1], -19999.82, msg='result incorrect')
self.assertAlmostEqual(test_rate_fee_result[2], 59.82, msg='result incorrect')
print('\nPurchase result with fixed rate = 0.003 and moq = 100:')
print(self.r.get_purchase_result(self.prices, self.op, self.amounts, 100))
test_rate_fee_result = self.r.get_purchase_result(self.prices, self.op, self.amounts, 100)
self.assertIs(np.allclose(test_rate_fee_result[0], [0., 900., 0.]), True, 'result incorrect')
self.assertAlmostEqual(test_rate_fee_result[1], -18054., msg='result incorrect')
self.assertAlmostEqual(test_rate_fee_result[2], 54.0, msg='result incorrect')
def test_min_fee(self):
self.r.buy_rate = 0.
self.r.sell_rate = 0.
self.r.buy_fix = 0.
self.r.sell_fix = 0.
self.r.buy_min = 300
self.r.sell_min = 300
self.r.slipage = 0.
print('\npurchase result with fixed cost rate with min fee = 300 and moq = 0:')
print(self.r.get_purchase_result(self.prices, self.op, self.amounts, 0))
test_min_fee_result = self.r.get_purchase_result(self.prices, self.op, self.amounts, 0)
self.assertIs(np.allclose(test_min_fee_result[0], [0., 985, 0.]), True, 'result incorrect')
self.assertAlmostEqual(test_min_fee_result[1], -20000.0, msg='result incorrect')
self.assertAlmostEqual(test_min_fee_result[2], 300.0, msg='result incorrect')
print('\npurchase result with fixed cost rate with min fee = 300 and moq = 10:')
print(self.r.get_purchase_result(self.prices, self.op, self.amounts, 10))
test_min_fee_result = self.r.get_purchase_result(self.prices, self.op, self.amounts, 10)
self.assertIs(np.allclose(test_min_fee_result[0], [0., 980, 0.]), True, 'result incorrect')
self.assertAlmostEqual(test_min_fee_result[1], -19900.0, msg='result incorrect')
self.assertAlmostEqual(test_min_fee_result[2], 300.0, msg='result incorrect')
print('\npurchase result with fixed cost rate with min fee = 300 and moq = 100:')
print(self.r.get_purchase_result(self.prices, self.op, self.amounts, 100))
test_min_fee_result = self.r.get_purchase_result(self.prices, self.op, self.amounts, 100)
self.assertIs(np.allclose(test_min_fee_result[0], [0., 900, 0.]), True, 'result incorrect')
self.assertAlmostEqual(test_min_fee_result[1], -18300.0, msg='result incorrect')
self.assertAlmostEqual(test_min_fee_result[2], 300.0, msg='result incorrect')
print('\nselling result with fixed cost rate with min fee = 300 and moq = 0:')
print(self.r.get_selling_result(self.prices, self.op, self.amounts))
test_min_fee_result = self.r.get_selling_result(self.prices, self.op, self.amounts)
self.assertIs(np.allclose(test_min_fee_result[0], [0, 0, -3333.3333]), True, 'result incorrect')
self.assertAlmostEqual(test_min_fee_result[1], 33033.333)
self.assertAlmostEqual(test_min_fee_result[2], 300.0)
print('\nselling result with fixed cost rate with min fee = 300 and moq = 1:')
print(self.r.get_selling_result(self.prices, self.op, self.amounts, 1))
test_min_fee_result = self.r.get_selling_result(self.prices, self.op, self.amounts, 1)
self.assertIs(np.allclose(test_min_fee_result[0], [0, 0, -3333]), True, 'result incorrect')
self.assertAlmostEqual(test_min_fee_result[1], 33030)
self.assertAlmostEqual(test_min_fee_result[2], 300.0)
print('\nselling result with fixed cost rate with min fee = 300 and moq = 100:')
print(self.r.get_selling_result(self.prices, self.op, self.amounts, 100))
test_min_fee_result = self.r.get_selling_result(self.prices, self.op, self.amounts, 100)
self.assertIs(np.allclose(test_min_fee_result[0], [0, 0, -3300]), True, 'result incorrect')
self.assertAlmostEqual(test_min_fee_result[1], 32700)
self.assertAlmostEqual(test_min_fee_result[2], 300.0)
def test_rate_with_min(self):
"""Test transaction cost calculated by rate with min_fee"""
self.r.buy_rate = 0.0153
self.r.sell_rate = 0.01
self.r.buy_fix = 0.
self.r.sell_fix = 0.
self.r.buy_min = 300
self.r.sell_min = 333
self.r.slipage = 0.
print('\npurchase result with fixed cost rate with buy_rate = 0.0153, min fee = 300 and moq = 0:')
print(self.r.get_purchase_result(self.prices, self.op, self.amounts, 0))
test_rate_with_min_result = self.r.get_purchase_result(self.prices, self.op, self.amounts, 0)
self.assertIs(np.allclose(test_rate_with_min_result[0], [0., 984.9305624, 0.]), True, 'result incorrect')
self.assertAlmostEqual(test_rate_with_min_result[1], -20000.0, msg='result incorrect')
self.assertAlmostEqual(test_rate_with_min_result[2], 301.3887520929774, msg='result incorrect')
print('\npurchase result with fixed cost rate with buy_rate = 0.0153, min fee = 300 and moq = 10:')
print(self.r.get_purchase_result(self.prices, self.op, self.amounts, 10))
test_rate_with_min_result = self.r.get_purchase_result(self.prices, self.op, self.amounts, 10)
self.assertIs(np.allclose(test_rate_with_min_result[0], [0., 980, 0.]), True, 'result incorrect')
self.assertAlmostEqual(test_rate_with_min_result[1], -19900.0, msg='result incorrect')
self.assertAlmostEqual(test_rate_with_min_result[2], 300.0, msg='result incorrect')
print('\npurchase result with fixed cost rate with buy_rate = 0.0153, min fee = 300 and moq = 100:')
print(self.r.get_purchase_result(self.prices, self.op, self.amounts, 100))
test_rate_with_min_result = self.r.get_purchase_result(self.prices, self.op, self.amounts, 100)
self.assertIs(np.allclose(test_rate_with_min_result[0], [0., 900, 0.]), True, 'result incorrect')
self.assertAlmostEqual(test_rate_with_min_result[1], -18300.0, msg='result incorrect')
self.assertAlmostEqual(test_rate_with_min_result[2], 300.0, msg='result incorrect')
print('\nselling result with fixed cost rate with sell_rate = 0.01, min fee = 333 and moq = 0:')
print(self.r.get_selling_result(self.prices, self.op, self.amounts))
test_rate_with_min_result = self.r.get_selling_result(self.prices, self.op, self.amounts)
self.assertIs(np.allclose(test_rate_with_min_result[0], [0, 0, -3333.3333]), True, 'result incorrect')
# coding=utf-8
# 20160510
# __author__ = 'xhcao'
import numpy as np
import scipy.spatial as sp
# A = self.method.distance_correction_for_one_matrix(X, dimension)
# B = self.method.distance_correction_for_one_matrix(Y, dimension)
# corr_matrix[i, j] = self.method.distance_correlation(A, B)
def distance_correction_for_one_matrix(x, dimension):
# x is one sample (ndarray); each column of the matrix is one feature attribute of the sample
# akl
n = x.shape[0]
akl = sp.distance.cdist(x, x, 'minkowski', p = dimension)  # pairwise Minkowski distance of order `dimension`
#ak*
ak_ = np.zeros(n)
for i in range(0,n):
ak_[i] = np.sum(akl[i,:])
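# A hedged sketch of where the row/column sums above are heading: the standard
# distance-correlation recipe double-centres the pairwise-distance matrices and
# then correlates them. The names below (_double_center, _distance_correlation)
# are illustrative and not taken from the original module.
def _double_center(d):
    # subtract row means and column means, add back the grand mean
    return d - d.mean(axis=0, keepdims=True) - d.mean(axis=1, keepdims=True) + d.mean()

def _distance_correlation(x, y, dimension=2):
    a = _double_center(sp.distance.cdist(x, x, 'minkowski', p=dimension))
    b = _double_center(sp.distance.cdist(y, y, 'minkowski', p=dimension))
    dcov2 = (a*b).mean()    # squared sample distance covariance
    dvar_x = (a*a).mean()   # squared sample distance variances
    dvar_y = (b*b).mean()
    return np.sqrt(dcov2/np.sqrt(dvar_x*dvar_y))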
# Copyright 2019 <NAME> & <NAME>
#
# This file is part of OBStools.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import sys
import numpy as np
from obspy.core import Stream
import matplotlib.pyplot as plt
def decompose(RF_r, RF_t, t1=0., t2=1., plot_f=False, plot_comps=False):
"""
Function to decompose radial and transverse receiver function
streams into back-azimuth harmonics and determine the main
orientation ``azim``, obtained by minimizing the H1' component
between ``t1`` and ``t2``.
Parameters
----------
RF_r : :class:`~obspy.core.Stream`
Stream containing the radial component receiver functions
RF_t : :class:`~obspy.core.Stream`
Stream containing the transverse component receiver functions
t1 : float
Minimum time over which to calculate ``azcorr`` (sec)
t2 : float
Maximum time over which to calculate ``azcorr`` (sec)
Returns
-------
azcorr : float
Direction (azimuth) along which the Ht_0 harmonic component
is minimized (between ``t1`` and ``t2``)
RMS : :class:`~numpy.ndarray`
Root-mean-square misfit used to determine azcorr
hr0_rot : :class:`~numpy.ndarray`
Rotated Hr_0 component
ht0_rot : :class:`~numpy.ndarray`
Rotated Ht_0 component
"""
if not isinstance(RF_r, Stream):
raise(Exception("Input radial component is not a Stream object"))
if not isinstance(RF_t, Stream):
raise(Exception("Input transverse component is not a Stream object"))
# Some integers
nbin = len(RF_r)
nn = len(RF_r[0].data)
dt = RF_r[0].stats.delta
daz = 0.1
naz = int(180./daz)
deg2rad = np.pi/180.
# Initialize work arrays
taxis = np.arange(-nn/2, nn/2)*dt
trange = np.where((taxis>t1) & (taxis<t2))[0]
print(trange)
print(taxis[trange])
nt = len(trange)
hr0_rot = np.zeros((nt, naz))
ht0_rot = np.zeros((nt, naz))
hr0 = np.zeros(nt); hr1 = np.zeros(nt); hr2 = np.zeros(nt)
hr3 = np.zeros(nt); hr4 = np.zeros(nt); meanr = np.zeros(nt)
ht0 = np.zeros(nt); ht1 = np.zeros(nt); ht2 = np.zeros(nt)
ht3 = np.zeros(nt); ht4 = np.zeros(nt); meant = np.zeros(nt)
# Loop over each depth step
for ii, it in enumerate(trange):
# Initialize work arrays
d_r = np.zeros(nbin)
d_t = np.zeros(nbin)
G = np.zeros((nbin, 5))
# Build arrays and matrices
for itrace in range(nbin):
baz = RF_r[itrace].stats.baz
d_r[itrace] = RF_r[itrace].data[it]
d_t[itrace] = RF_t[itrace].data[it]
G[itrace, 0] = 1.0
G[itrace, 1] = np.cos(deg2rad*baz)
G[itrace, 2] = np.sin(deg2rad*baz)
G[itrace, 3] = np.cos(2.*deg2rad*baz)
G[itrace, 4] = np.sin(2.*deg2rad*baz)
# Solve using damped least squares
lam=1.e-25
m_r = np.linalg.solve(np.dot(G.T, G)+lam*np.identity(G.shape[1]),
np.dot(G.T, d_r))
m_t = np.linalg.solve(np.dot(G.T, G)+lam*np.identity(G.shape[1]),
np.dot(G.T, d_t))
meanr[ii] = np.mean(d_r)
hr0[ii] = m_r[0]
hr1[ii] = m_r[1]
hr2[ii] = m_r[2]
hr3[ii] = m_r[3]
hr4[ii] = m_r[4]
meant[ii] = np.mean(d_t)
ht0[ii] = m_t[0]
ht1[ii] = m_t[1]
ht2[ii] = m_t[2]
ht3[ii] = m_t[3]
ht4[ii] = m_t[4]
for iaz in range(naz):
phi = iaz*daz*deg2rad
hr0_rot[ii, iaz] = np.cos(phi)*m_r[0] + np.sin(phi)*m_t[0]
ht0_rot[ii, iaz] = -np.sin(phi)*m_r[0] + np.cos(phi)*m_t[0]
# Minimize misfit of rotated transverse component over specific
# time range to find azim
RMS = np.zeros(naz)
for iaz in range(naz):
RMS[iaz] = np.sqrt(np.mean(np.square(ht0_rot[:, iaz])))
# RMS[iaz] = np.sqrt(np.mean(np.square(ht0_rot[indmin:indmax, iaz])))
# Azimuth of H1
indaz = np.argmin(RMS)
azcorr = indaz*daz
# Resolve ambiguity based on radial component
if np.mean(hr0_rot[:, indaz]) < 0.:
azcorr += 180.
# Rotated components
phi = deg2rad*azcorr
meanr_r = np.cos(phi)*meanr + np.sin(phi)*meant
hr0_r = np.cos(phi)*hr0 + np.sin(phi)*ht0
hr1_r = np.cos(phi)*hr1 + np.sin(phi)*ht1
hr2_r = np.cos(phi)*hr2 + np.sin(phi)*ht2
hr3_r = np.cos(phi)*hr3 + np.sin(phi)*ht3
hr4_r = np.cos(phi)*hr4 + np.sin(phi)*ht4
meant_r = -np.sin(phi)*meanr + np.cos(phi)*meant
ht0_r = -np.sin(phi)*hr0 + np.cos(phi)*ht0
ht1_r = -np.sin(phi)*hr1 + np.cos(phi)*ht1
ht2_r = -np.sin(phi)*hr2 + np.cos(phi)*ht2
ht3_r = -np.sin(phi)*hr3 + np.cos(phi)*ht3
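# A standalone sketch (synthetic data, illustrative only) of the damped
# least-squares step used above: with a tiny damping term lam, solving
# (G^T G + lam*I) m = G^T d recovers the harmonic coefficients.
_baz = np.linspace(0.0, 350.0, 36)*np.pi/180.0
_G = np.column_stack([np.ones_like(_baz), np.cos(_baz), np.sin(_baz),
                      np.cos(2.*_baz), np.sin(2.*_baz)])
_m_true = np.array([0.5, 0.2, -0.1, 0.05, 0.0])
_d = _G.dot(_m_true)
_lam = 1.e-25
_m_est = np.linalg.solve(np.dot(_G.T, _G) + _lam*np.identity(5), np.dot(_G.T, _d))
assert np.allclose(_m_est, _m_true)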
import os, pickle
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
import imageio
from glob import glob
import tensorflow_datasets as tfds
from tensorflow.keras.optimizers import (Adam, RMSprop, SGD, Adagrad,
Adadelta)
from tensorflow.keras.layers import LeakyReLU, ReLU, Activation
from tensorflow.keras.utils import plot_model
from tensorflow.keras.datasets import mnist
class BaseModel:
def __init__(self):
self.models = {}
self.di_real_lss = []
self.di_fake_lss = []
self.di_lss = []
self.di_acc = []
self.ge_lss = []
def optimizer(self, lr):
opt = self.opt.lower()
if opt == 'rmsprop':
optimizer = RMSprop(learning_rate=lr)
elif opt == 'sgd':
optimizer = SGD(learning_rate=lr)
elif opt == 'adagrad':
optimizer = Adagrad(learning_rate=lr)
elif opt == 'adadelta':
optimizer = Adadelta(learning_rate=lr)
elif opt == 'adam' and self.beta1:
optimizer = Adam(learning_rate=lr, beta_1=self.beta1)
else:
optimizer = Adam(learning_rate=lr)
return optimizer
def activation(self, act, alpha=0.2):
act = act.lower()
if act == 'leakyrelu':
activation = LeakyReLU(alpha=alpha)
elif act == 'relu':
activation = ReLU()
else:
activation = Activation(act)
return activation
def trainable(self, model, tf):
model.trainable = tf
for layer in model.layers:
layer.trainable = tf
def summary(self):
for name, model in self.models.items():
print('############### %s ###############' % (name))
model.summary()
def show_img(self, generator, z_dim, file_name, color='RGB', show=False):
r, c = 5, 5
noise = np.random.normal(0, 1, (r * c, z_dim))
gen_imgs = generator.predict(noise)
gen_imgs = 0.5 * (gen_imgs + 1)
gen_imgs = np.clip(gen_imgs, 0, 1)
fig, axs = plt.subplots(r, c, figsize=(15, 15))
cnt = 0
for i in range(r):
for j in range(c):
axs[i, j].imshow(np.squeeze(gen_imgs[cnt, :, :, :]), cmap=color)
axs[i, j].axis('off')
cnt += 1
if show:
plt.show()
fig.savefig(os.path.join(self.loader.folder, 'images', file_name))
plt.close()
def plot_models(self):
folder = os.path.join(self.loader.folder, 'plots')
os.makedirs(folder, exist_ok=True)
for name, model in self.models.items():
file_name = os.path.join(folder, name + '.png')
plot_model(model, to_file=file_name, show_shapes=True)
def save_params(self):
folder = os.path.join(self.loader.folder, 'params')
os.makedirs(folder, exist_ok=True)
file_name = os.path.join(folder, 'params.pkl')
with open(file_name, 'wb') as f:
pickle.dump(self.params, f)
def save_weights(self, file_name='weights.h5'):
folder = os.path.join(self.loader.folder, 'weights')
os.makedirs(folder, exist_ok=True)
for name, model in self.models.items():
file = os.path.join(folder, name + file_name)
model.save_weights(file)
def save_models(self, epoch=''):
folder = os.path.join(self.loader.folder, 'models')
os.makedirs(folder, exist_ok=True)
for name, model in self.models.items():
file_name = os.path.join(folder, '{}{}.h5'.format(name, epoch))
model.save(file_name)
def load_models(self, epoch=''):
folder = os.path.join(self.loader.folder, 'models')
for name, model in self.models.items():
file_name = os.path.join(folder, '{}{}.h5'.format(name, epoch))
model.load_weights(file_name)
def plot_loss(self):
fig = plt.figure(figsize=(150, 100))
ax1 = fig.add_subplot(231)
ax1.set_xlim([0, len(self.di_real_lss)])
ax1.set_title('Discriminator Real Loss')
ax1.set_xlabel('Epochs')
ax1.set_ylabel('Loss')
ax1.plot(range(len(self.di_real_lss)), self.di_real_lss)
ax2 = fig.add_subplot(232)
ax2.set_xlim([0, len(self.di_fake_lss)])
ax2.set_title('Discriminator Fake Loss')
ax2.set_xlabel('Epochs')
ax2.set_ylabel('Loss')
ax2.plot(range(len(self.di_fake_lss)), self.di_fake_lss)
ax3 = fig.add_subplot(233)
ax3.set_xlim([0, len(self.di_lss)])
ax3.set_title('Discriminator Loss')
ax3.set_xlabel('Epochs')
ax3.set_ylabel('Loss')
ax3.plot(range(len(self.di_lss)), self.di_lss)
ax4 = fig.add_subplot(234)
ax4.set_xlim([0, len(self.ge_lss)])
ax4.set_title('Generator Loss')
ax4.set_xlabel('Epochs')
ax4.set_ylabel('Loss')
ax4.plot(range(len(self.ge_lss)), self.ge_lss)
ax5 = fig.add_subplot(235)
ax5.set_xlim([0, len(self.di_acc)])
ax5.set_ylim([0, 100])
ax5.set_title('Discriminator Accuracy')
ax5.set_xlabel('Epochs')
ax5.set_ylabel('Accuracy')
ax5.plot(range(len(self.di_acc)), self.di_acc)
plt.show()
plt.cla()
plt.clf()
class DataLoader:
def __init__(self, dataset, ID, shape=(256, 256), color='RGB', section='GAN'):
self.dataset = dataset
self.shape = shape
self.color = color
self.folder = './run/{}/{}_{}'.format(section, ID, dataset)
os.makedirs(os.path.join(self.folder, 'graph'), exist_ok=True)
os.makedirs(os.path.join(self.folder, 'images'), exist_ok=True)
os.makedirs(os.path.join(self.folder, 'weights'), exist_ok=True)
def load_batch(self, batch_size=1, is_testing=False):
data_type = 'train' if not is_testing else 'val'
path_A = glob('./datasets/{}/{}A/*'.format(self.dataset, data_type))
path_B = glob('./datasets/{}/{}B/*'.format(self.dataset, data_type))
self.n_batches = min(len(path_A), len(path_B)) // batch_size
path_A = np.random.choice(path_A,
self.n_batches * batch_size,
replace=False)
path_B = np.random.choice(path_B,
self.n_batches * batch_size,
replace=False)
for i in range(self.n_batches - 1):
batch_A = path_A[i * batch_size:(i + 1) * batch_size]
batch_B = path_B[i * batch_size:(i + 1) * batch_size]
imgs_A = np.empty((batch_size, self.shape[0], self.shape[1], 3))
imgs_B = np.empty((batch_size, self.shape[0], self.shape[1], 3))
for i, (img_A, img_B) in enumerate(zip(batch_A, batch_B)):
img_A = imageio.imread(img_A, pilmode=self.color).astype(np.uint8)
img_B = imageio.imread(img_B, pilmode=self.color).astype(np.uint8)
imgs_A[i] = np.array(img_A)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Written by <NAME> and CBIG under MIT license:
https://github.com/ThomasYeoLab/CBIG/blob/master/LICENSE.md
"""
import os
import time
import numpy as np
from scipy.stats.stats import pearsonr
import torch
from utils import load_graph, preprocess_adj, normalized_laplacian
from utils import rescale_laplacian, chebyshev_polynomial
def mics_z_norm(train_y, valid_y, test_y):
'''z normalize y of training, validation and test set based on training set
Args:
train_y (ndarray): training y data
valid_y (ndarray): validation y data
test_y (ndarray): testing y data
Returns:
Tuple: contains z-normed y data and std of training y data
'''
# subtract mean of y of training set
t_mu = train_y.mean(axis=0, keepdims=True)
train_y = train_y - t_mu
valid_y = valid_y - t_mu
test_y = test_y - t_mu
# divide std of y of training set
t_sigma = train_y.std(axis=0)
train_y = train_y / t_sigma[np.newaxis, :]
valid_y = valid_y / t_sigma[np.newaxis, :]
test_y = test_y / t_sigma[np.newaxis, :]
# return processed y and std for future MAE calculation
return [train_y, valid_y, test_y, t_sigma]
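# A small usage sketch: only the training statistics are used, so the training
# set becomes exactly zero-mean / unit-std, while validation and test are
# shifted and scaled by those same constants (values are illustrative).
_tr = np.array([[0.0], [2.0], [4.0]])
_va = np.array([[2.0]])
_te = np.array([[6.0]])
_tr_n, _va_n, _te_n, _sigma = mics_z_norm(_tr, _va, _te)
assert np.allclose(_tr_n.mean(axis=0), 0.0) and np.allclose(_tr_n.std(axis=0), 1.0)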
def mics_z_norm_gcnn(input_y, train_mask):
"""z normalize y based on training data
Args:
input_y (ndarray): y data
train_mask (ndarray): mask of training data
Returns:
Tuple: contains z-normed y data and std of training y data
"""
# get mean and std of training data
y_tra = input_y[train_mask, :]
t_mu = y_tra.mean(axis=0)
t_sigma = y_tra.std(axis=0)
# perform z-norm
input_y = input_y - t_mu[np.newaxis, :]
input_y = input_y / t_sigma[np.newaxis, :]
return [input_y, t_sigma]
def mics_z_norm_test(train_valid_y, test_y):
"""z normalize y test set based on training data for HCP dataset
Args:
train_valid_y (list): list of y data for both training and validation
test_y (ndarray): test y data
Returns:
Tuple: z normed test y data, and std of training y data
"""
base_y = np.vstack(train_valid_y)
t_v_mu = base_y.mean(axis=0)
test_y = test_y - t_v_mu[np.newaxis, :]
t_v_sigma = base_y.std(axis=0)
test_y = test_y / t_v_sigma[np.newaxis, :]
return test_y, t_v_sigma
def mics_train_valid_split(train_valid_x,
train_valid_y,
fold=None,
is_bnc=False):
"""split training and validation data (HCP only)
Args:
train_valid_x (list): list of y data for both training and validation
train_valid_y (list): list of x data for both training and validation
fold (int, optional): index of fold for validation, if None, no
validation is going to be returned
is_bnc (bool, optional): whether function is used for brainnetcnn
Returns:
Tuple: if fold is None, all data in list train_valid_x and y are
combined as training x and y. If fold is not None, the
corresponding fold is returned as validation data, while the
remaining folds are combined as training data.
"""
if fold is not None:
valid_index = fold
valid_x = train_valid_x[valid_index]
valid_y = train_valid_y[valid_index]
train_valid_x = np.delete(train_valid_x, valid_index, axis=0)
train_valid_y = np.delete(train_valid_y, valid_index, axis=0)
tmp = list(train_valid_x[0].shape)
tmp[0] = 0
train_x = np.zeros(tmp)
train_y = np.zeros((0, train_valid_y[0].shape[-1]))
for i in range(len(train_valid_x)):
train_x = np.concatenate((train_x, train_valid_x[i]), axis=0)
train_y = np.concatenate((train_y, train_valid_y[i]), axis=0)
if is_bnc:
train_x = np.expand_dims(train_x, axis=-1)
if fold is not None:
if is_bnc:
valid_x = np.expand_dims(valid_x, axis=-1)
t_mu = train_y.mean(axis=0)
train_y = train_y - t_mu[np.newaxis, :]
valid_y = valid_y - t_mu[np.newaxis, :]
t_sigma = train_y.std(axis=0)
train_y = train_y / t_sigma[np.newaxis, :]
valid_y = valid_y / t_sigma[np.newaxis, :]
return [train_x, valid_x, train_y, valid_y]
t_mu = train_y.mean(axis=0)
train_y = train_y - t_mu[np.newaxis, :]
t_sigma = train_y.std(axis=0)
train_y = train_y / t_sigma[np.newaxis, :]
return [train_x, train_y]
def mics_train_valid_mask_split(train_valid_mask, fold=None):
"""split training and validation mask for gcnn (HCP only)
Args:
train_valid_mask (list): list of training and validation mask
fold (int, optional): index of fold for validation, if None, no
validation is going to be returned
Returns:
Tuple: training and validation mask
"""
# Data split
if fold is not None:
valid_mask = train_valid_mask[fold]
train_list = np.delete(train_valid_mask, fold, axis=0)
else:
valid_mask = None
train_list = train_valid_mask
train_mask = np.zeros(train_valid_mask[0].shape)
for i in range(len(train_list)):
train_mask = np.logical_or(train_mask, train_list[i])
return [train_mask, valid_mask]
def mics_hcp_log(model_name, out_path, **kwargs):
"""calculate the test result and save the log
Args:
model_name (str): name of the model
out_path (str): path to save the log npz file
**kwargs: record of training, validation and test value
Returns:
None
"""
val_cor = kwargs['val_cor_log']
tes_cor = kwargs['tes_cor_log']
n_folds = tes_cor.shape[0]
temp = np.mean(val_cor, axis=-1)
temp = np.mean(temp, axis=1)
index = np.argmax(temp, axis=-1)
print('Optimal index for each fold at:', index)
result = np.array([tes_cor[i, index[i], :] for i in range(n_folds)])
# avg = np.mean(result, axis=0)
# err = np.std(result, axis=0) / np.sqrt(n_folds)
temp = np.mean(result, axis=1)
print('Optimal result for each fold:', temp)
avg_a = np.mean(temp, axis=0)
# err_a = np.std(temp, axis=0) / np.sqrt(n_folds)
print('Final test result:', avg_a)
kwargs['metric'] = avg_a
# save record value for future use
date_str = time.strftime("%Y_%m_%d_%H_%M")
os.makedirs(out_path, exist_ok=True)
file_str = 'HCP_' + model_name + '_' + date_str + '.npz'
name_str = os.path.join(out_path, file_str)
np.savez(name_str, **kwargs)
print('log saved at:', file_str)
return
def mics_hcp_infer(model, x, y, sigma, x_train=None, y_train=None):
"""evaluate model prediction for given data (HCP only)
Args:
model (keras.models.Model): keras DNN model
x (ndarray): input x data
y (ndarray): y data
sigma (ndarray): std of training y data
x_train (ndarray, optional): training x data
y_train (ndarray, optional): training y data
Returns:
Tuple: correlation and MAE between real and predicted y, and predicted
y value
"""
y_pred = model.predict(x, batch_size=48, verbose=0)
cor = np.zeros((y.shape[-1]))
mae = np.zeros((y.shape[-1]))
for i in range(y.shape[-1]):
cor[i] = pearsonr(y_pred[:, i], y[:, i])[0]
mae[i] = np.mean(np.abs(y_pred[:, i] - y[:, i])) * sigma[i]
if x_train is None:
return cor, mae, y_pred
else:
y_pred_t = model.predict(x_train, batch_size=48, verbose=0)
cor_train = np.zeros((y_train.shape[-1]))
for i in range(y_train.shape[-1]):
cor_train[i] = pearsonr(y_pred_t[:, i], y_train[:, i])[0]
return cor, mae, y_pred, cor_train
def mics_hcp_gcnn_eval(preds, input_y, mask, sigma=None, train_mask=None):
"""evaluate model prediction for given data (HCP and gcnn only)
Args:
preds (ndarray): predicted y value
input_y (ndarray): real y value
mask (ndarray): mask on y value
sigma (ndarray, optional): std of training y data
train_mask (ndarray, optional): mask on training y value
Returns:
TYPE: correlation, loss and MAE between real and predicted y
"""
index = np.nonzero(mask)[0]
pred = preds[index, :]
real = input_y[index, :]
los = np.mean(np.mean(np.square(pred - real), axis=-1))
cor = np.zeros((input_y.shape[-1]))
mae = np.zeros((input_y.shape[-1]))
for i in range(input_y.shape[-1]):
cor[i] = pearsonr(pred[:, i], real[:, i])[0]
if sigma is not None:
mae[i] = np.mean(np.abs(pred[:, i] - real[:, i])) * sigma[i]
if train_mask is None:
return cor, los, mae
else:
index = np.nonzero(train_mask)[0]
pred = preds[index, :]
real = input_y[index, :]
cor_train = np.zeros((input_y.shape[-1]))
for i in range(input_y.shape[-1]):
cor_train[i] = pearsonr(pred[:, i], real[:, i])[0]
return cor, los, mae, cor_train
def mics_infer_metric(dataloader,
net,
criterion,
device,
t_sigma=None,
need_value=False):
'''performance inference with net on data from dataloader and calculate
metric
Args:
dataloader: dataloader to load data for PyTorch framework
net: PyTorch deep learning network
criterion: criterion for loss calculation
t_sigma (float, optional): std of training y data, only used if sex is
not the behavioral measure
need_value (bool, optional): whether to return records of the real and
predicted values
Returns:
Tuple: if t_sigma is not None, correlation, MAE and loss are returned.
If t_sigma is None, accuracy and loss are returned. If need_value is
set to True, the returned tuple also contains records of the real and
predicted y values alongside the metrics. If need_value is False,
only metrics are returned.
'''
# initialize variable for record
record_loss = 0.0
if t_sigma is None:
record_correct = 0.0 # count of correct prediction
record_total = 0.0 # count of total prediction
record_real = np.zeros((0))
record_pred = np.zeros((0, 2))
else:
record_real = np.zeros((0, 1)) # real value
record_pred = np.zeros((0, 1)) # prediction value
# perform inference
for (x, y) in dataloader:
x, y = x.to(device), y.to(device)
outputs = net(x)
loss = criterion(outputs, y)
record_loss += loss.item()
record_real = np.concatenate((record_real, y.data.cpu().numpy()),
axis=0)
record_pred = np.concatenate((record_pred, outputs.data.cpu().numpy()),
axis=0)
if t_sigma is None:
_, predicted = torch.max(outputs.data, 1)
record_total += y.size(0)
record_correct += (predicted == y.data).sum()
# metric calculation
loss = record_loss / len(dataloader)
if t_sigma is None:
aucc = record_correct.to(torch.float) / record_total
if need_value:
return aucc, loss, record_real, record_pred
else:
return aucc, loss
else:
corr = pearsonr(record_real, record_pred)[0]
mae = np.mean(np.abs(record_real - record_pred)) * t_sigma
if need_value:
return corr, mae, loss, record_real, record_pred
else:
return corr, mae, loss
def mics_log(model_name, out_path, index=None, item=None, **kwargs):
'''function to calculate the final result and save the record
Args:
model_name (str): name of network/model
index (int): index of optimal epoch
out_path (str): path to save the log
item (float, optional): indicates which behavioral measure is predicted
**kwargs: record of training, validation and test value
Returns:
None
'''
date_str = time.strftime("%Y_%m_%d_%H_%M")
if index is None:
if item is None:
val_record = kwargs['val_auc_record']
else:
val_record = kwargs['val_cor_record']
temp = np.mean(val_record, axis=0)
temp = np.convolve(temp, np.ones(3, dtype=int), 'valid') / 3
index = np.nanargmax(temp)
index = index + 1
print('\nBest validation at index: ', index)
if item is None:
val_auc_record = kwargs['val_auc_record']
tes_auc_record = kwargs['tes_auc_record']
tes_res_record = kwargs['tes_res_record']
final_original = kwargs['final_original']
# get result at that epoch for both validation and test
print('Average validation aucc:',
np.nanmean(val_auc_record[:, index], axis=0))
print('Average test aucc:', np.nanmean(
tes_auc_record[:, index], axis=0))
# get ensemble result for test data
final_predict = np.argmax(
np.nanmean(tes_res_record[:, index, :, :], axis=0), axis=1)
if len(final_original.shape) == 2:
final_original = np.argmax(final_original, axis=1)
n_test = float(final_original.shape[0])
metric = (final_predict == final_original).sum() / n_test
print('Final averaged test aucc', metric)
file_str = model_name + '_sex_' + date_str + '.npz'
else:
val_cor_record = kwargs['val_cor_record']
val_mae_record = kwargs['val_mae_record']
tes_cor_record = kwargs['tes_cor_record']
tes_mae_record = kwargs['tes_mae_record']
tes_res_record = kwargs['tes_res_record']
final_original = kwargs['final_original']
t_sigma = kwargs['t_sigma']
# get average result for validation and test data
print('Average validation corr:',
np.nanmean(val_cor_record[:, index], axis=0), ', MAE:',
np.nanmean(val_mae_record[:, index], axis=0))
print('Average test corr', np.nanmean(
tes_cor_record[:, index], axis=0), ', MAE',
np.nanmean(tes_mae_record[:, index], axis=0))
# get ensemble result for test data
final_predict = np.nanmean(tes_res_record[:, index, :], axis=0)
final_original = np.squeeze(final_original)
metric = pearsonr(final_predict, final_original)[0]
print('Final ensemble test corr', metric, ', MAE',
np.nanmean(np.abs(final_predict - final_original)) * t_sigma)
file_str = model_name + '_pred_' + str(item) + '_' + date_str + '.npz'
kwargs['final_predict'] = final_predict
kwargs['metric'] = metric
# save record value for future use
name_str = os.path.join(out_path, file_str)
os.makedirs(out_path, exist_ok=True)
np.savez(name_str, **kwargs)
print('file saved:', name_str)
return
def mics_graph_matrix(num_subject, graph_folder, GRAPH_ADJ, FILTER,
MAX_DEGREE):
"""Generate graph matrix for GCNN
Args:
num_subject (int): number of subject for data
graph_folder (str): location of folder for graph
GRAPH_ADJ (str): the filename of graph
FILTER (str): type of gcnn filter
MAX_DEGREE (int): degree of Chebyshev polynomial
Returns:
Tuple: contains the graph_matrix and number of support used for GCNN
Raises:
Exception: invalid FILTER type
"""
SYM_NORM = True # symmetric (True) vs. left-only (False) normalization
# build the graph
A = load_graph(dimension=num_subject, path=graph_folder, graph=GRAPH_ADJ)
# estimate the laplacian
if FILTER == 'localpool':
""" Local pooling filters
(see 'renormalization trick' in Kipf & Welling, arXiv 2016)
"""
print('Using local pooling filters...')
A_ = preprocess_adj(A, SYM_NORM)
support = 1
graph_matrix = [A_]
elif FILTER == 'chebyshev':
""" Chebyshev polynomial basis filters
(Defferard et al., NIPS 2016)
"""
print('Using Chebyshev polynomial basis filters...')
L = normalized_laplacian(A, SYM_NORM)
L_scaled = rescale_laplacian(L)
T_k = chebyshev_polynomial(L_scaled, MAX_DEGREE)
support = MAX_DEGREE + 1
graph_matrix = T_k
else:
raise Exception('Invalid filter type.')
return graph_matrix, support
def mics_eval(preds, input_y, train_mask, valid_mask, test_mask, t_sigma=None):
"""evaluate the prediction for GCNN
Args:
preds (ndarray): GCNN prediction
input_y (ndarray): original y data
train_mask (ndarray): mask of training subjects
valid_mask (ndarray): mask of validation subjects
test_mask (ndarray): mask of testing subjects
t_sigma (float, optional): std of training y data, only used if sex is
not the behavioral measure
Returns:
Tuple: if t_sigma is None, return the accuracy of GCNN prediction. If
t_sigma is not None, return the loss, correlation and MAE result.
"""
val_index = np.nonzero(valid_mask)[0]
tes_index = np.nonzero(test_mask)[0]
if t_sigma is None:
val_pred = np.argmax(preds[val_index, :], axis=1)
tes_pred = np.argmax(preds[tes_index, :], axis=1)
tra_pred = np.argmax(preds[train_mask, :], axis=1)
val_real = np.argmax(input_y[val_index, :], axis=1)
tes_real = np.argmax(input_y[tes_index, :], axis=1)
tra_real = np.argmax(input_y[train_mask, :], axis=1)
val_auc = (val_pred == val_real).mean()
tes_auc = (tes_pred == tes_real).mean()
tra_auc = (tra_pred == tra_real).mean()
return [val_auc, tes_auc, tra_auc]
val_pred = np.squeeze(preds[val_index])
tes_pred = np.squeeze(preds[tes_index])
tra_pred = np.squeeze(preds[train_mask])
val_real = np.squeeze(input_y[val_index])
tes_real = np.squeeze(input_y[tes_index])
tra_real = np.squeeze(input_y[train_mask])
val_los = np.mean(np.square(val_pred - val_real), axis=-1)
tes_los = np.mean(np.square(tes_pred - tes_real), axis=-1)
from __future__ import print_function, division
import numpy as np
from scipy.interpolate import RegularGridInterpolator, interpn
import scipy.ndimage.interpolation as spinterp
import scipy.ndimage.filters as spfilter
from geometry import gencoords, gencoords_centermask
import pyximport; pyximport.install(setup_args={"include_dirs": np.get_include()}, reload_support=True)
import sparsemul
def compute_density_moments(M, mu=None):
N = M.shape[0]
absM = (M**2).reshape((N**3, 1))
absM /= np.sum(absM)
coords = gencoords(N, 3).reshape((N**3, 3))
if mu is None:
wcoords = coords.reshape((N**3, 3)) * absM
mu = np.sum(wcoords, axis=0).reshape((1, 3))
wccoords = np.sqrt(absM / N**3) * (coords - mu)
covar = np.dot(wccoords.T, wccoords)
return mu, covar
def rotate_density(M, R, t=None, upsamp=1.0):
assert len(M.shape) == 3
N = M.shape[0]
Nup = int(np.round(N * upsamp))
# print "Upsampling by", upsamp, "to", Nup, "^3"
coords = gencoords(Nup, 3).reshape((Nup**3, 3)) / float(upsamp)
if t is None:
interp_coords = np.transpose(np.dot(coords, R.T)).reshape(
(3, Nup, Nup, Nup)) + int(N / 2)
else:
interp_coords = np.transpose(
np.dot(coords, R.T) + t).reshape((3, Nup, Nup, Nup)) + int(N / 2)
out = spinterp.map_coordinates(M, interp_coords, order=1)
return out
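# Hypothetical sanity check (an illustrative assumption, not in the original
# file): rotating by the identity matrix with no translation should give back
# the input volume up to interpolation error.
def _example_rotate_density_identity():
    M = np.random.RandomState(0).rand(8, 8, 8)
    out = rotate_density(M, np.eye(3))
    return np.abs(out - M).max()   # expected to be ~0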
def align_density(M, upsamp=1.0):
assert len(M.shape) == 3
(mu, covar) = compute_density_moments(M)
(w, V) = np.linalg.eigh(covar)
idx = w.argsort()
w = w[idx]
V = V[:, idx]
if np.linalg.det(V) < 0:
# ensure we have a valid rotation
V[:, 0] *= -1
out = rotate_density(M, V, mu, upsamp)
# (mu,covar) = compute_density_moments(out)
return out, V
def rotational_average(M, maxRadius=None, doexpand=False, normalize=True, return_cnt=False):
N = M.shape[0]
D = len(M.shape)
assert D >= 2, 'Cannot rotationally average a 1D array'
pts = gencoords(N, D).reshape((N**D, D))
r = np.sqrt(np.sum(pts**2, axis=1)).reshape(M.shape)
ir = np.require(np.floor(r), dtype='uint32')
f = r - ir
if maxRadius is None:
maxRadius = int(np.ceil(np.sqrt(D) * N / D))
if maxRadius < np.max(ir) + 2:
valid_ir = ir + 1 < maxRadius
ir = ir[valid_ir]
f = f[valid_ir]
M = M[valid_ir]
if np.iscomplexobj(M):
raps = 1.0j * np.bincount(ir, weights=(1 - f) * M.imag, minlength=maxRadius) + \
np.bincount(ir + 1, weights=f * M.imag, minlength=maxRadius)
raps += np.bincount(ir, weights=(1 - f) * M.real, minlength=maxRadius) + \
np.bincount(ir + 1, weights=f * M.real, minlength=maxRadius)
else:
raps = np.bincount(ir, weights=(1 - f) * M, minlength=maxRadius) + \
np.bincount(ir + 1, weights=f * M, minlength=maxRadius)
raps = raps[0:maxRadius]
if normalize or return_cnt:
cnt = np.bincount(ir, weights=(1 - f), minlength=maxRadius) + \
np.bincount(ir + 1, weights=f, minlength=maxRadius)
cnt = cnt[0:maxRadius]
if normalize:
raps[cnt <= 0] = 0
raps[cnt > 0] /= cnt[cnt > 0]
if doexpand:
raps = rotational_expand(raps, N, D)
if return_cnt:
return raps, cnt
else:
return raps
def rotational_expand(vals, N, D, interp_order=1):
interp_coords = np.sqrt(np.sum(gencoords(N, D).reshape(
(N**D, D))**2, axis=1)).reshape((1,) + D * (N,))
if np.iscomplexobj(vals):
rotexp = 1.0j * spinterp.map_coordinates(vals.imag, interp_coords,
order=interp_order, mode='nearest')
rotexp += spinterp.map_coordinates(vals.real, interp_coords,
order=interp_order, mode='nearest')
else:
rotexp = spinterp.map_coordinates(vals, interp_coords,
order=interp_order, mode='nearest')
return rotexp
def resize_ndarray(D, nsz, axes):
zfs = tuple([float(nsz[i]) / float(D.shape[i]) if i in axes else 1
for i in range(len(nsz))])
sigmas = tuple([0.66 / zfs[i] if i in axes else 0
for i in range(len(nsz))])
# print(zfs, sigmas, D.shape)
# print("blurring..."); sys.stdout.flush()
blurD = spfilter.gaussian_filter(D, sigma=sigmas, order=0, mode='constant')
# print("zooming..."); sys.stdout.flush()
return spinterp.zoom(blurD, zfs, order=0)
def compute_fsc(VF1, VF2, maxrad, width=1.0, thresholds=[0.143, 0.5]):
assert VF1.shape == VF2.shape
N = VF1.shape[0]
r = np.sqrt(np.sum(gencoords(N, 3).reshape((N, N, N, 3))**2, axis=3))
prev_rad = -np.inf
fsc = []
rads = []
resInd = len(thresholds) * [None]
for i, rad in enumerate(np.arange(1.5, maxrad * N / 2.0, width)):
cxyz = np.logical_and(r >= prev_rad, r < rad)
cF1 = VF1[cxyz]
cF2 = VF2[cxyz]
if len(cF1) == 0:
break
cCorr = np.vdot(cF1, cF2) / np.sqrt(np.vdot(cF1, cF1) * np.vdot(cF2, cF2))
for j, thr in enumerate(thresholds):
if cCorr < thr and resInd[j] is None:
resInd[j] = i
fsc.append(cCorr.real)
rads.append(rad / (N / 2.0))
prev_rad = rad
fsc = np.array(fsc)
rads = np.array(rads)
resolutions = []
for rI, thr in zip(resInd, thresholds):
if rI is None:
resolutions.append(rads[-1])
elif rI == 0:
resolutions.append(np.inf)
else:
x = (thr - fsc[rI]) / (fsc[rI - 1] - fsc[rI])
resolutions.append(x * rads[rI - 1] + (1 - x) * rads[rI])
return rads, fsc, thresholds, resolutions
# So the key is to make sure that the image is zero at the Nyquist frequency (index n/2).
# The interpolation idea is to assume that the actual function f(x,y) is band-limited, i.e.
# made up of exactly the frequency components in the FFT. Since we are interpolating in
# frequency space, the assumption is that the signal F(wx,wy) is band-limited in frequency space.
# This means that its Fourier transform should have components below the Nyquist frequency.
# But the Fourier transform of F(wx,wy) is ~f(x,y), since FFT and iFFT are essentially the same.
# So f(x,y) must vanish at the Nyquist index (and preferably even before that), which means the
# n/2 row and n/2 column (and n/2 page) in image space.
# Since the image will be zero at the edges once some windowing (circular or hamming etc.) is
# applied, we can just fftshift the image, since translations do not change the FFT except by
# phase. This makes the Nyquist components zero and everything is fine and dandy. Even linear
# interpolation works then, except it leaves ghosting.
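# Illustrative sketch of the remark above (added here as an assumption, not
# part of the original file): a circular shift such as fftshift changes only
# the phase of the DFT, so the magnitude spectrum is untouched while prior
# windowing keeps the shifted image (near) zero at the Nyquist row/column.
def _example_fftshift_phase_only():
    rng = np.random.RandomState(0)
    img = rng.randn(8, 8)
    F_orig = np.fft.fft2(img)
    F_shift = np.fft.fft2(np.fft.fftshift(img))
    return np.allclose(np.abs(F_orig), np.abs(F_shift))   # True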
def getslices(V, SLOP, res=None):
vV = V.reshape((-1,))
assert vV.shape[0] == SLOP.shape[1]
if res is None:
res = np.zeros(SLOP.shape[0], dtype=vV.dtype)
else:
assert res.shape[0] == SLOP.shape[0]
assert len(res.shape) == 1 or res.shape[1] == 1
assert res.dtype == vV.dtype
res[:] = 0
if np.iscomplexobj(vV):
sparsemul.spdot(SLOP, vV.real, res.real)
sparsemul.spdot(SLOP, vV.imag, res.imag)
else:
sparsemul.spdot(SLOP, vV, res)
return res
def getslices_interp(V, Rs, rad, beamstop_rad=None, res=None):
ndim = V.ndim
assert ndim > 1
num_slices = len(Rs)
# if ndim == 2:
# assert Rs.shape[1] == 2
# elif ndim == 3:
# assert Rs.shape[1] == 3
# Rs.shape[2] == 2
N = V.shape[0]
center = int(N/2)
if beamstop_rad is None:
coords = gencoords(N, 2, rad)
else:
coords = gencoords_centermask(N, 2, rad, beamstop_rad)
N_T = coords.shape[0]
grid = (np.arange(N),) * ndim
slicing_func = RegularGridInterpolator(grid, V, bounds_error=False, fill_value=0.0)
if res is None:
res = np.zeros((num_slices, N_T), dtype=V.dtype)
else:
assert res.shape[0] == Rs.shape[0]
assert res.dtype == V.dtype
res[:] = 0
for i, R in enumerate(Rs):
rotated_coords = R.dot(coords.T).T + center
res[i] = slicing_func(rotated_coords)
# res[i] = interpn(grid, V, rotated_coords)
# res[i] = spinterp.map_coordinates(V, rotated_coords.T)
return res
def merge_slices(slices, Rs, N, rad, beamstop_rad=None, res=None):
center = int(N/2)
if beamstop_rad is None:
coords = gencoords(N, 2, rad)
else:
coords = gencoords_centermask(N, 2, rad, beamstop_rad)
assert slices.shape[1] == coords.shape[0]
if res is None:
res = np.zeros((N,) * 3, dtype=np.float32)
else:
assert res.shape == (N,) * 3
assert res.dtype == slices.dtype
res[:] = 0.0
model_weight = np.zeros((N,) * 3)
for i, R in enumerate(Rs):
curr_slices = slices[i, :]
for j, xy in enumerate(coords):
voxel_intensity = curr_slices[j]
rot_coord = R.dot(xy.T).reshape(1, -1)[0] + center
rot_coord = np.int_(np.round(rot_coord))
in_x = rot_coord[0] >= 0 and rot_coord[0] < N
in_y = rot_coord[1] >= 0 and rot_coord[1] < N
in_z = rot_coord[2] >= 0 and rot_coord[2] < N
if in_x and in_y and in_z:
index_coord = tuple(rot_coord)
model_voxel_intensity = res[index_coord]
model_weight[index_coord] += 1
voxel_weight = model_weight[index_coord]
delta_intensity = voxel_intensity - model_voxel_intensity
model_voxel_intensity += delta_intensity / voxel_weight
res[index_coord] = model_voxel_intensity
return res
# 3D Densities
# ===============================================================================================
def window(v, func='hanning', params=None):
""" applies a windowing function to the 3D volume v (inplace, as reference) """
N = v.shape[0]
D = v.ndim
if any([d != N for d in list(v.shape)]) or D != 3:
raise Exception("Error: Volume is not Cube.")
def apply_seperable_window(v, w):
v *= np.reshape(w, (-1, 1, 1))
v *= np.reshape(w, (1, -1, 1))
v *= np.reshape(w, (1, 1, -1))
if func == "hanning":
w = np.hanning(N)
apply_seperable_window(v, w)
elif func == 'hamming':
w = np.hamming(N)
apply_seperable_window(v, w)
elif func == 'gaussian':
raise Exception('Unimplemented')
elif func == 'circle':
c = gencoords(N, 3)
if params is None:
r = N / 2 - 1
else:
r = params[0] * (N / 2 * 1)
v *= (np.sum(c**2, 1) < (r ** 2)).reshape((N, N, N))
elif func == 'box':
v[:, 0, 0] = 0.0
v[0, :, 0] = 0.0
v[0, 0, :] = 0.0
else:
raise Exception("Error: Window Type Not Supported")
def generate_phantom_density(N, window, sigma, num_blobs, seed=None):
if seed is not None:
np.random.seed(seed)
M = np.zeros((N, N, N), dtype=np.float32)
coords = gencoords(N, 3).reshape((N**3, 3))
inside_window = np.sum(coords**2, axis=1).reshape((N, N, N)) < window**2
curr_c = np.array([0.0, 0.0, 0.0])
curr_n = 0
while curr_n < num_blobs:
csigma = sigma * np.exp(0.25 * np.random.randn())
# Copyright (c) 2018-present, Facebook, Inc.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
#
import torch
import numpy as np
# PyTorch-backed implementations
def qmul(q, r):
"""
Multiply quaternion(s) q with quaternion(s) r.
Expects two equally-sized tensors of shape (*, 4), where * denotes any number of dimensions.
Returns q*r as a tensor of shape (*, 4).
"""
assert q.shape[-1] == 4
assert r.shape[-1] == 4
original_shape = q.shape
# Compute outer product
terms = torch.bmm(r.view(-1, 4, 1), q.view(-1, 1, 4))
w = terms[:, 0, 0] - terms[:, 1, 1] - terms[:, 2, 2] - terms[:, 3, 3]
x = terms[:, 0, 1] + terms[:, 1, 0] - terms[:, 2, 3] + terms[:, 3, 2]
y = terms[:, 0, 2] + terms[:, 1, 3] + terms[:, 2, 0] - terms[:, 3, 1]
z = terms[:, 0, 3] - terms[:, 1, 2] + terms[:, 2, 1] + terms[:, 3, 0]
return torch.stack((w, x, y, z), dim=1).view(original_shape)
def qrot(q, v):
"""
Rotate vector(s) v about the rotation described by quaternion(s) q.
Expects a tensor of shape (*, 4) for q and a tensor of shape (*, 3) for v,
where * denotes any number of dimensions.
Returns a tensor of shape (*, 3).
"""
assert q.shape[-1] == 4
assert v.shape[-1] == 3
assert q.shape[:-1] == v.shape[:-1]
original_shape = list(v.shape)
q = q.view(-1, 4)
v = v.view(-1, 3)
qvec = q[:, 1:]
uv = torch.cross(qvec, v, dim=1)
uuv = torch.cross(qvec, uv, dim=1)
return (v + 2 * (q[:, :1] * uv + uuv)).view(original_shape)
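# Hypothetical sketch (illustration only): a 90-degree rotation about the
# z axis, encoded as the quaternion (w, x, y, z) = (sqrt(.5), 0, 0, sqrt(.5)),
# maps the x unit vector onto the y unit vector.
def _example_qrot():
    q = torch.tensor([[2 ** -0.5, 0.0, 0.0, 2 ** -0.5]])
    v = torch.tensor([[1.0, 0.0, 0.0]])
    return qrot(q, v)   # ~ tensor([[0., 1., 0.]])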
def qeuler(q, order, epsilon=0):
"""
Convert quaternion(s) q to Euler angles.
Expects a tensor of shape (*, 4), where * denotes any number of dimensions.
Returns a tensor of shape (*, 3).
"""
assert q.shape[-1] == 4
original_shape = list(q.shape)
original_shape[-1] = 3
q = q.view(-1, 4)
q0 = q[:, 0]
q1 = q[:, 1]
q2 = q[:, 2]
q3 = q[:, 3]
if order == 'xyz':
x = torch.atan2(2 * (q0 * q1 - q2 * q3), 1 - 2*(q1 * q1 + q2 * q2))
y = torch.asin(torch.clamp(2 * (q1 * q3 + q0 * q2), -1+epsilon, 1-epsilon))
z = torch.atan2(2 * (q0 * q3 - q1 * q2), 1 - 2*(q2 * q2 + q3 * q3))
elif order == 'yzx':
x = torch.atan2(2 * (q0 * q1 - q2 * q3), 1 - 2*(q1 * q1 + q3 * q3))
y = torch.atan2(2 * (q0 * q2 - q1 * q3), 1 - 2*(q2 * q2 + q3 * q3))
z = torch.asin(torch.clamp(2 * (q1 * q2 + q0 * q3), -1+epsilon, 1-epsilon))
elif order == 'zxy':
x = torch.asin(torch.clamp(2 * (q0 * q1 + q2 * q3), -1+epsilon, 1-epsilon))
y = torch.atan2(2 * (q0 * q2 - q1 * q3), 1 - 2*(q1 * q1 + q2 * q2))
z = torch.atan2(2 * (q0 * q3 - q1 * q2), 1 - 2*(q1 * q1 + q3 * q3))
elif order == 'xzy':
x = torch.atan2(2 * (q0 * q1 + q2 * q3), 1 - 2*(q1 * q1 + q3 * q3))
y = torch.atan2(2 * (q0 * q2 + q1 * q3), 1 - 2*(q2 * q2 + q3 * q3))
z = torch.asin(torch.clamp(2 * (q0 * q3 - q1 * q2), -1+epsilon, 1-epsilon))
elif order == 'yxz':
x = torch.asin(torch.clamp(2 * (q0 * q1 - q2 * q3), -1+epsilon, 1-epsilon))
y = torch.atan2(2 * (q1 * q3 + q0 * q2), 1 - 2*(q1 * q1 + q2 * q2))
z = torch.atan2(2 * (q1 * q2 + q0 * q3), 1 - 2*(q1 * q1 + q3 * q3))
elif order == 'zyx':
x = torch.atan2(2 * (q0 * q1 + q2 * q3), 1 - 2*(q1 * q1 + q2 * q2))
y = torch.asin(torch.clamp(2 * (q0 * q2 - q1 * q3), -1+epsilon, 1-epsilon))
z = torch.atan2(2 * (q0 * q3 + q1 * q2), 1 - 2*(q2 * q2 + q3 * q3))
else:
raise ValueError("Invalid Euler angle order: %s" % order)
return torch.stack((x, y, z), dim=1).view(original_shape)
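# Hypothetical sketch (illustration only): the identity quaternion corresponds
# to zero Euler angles for every supported axis order.
def _example_qeuler_identity():
    q = torch.tensor([[1.0, 0.0, 0.0, 0.0]])
    return [qeuler(q, order) for order in ('xyz', 'zyx')]   # each ~[[0., 0., 0.]]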
# Numpy-backed implementations
def qmul_np(q, r):
q = torch.from_numpy(q).contiguous()
r = torch.from_numpy(r).contiguous()
return qmul(q, r).numpy()
def qrot_np(q, v):
q = torch.from_numpy(q).contiguous()
v = torch.from_numpy(v).contiguous()
return qrot(q, v).numpy()
def qeuler_np(q, order, epsilon=0, device=None):
if device is not None:
q = torch.from_numpy(q).to(device=device)
return qeuler(q, order, epsilon).cpu().numpy()
else:
q = torch.from_numpy(q).contiguous()
return qeuler(q, order, epsilon).numpy()
def qfix(q):
"""
Enforce quaternion continuity across the time dimension by selecting
the representation (q or -q) with minimal distance (or, equivalently, maximal dot product)
between two consecutive frames.
Expects a tensor of shape (L, J, 4), where L is the sequence length and J is the number of joints.
Returns a tensor of the same shape.
"""
assert len(q.shape) == 3
assert q.shape[-1] == 4
result = q.copy()
dot_products = np.sum(q[1:]*q[:-1], axis=2)
mask = dot_products < 0
mask = (np.cumsum(mask, axis=0)%2).astype(bool)
result[1:][mask] *= -1
return result
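# Hypothetical sketch (illustration only): qfix negates frames whose quaternion
# points "away" from the previous frame, restoring temporal continuity.
def _example_qfix():
    q = np.array([[[1.0, 0.0, 0.0, 0.0]],
                  [[-0.999, 0.01, 0.0, 0.0]]])   # (L=2, J=1, 4), second frame flipped
    fixed = qfix(q)
    return fixed[1, 0, 0] > 0   # True: the flipped frame was negated back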
def expmap_to_quaternion(e):
"""
Convert axis-angle rotations (aka exponential maps) to quaternions.
Stable formula from "Practical Parameterization of Rotations Using the Exponential Map".
Expects a tensor of shape (*, 3), where * denotes any number of dimensions.
Returns a tensor of shape (*, 4).
"""
assert e.shape[-1] == 3
original_shape = list(e.shape)
original_shape[-1] = 4
e = e.reshape(-1, 3)
theta = np.linalg.norm(e, axis=1).reshape(-1, 1)
w = np.cos(0.5*theta).reshape(-1, 1)
xyz = 0.5*np.sinc(0.5*theta/np.pi)*e
return np.concatenate((w, xyz), axis=1).reshape(original_shape)
def euler_to_quaternion(e, order):
"""
Convert Euler angles to quaternions.
"""
assert e.shape[-1] == 3
original_shape = list(e.shape)
original_shape[-1] = 4
e = e.reshape(-1, 3)
x = e[:, 0]
y = e[:, 1]
z = e[:, 2]
rx = np.stack((np.cos(x/2), np.sin(x/2), np.zeros_like(x), np.zeros_like(x)), axis=1)
ry = np.stack((np.cos(y/2), np.zeros_like(y), np.sin(y/2), np.zeros_like(y)), axis=1)
rz = np.stack((np.cos(z/2), np.zeros_like(z), np.zeros_like(z), np.sin(z/2)), axis=1)
import numpy as np
import copy
import random
from .network import *
class Generation:
# generate initial population
def __init__(self, pop_size=10, best_candidate_size=2, mutation_rate=0.5, mutation_range=(0.8, 1.2), crossing_points=1):
self.max_population_size = pop_size
self.population = [Network() for i in range(pop_size)]
self.best_candidate = []
self.best_candidate_size = best_candidate_size if best_candidate_size < pop_size else pop_size
self.mutation_rate = mutation_rate
self.mutation_range = mutation_range
self.crossing_points = crossing_points
def loadPopulation(self, candidates):
self.population = []
for c in candidates:
self.population.append(Network(np.array(c[0]), np.array(c[1])))
from bs4 import BeautifulSoup
import numpy as np
from PIL import ImageOps
from gallica_autobib.gallipy import Resource
from gallica_autobib.process import extract_image
from PyPDF4 import PdfFileReader
from io import BytesIO
import matplotlib.pyplot as plt
import matplotlib.image as mpimg
from matplotlib.patches import Rectangle
from collections import namedtuple
Point = namedtuple("Point", ["x", "y"])
Box = namedtuple("Box", ["upper", "lower"])
ark = "https://gallica.bnf.fr/ark:/12148/bpt6k65545564"
r = Resource(ark)
def fetch_stuff(pno):
pg = r.content_sync(startview=pno, nviews=1, mode="pdf").value
reader = PdfFileReader(BytesIO(pg))
data, type_ = extract_image(reader.getPage(2))
ocr = r.ocr_data_sync(view=pno).value
soup = BeautifulSoup(ocr.decode())
upper_bound = [0, 0]
lower_bound = [0, 0]
page = soup.find("page")
height, width = int(page.get("height")), int(page.get("width"))
xscale = data.height / height
yscale = data.width / width
height *= yscale
printspace = soup.find("printspace")
text_height = round(int(printspace.get("height")) * yscale)
text_width = round(int(printspace.get("width")) * xscale)
vpos = int(printspace.get("vpos")) * yscale
hpos = int(printspace.get("hpos")) * xscale
upper = Point(round(hpos), round(vpos))
return upper, text_height, text_width, data, height
def gen_doc_data():
pno = 128
upper, text_height, text_width, data, height = fetch_stuff(pno)
fig, ax = plt.subplots()
plt.imshow(data)
text_box = ax.add_patch(
Rectangle(
upper, text_width, text_height, edgecolor="red", facecolor="none", lw=2
)
)
fig.savefig(
"docs/img/content_box.svg", bbox_inches="tight", transparent=True, dpi=72
)
ax2 = ax.twiny()
a = np.array(ImageOps.grayscale(data))
mean = a.mean(axis=1)
ax2.plot(mean, range(len(mean)), label="mean")
gradient = np.gradient(mean) + 70
ax2.plot(gradient, range(len(gradient)), color="green", label="differential")
plt.legend()
fig.savefig("docs/img/mean.svg", bbox_inches="tight", transparent=True, dpi=72)
gstd = np.std(gradient)
gmean = gradient.mean()
ax2.vlines([gmean - 1.5 * gstd, gmean + 1.5 * gstd], 0, data.height, color="orange")
fig.savefig(
"docs/img/mean_bounds.svg", bbox_inches="tight", transparent=True, dpi=72
)
search = round(height * 0.05)
upper_bound = upper.y - search
search_height = text_height + 2 * search
search_upper = Point(upper.x, upper_bound)
search_box = ax.add_patch(
Rectangle(
search_upper,
text_width,
search_height,
edgecolor="green",
facecolor="none",
lw=1,
)
)
fig.savefig("docs/img/search.svg", bbox_inches="tight", transparent=True, dpi=72)
upper_search = gradient[upper_bound : upper.y]
lower_search = gradient[upper.y + text_height : upper_bound + search_height]
lower_thresh = gmean - 1.5 * gstd
upper_thresh = gmean + 1.5 * gstd
peaked = 0
for up, x in enumerate(reversed(upper_search)):
if not peaked and x >= upper_thresh:
peaked = 1
if peaked and x <= lower_thresh:
peaked = 2
print("Line above detected.")
break
up = up if peaked == 2 else 0
peaked = 0
for down, x in enumerate(lower_search):
if not peaked and x <= lower_thresh:
peaked = 1
if peaked and x >= upper_thresh:
peaked = 2
print("Line below detected.")
break
down = down if peaked == 2 else 0
final_upper = Point(upper.x, upper.y - up)
final_height = text_height + up + down
search_box = ax.add_patch(
Rectangle(
final_upper,
text_width,
final_height,
edgecolor="pink",
facecolor="none",
lw=1,
)
)
fig.savefig("docs/img/searched.svg", bbox_inches="tight", transparent=True, dpi=72)
stretch = round(height * 0.005)
streched_upper = Point(final_upper[0] - stretch, final_upper[1] - 2 * stretch)
stretched_width = text_width + 2 * stretch
stretched_height = final_height + 4 * stretch
fig, ax = plt.subplots()
plt.imshow(data)
final_box = ax.add_patch(
Rectangle(
streched_upper,
stretched_width,
stretched_height,
edgecolor="black",
facecolor="none",
lw=1,
)
)
fig.savefig("docs/img/stretched.svg", bbox_inches="tight", transparent=True, dpi=72)
def process_page(pno):
upper, text_height, text_width, data, height = fetch_stuff(pno)
fig, ax = plt.subplots()
plt.imshow(data)
text_box = ax.add_patch(
Rectangle(
upper, text_width, text_height, edgecolor="red", facecolor="none", lw=2
)
)
ax2 = ax.twiny()
a = np.array(ImageOps.grayscale(data))
mean = a.mean(axis=1)
gradient = np.gradient(mean) + 70
ax2.plot(gradient, range(len(gradient)), color="green", label="differential")
gstd = np.std(gradient)
import numpy as np
class SudokuState:
# dict of constraints, RCV as keys
get_constraints = {}
# Populate get_constraints
for r in range(9):
block_y = r // 3
for c in range(9):
block_x = c // 3
b = (block_y * 3) + block_x
for v in range(1, 10):
# Truncates r, c down to nearest multiple of 3
# Get block id
# 0 1 2
# 3 4 5
# 6 7 8
get_constraints[(r, c, v)] = [
# Every cell must have a value, (x, y)
("Cell", (r, c)),
# Every row must contain each value, (row, val)
("Row", (r, v)),
# Every column must contain each value, (column, val)
("Col", (c, v)),
# Every block must contain each value, (block, val)
("Block", (b, v))
]
def __init__(self, values: np.ndarray):
"""
Create a new Sudoku State.
Calculates matrix A from passed values
:param values: 9x9 grid of initial state
"""
self.solvable = True
self.solution = {}
self.values = values
# matrix A
self.a = {
c: set() for c in (
# Every cell must contain a value, (col, row)
[("Cell", (x, y)) for x in range(9) for y in range(9)] +
# Every row must contain each value, (row, val)
[("Row", (row, val)) for row in range(9) for val in range(1, 10)] +
# Every column must contain each value, (column, val)
[("Col", (col, val)) for col in range(9) for val in range(1, 10)] +
# Every block must contain each value, (block, val)
[("Block", (blk, val)) for blk in range(9) for val in range(1, 10)]
)
}
# Populate A with the associated RCVs
for rcv, consts in SudokuState.get_constraints.items():
for c in consts:
self.a[c].add(rcv)
# Update constraints to reflect initial state
for (y, x), value in np.ndenumerate(values):
"""Kernels for Gaussian process regression and classification.
The kernels in this module allow kernel-engineering, i.e., they can be
combined via the "+" and "*" operators or be exponentiated with a scalar
via "**". These sum and product expressions can also contain scalar values,
which are automatically converted to a constant kernel.
All kernels allow (analytic) gradient-based hyperparameter optimization.
The space of hyperparameters can be specified by giving lower und upper
boundaries for the value of each hyperparameter (the search space is thus
rectangular). Instead of specifying bounds, hyperparameters can also be
declared to be "fixed", which causes these hyperparameters to be excluded from
optimization.
"""
# Author: <NAME> <<EMAIL>>
# License: BSD 3 clause
# Note: this module is strongly inspired by the kernel module of the george
# package.
from abc import ABCMeta, abstractmethod
from collections import namedtuple
import math
from inspect import signature
import warnings
import numpy as np
from scipy.special import kv, gamma
from scipy.spatial.distance import pdist, cdist, squareform
from ..metrics.pairwise import pairwise_kernels
from ..base import clone
def _check_length_scale(X, length_scale):
length_scale = np.squeeze(length_scale).astype(float)
if np.ndim(length_scale) > 1:
raise ValueError("length_scale cannot be of dimension greater than 1")
if np.ndim(length_scale) == 1 and X.shape[1] != length_scale.shape[0]:
raise ValueError("Anisotropic kernel must have the same number of "
"dimensions as data (%d!=%d)"
% (length_scale.shape[0], X.shape[1]))
return length_scale
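# Hypothetical sketch (illustration only, not part of the original module):
# the helper accepts a scalar (isotropic) or a per-feature vector
# (anisotropic) length scale.
def _example_check_length_scale():
    X = np.zeros((5, 3))
    iso = _check_length_scale(X, 1.0)                 # 0-d array
    aniso = _check_length_scale(X, [1.0, 2.0, 3.0])   # shape (3,)
    return iso.ndim, aniso.shape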
class Hyperparameter(namedtuple('Hyperparameter',
('name', 'value_type', 'bounds',
'n_elements', 'fixed'))):
"""A kernel hyperparameter's specification in form of a namedtuple.
.. versionadded:: 0.18
Attributes
----------
name : string
The name of the hyperparameter. Note that a kernel using a
hyperparameter with name "x" must have the attributes self.x and
self.x_bounds
value_type : string
The type of the hyperparameter. Currently, only "numeric"
hyperparameters are supported.
bounds : pair of floats >= 0 or "fixed"
The lower and upper bound on the parameter. If n_elements>1, a pair
of 1d array with n_elements each may be given alternatively. If
the string "fixed" is passed as bounds, the hyperparameter's value
cannot be changed.
n_elements : int, default=1
The number of elements of the hyperparameter value. Defaults to 1,
which corresponds to a scalar hyperparameter. n_elements > 1
corresponds to a hyperparameter which is vector-valued,
such as, e.g., anisotropic length-scales.
fixed : bool, default: None
Whether the value of this hyperparameter is fixed, i.e., cannot be
changed during hyperparameter tuning. If None is passed, the "fixed" is
derived based on the given bounds.
"""
# A raw namedtuple is very memory efficient as it packs the attributes
# in a struct to get rid of the __dict__ of attributes in particular it
# does not copy the string for the keys on each instance.
# By deriving a namedtuple class just to introduce the __init__ method we
# would also reintroduce the __dict__ on the instance. By telling the
# Python interpreter that this subclass uses static __slots__ instead of
# dynamic attributes. Furthermore we don't need any additional slot in the
# subclass so we set __slots__ to the empty tuple.
__slots__ = ()
def __new__(cls, name, value_type, bounds, n_elements=1, fixed=None):
if not isinstance(bounds, str) or bounds != "fixed":
bounds = np.atleast_2d(bounds)
if n_elements > 1: # vector-valued parameter
if bounds.shape[0] == 1:
bounds = np.repeat(bounds, n_elements, 0)
elif bounds.shape[0] != n_elements:
raise ValueError("Bounds on %s should have either 1 or "
"%d dimensions. Given are %d"
% (name, n_elements, bounds.shape[0]))
if fixed is None:
fixed = isinstance(bounds, str) and bounds == "fixed"
return super(Hyperparameter, cls).__new__(
cls, name, value_type, bounds, n_elements, fixed)
# This is mainly a testing utility to check that two hyperparameters
# are equal.
def __eq__(self, other):
return (self.name == other.name and
self.value_type == other.value_type and
np.all(self.bounds == other.bounds) and
self.n_elements == other.n_elements and
self.fixed == other.fixed)
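# Hypothetical sketch (illustration only): declaring a tunable and a fixed
# hyperparameter specification.
def _example_hyperparameter_specs():
    tunable = Hyperparameter("length_scale", "numeric", (1e-5, 1e5))
    frozen = Hyperparameter("noise_level", "numeric", "fixed")
    return tunable.fixed, frozen.fixed   # (False, True)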
class Kernel(metaclass=ABCMeta):
"""Base class for all kernels.
.. versionadded:: 0.18
"""
def get_params(self, deep=True):
"""Get parameters of this kernel.
Parameters
----------
deep : boolean, optional
If True, will return the parameters for this estimator and
contained subobjects that are estimators.
Returns
-------
params : mapping of string to any
Parameter names mapped to their values.
"""
params = dict()
# introspect the constructor arguments to find the model parameters
# to represent
cls = self.__class__
init = getattr(cls.__init__, 'deprecated_original', cls.__init__)
init_sign = signature(init)
args, varargs = [], []
for parameter in init_sign.parameters.values():
if (parameter.kind != parameter.VAR_KEYWORD and
parameter.name != 'self'):
args.append(parameter.name)
if parameter.kind == parameter.VAR_POSITIONAL:
varargs.append(parameter.name)
if len(varargs) != 0:
raise RuntimeError("scikit-learn kernels should always "
"specify their parameters in the signature"
" of their __init__ (no varargs)."
" %s doesn't follow this convention."
% (cls, ))
for arg in args:
try:
value = getattr(self, arg)
except AttributeError:
warnings.warn('From version 0.24, get_params will raise an '
'AttributeError if a parameter cannot be '
'retrieved as an instance attribute. Previously '
'it would return None.',
FutureWarning)
value = None
params[arg] = value
return params
def set_params(self, **params):
"""Set the parameters of this kernel.
The method works on simple kernels as well as on nested kernels.
The latter have parameters of the form ``<component>__<parameter>``
so that it's possible to update each component of a nested object.
Returns
-------
self
"""
if not params:
# Simple optimisation to gain speed (inspect is slow)
return self
valid_params = self.get_params(deep=True)
for key, value in params.items():
split = key.split('__', 1)
if len(split) > 1:
# nested objects case
name, sub_name = split
if name not in valid_params:
raise ValueError('Invalid parameter %s for kernel %s. '
'Check the list of available parameters '
'with `kernel.get_params().keys()`.' %
(name, self))
sub_object = valid_params[name]
sub_object.set_params(**{sub_name: value})
else:
# simple objects case
if key not in valid_params:
raise ValueError('Invalid parameter %s for kernel %s. '
'Check the list of available parameters '
'with `kernel.get_params().keys()`.' %
(key, self.__class__.__name__))
setattr(self, key, value)
return self
def clone_with_theta(self, theta):
"""Returns a clone of self with given hyperparameters theta.
Parameters
----------
theta : array, shape (n_dims,)
The hyperparameters
"""
cloned = clone(self)
cloned.theta = theta
return cloned
@property
def n_dims(self):
"""Returns the number of non-fixed hyperparameters of the kernel."""
return self.theta.shape[0]
@property
def hyperparameters(self):
"""Returns a list of all hyperparameter specifications."""
r = [getattr(self, attr) for attr in dir(self)
if attr.startswith("hyperparameter_")]
return r
@property
def theta(self):
"""Returns the (flattened, log-transformed) non-fixed hyperparameters.
Note that theta are typically the log-transformed values of the
kernel's hyperparameters as this representation of the search space
is more amenable for hyperparameter search, as hyperparameters like
length-scales naturally live on a log-scale.
Returns
-------
theta : array, shape (n_dims,)
The non-fixed, log-transformed hyperparameters of the kernel
"""
theta = []
params = self.get_params()
for hyperparameter in self.hyperparameters:
if not hyperparameter.fixed:
theta.append(params[hyperparameter.name])
if len(theta) > 0:
return np.log(np.hstack(theta))
else:
return np.array([])
@theta.setter
def theta(self, theta):
"""Sets the (flattened, log-transformed) non-fixed hyperparameters.
Parameters
----------
theta : array, shape (n_dims,)
The non-fixed, log-transformed hyperparameters of the kernel
"""
params = self.get_params()
i = 0
for hyperparameter in self.hyperparameters:
if hyperparameter.fixed:
continue
if hyperparameter.n_elements > 1:
# vector-valued parameter
params[hyperparameter.name] = np.exp(
theta[i:i + hyperparameter.n_elements])
i += hyperparameter.n_elements
else:
params[hyperparameter.name] = np.exp(theta[i])
i += 1
if i != len(theta):
raise ValueError("theta has not the correct number of entries."
" Should be %d; given are %d"
% (i, len(theta)))
self.set_params(**params)
@property
def bounds(self):
"""Returns the log-transformed bounds on the theta.
Returns
-------
bounds : array, shape (n_dims, 2)
The log-transformed bounds on the kernel's hyperparameters theta
"""
bounds = [hyperparameter.bounds
for hyperparameter in self.hyperparameters
if not hyperparameter.fixed]
if len(bounds) > 0:
return np.log(np.vstack(bounds))
else:
return np.array([])
def __add__(self, b):
if not isinstance(b, Kernel):
return Sum(self, ConstantKernel(b))
return Sum(self, b)
def __radd__(self, b):
if not isinstance(b, Kernel):
return Sum(ConstantKernel(b), self)
return Sum(b, self)
def __mul__(self, b):
if not isinstance(b, Kernel):
return Product(self, ConstantKernel(b))
return Product(self, b)
def __rmul__(self, b):
if not isinstance(b, Kernel):
return Product(ConstantKernel(b), self)
return Product(b, self)
def __pow__(self, b):
return Exponentiation(self, b)
def __eq__(self, b):
if type(self) != type(b):
return False
params_a = self.get_params()
params_b = b.get_params()
for key in set(list(params_a.keys()) + list(params_b.keys())):
if np.any(params_a.get(key, None) != params_b.get(key, None)):
return False
return True
def __repr__(self):
return "{0}({1})".format(self.__class__.__name__,
", ".join(map("{0:.3g}".format, self.theta)))
@abstractmethod
def __call__(self, X, Y=None, eval_gradient=False):
"""Evaluate the kernel."""
@abstractmethod
def diag(self, X):
"""Returns the diagonal of the kernel k(X, X).
The result of this method is identical to np.diag(self(X)); however,
it can be evaluated more efficiently since only the diagonal is
evaluated.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Returns
-------
K_diag : array, shape (n_samples_X,)
Diagonal of kernel k(X, X)
"""
@abstractmethod
def is_stationary(self):
"""Returns whether the kernel is stationary. """
class NormalizedKernelMixin:
"""Mixin for kernels which are normalized: k(X, X)=1.
.. versionadded:: 0.18
"""
def diag(self, X):
"""Returns the diagonal of the kernel k(X, X).
The result of this method is identical to np.diag(self(X)); however,
it can be evaluated more efficiently since only the diagonal is
evaluated.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Returns
-------
K_diag : array, shape (n_samples_X,)
Diagonal of kernel k(X, X)
"""
return np.ones(X.shape[0])
class StationaryKernelMixin:
"""Mixin for kernels which are stationary: k(X, Y)= f(X-Y).
.. versionadded:: 0.18
"""
def is_stationary(self):
"""Returns whether the kernel is stationary. """
return True
class CompoundKernel(Kernel):
"""Kernel which is composed of a set of other kernels.
.. versionadded:: 0.18
Parameters
----------
kernels : list of Kernel objects
The other kernels
"""
def __init__(self, kernels):
self.kernels = kernels
def get_params(self, deep=True):
"""Get parameters of this kernel.
Parameters
----------
deep : boolean, optional
If True, will return the parameters for this estimator and
contained subobjects that are estimators.
Returns
-------
params : mapping of string to any
Parameter names mapped to their values.
"""
return dict(kernels=self.kernels)
@property
def theta(self):
"""Returns the (flattened, log-transformed) non-fixed hyperparameters.
Note that theta are typically the log-transformed values of the
kernel's hyperparameters as this representation of the search space
is more amenable for hyperparameter search, as hyperparameters like
length-scales naturally live on a log-scale.
Returns
-------
theta : array, shape (n_dims,)
The non-fixed, log-transformed hyperparameters of the kernel
"""
return np.hstack([kernel.theta for kernel in self.kernels])
@theta.setter
def theta(self, theta):
"""Sets the (flattened, log-transformed) non-fixed hyperparameters.
Parameters
----------
theta : array, shape (n_dims,)
The non-fixed, log-transformed hyperparameters of the kernel
"""
k_dims = self.k1.n_dims
for i, kernel in enumerate(self.kernels):
kernel.theta = theta[i * k_dims:(i + 1) * k_dims]
@property
def bounds(self):
"""Returns the log-transformed bounds on the theta.
Returns
-------
bounds : array, shape (n_dims, 2)
The log-transformed bounds on the kernel's hyperparameters theta
"""
return np.vstack([kernel.bounds for kernel in self.kernels])
def __call__(self, X, Y=None, eval_gradient=False):
"""Return the kernel k(X, Y) and optionally its gradient.
Note that this compound kernel returns the results of all simple kernels
stacked along an additional axis.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Y : array, shape (n_samples_Y, n_features), (optional, default=None)
Right argument of the returned kernel k(X, Y). If None, k(X, X)
is evaluated instead.
eval_gradient : bool (optional, default=False)
Determines whether the gradient with respect to the kernel
hyperparameter is determined.
Returns
-------
K : array, shape (n_samples_X, n_samples_Y, n_kernels)
Kernel k(X, Y)
K_gradient : array, shape (n_samples_X, n_samples_X, n_dims, n_kernels)
The gradient of the kernel k(X, X) with respect to the
hyperparameter of the kernel. Only returned when eval_gradient
is True.
"""
if eval_gradient:
K = []
K_grad = []
for kernel in self.kernels:
K_single, K_grad_single = kernel(X, Y, eval_gradient)
K.append(K_single)
K_grad.append(K_grad_single[..., np.newaxis])
return np.dstack(K), np.concatenate(K_grad, 3)
else:
return np.dstack([kernel(X, Y, eval_gradient)
for kernel in self.kernels])
def __eq__(self, b):
if type(self) != type(b) or len(self.kernels) != len(b.kernels):
return False
return np.all([self.kernels[i] == b.kernels[i]
for i in range(len(self.kernels))])
def is_stationary(self):
"""Returns whether the kernel is stationary. """
return np.all([kernel.is_stationary() for kernel in self.kernels])
def diag(self, X):
"""Returns the diagonal of the kernel k(X, X).
The result of this method is identical to np.diag(self(X)); however,
it can be evaluated more efficiently since only the diagonal is
evaluated.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Returns
-------
K_diag : array, shape (n_samples_X, n_kernels)
Diagonal of kernel k(X, X)
"""
return np.vstack([kernel.diag(X) for kernel in self.kernels]).T
class KernelOperator(Kernel):
"""Base class for all kernel operators.
.. versionadded:: 0.18
"""
def __init__(self, k1, k2):
self.k1 = k1
self.k2 = k2
def get_params(self, deep=True):
"""Get parameters of this kernel.
Parameters
----------
deep : boolean, optional
If True, will return the parameters for this estimator and
contained subobjects that are estimators.
Returns
-------
params : mapping of string to any
Parameter names mapped to their values.
"""
params = dict(k1=self.k1, k2=self.k2)
if deep:
deep_items = self.k1.get_params().items()
params.update(('k1__' + k, val) for k, val in deep_items)
deep_items = self.k2.get_params().items()
params.update(('k2__' + k, val) for k, val in deep_items)
return params
@property
def hyperparameters(self):
"""Returns a list of all hyperparameter."""
r = [Hyperparameter("k1__" + hyperparameter.name,
hyperparameter.value_type,
hyperparameter.bounds, hyperparameter.n_elements)
for hyperparameter in self.k1.hyperparameters]
for hyperparameter in self.k2.hyperparameters:
r.append(Hyperparameter("k2__" + hyperparameter.name,
hyperparameter.value_type,
hyperparameter.bounds,
hyperparameter.n_elements))
return r
@property
def theta(self):
"""Returns the (flattened, log-transformed) non-fixed hyperparameters.
Note that theta are typically the log-transformed values of the
kernel's hyperparameters as this representation of the search space
is more amenable for hyperparameter search, as hyperparameters like
length-scales naturally live on a log-scale.
Returns
-------
theta : array, shape (n_dims,)
The non-fixed, log-transformed hyperparameters of the kernel
"""
return np.append(self.k1.theta, self.k2.theta)
@theta.setter
def theta(self, theta):
"""Sets the (flattened, log-transformed) non-fixed hyperparameters.
Parameters
----------
theta : array, shape (n_dims,)
The non-fixed, log-transformed hyperparameters of the kernel
"""
k1_dims = self.k1.n_dims
self.k1.theta = theta[:k1_dims]
self.k2.theta = theta[k1_dims:]
@property
def bounds(self):
"""Returns the log-transformed bounds on the theta.
Returns
-------
bounds : array, shape (n_dims, 2)
The log-transformed bounds on the kernel's hyperparameters theta
"""
if self.k1.bounds.size == 0:
return self.k2.bounds
if self.k2.bounds.size == 0:
return self.k1.bounds
return np.vstack((self.k1.bounds, self.k2.bounds))
def __eq__(self, b):
if type(self) != type(b):
return False
return (self.k1 == b.k1 and self.k2 == b.k2) \
or (self.k1 == b.k2 and self.k2 == b.k1)
def is_stationary(self):
"""Returns whether the kernel is stationary. """
return self.k1.is_stationary() and self.k2.is_stationary()
class Sum(KernelOperator):
"""Sum-kernel k1 + k2 of two kernels k1 and k2.
The resulting kernel is defined as
k_sum(X, Y) = k1(X, Y) + k2(X, Y)
.. versionadded:: 0.18
Parameters
----------
k1 : Kernel object
The first base-kernel of the sum-kernel
k2 : Kernel object
The second base-kernel of the sum-kernel
"""
def __call__(self, X, Y=None, eval_gradient=False):
"""Return the kernel k(X, Y) and optionally its gradient.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Y : array, shape (n_samples_Y, n_features), (optional, default=None)
Right argument of the returned kernel k(X, Y). If None, k(X, X)
is evaluated instead.
eval_gradient : bool (optional, default=False)
Determines whether the gradient with respect to the kernel
hyperparameter is determined.
Returns
-------
K : array, shape (n_samples_X, n_samples_Y)
Kernel k(X, Y)
K_gradient : array (opt.), shape (n_samples_X, n_samples_X, n_dims)
The gradient of the kernel k(X, X) with respect to the
hyperparameter of the kernel. Only returned when eval_gradient
is True.
"""
if eval_gradient:
K1, K1_gradient = self.k1(X, Y, eval_gradient=True)
K2, K2_gradient = self.k2(X, Y, eval_gradient=True)
return K1 + K2, np.dstack((K1_gradient, K2_gradient))
else:
return self.k1(X, Y) + self.k2(X, Y)
def diag(self, X):
"""Returns the diagonal of the kernel k(X, X).
The result of this method is identical to np.diag(self(X)); however,
it can be evaluated more efficiently since only the diagonal is
evaluated.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Returns
-------
K_diag : array, shape (n_samples_X,)
Diagonal of kernel k(X, X)
"""
return self.k1.diag(X) + self.k2.diag(X)
def __repr__(self):
return "{0} + {1}".format(self.k1, self.k2)
class Product(KernelOperator):
"""Product-kernel k1 * k2 of two kernels k1 and k2.
The resulting kernel is defined as
k_prod(X, Y) = k1(X, Y) * k2(X, Y)
.. versionadded:: 0.18
Parameters
----------
k1 : Kernel object
The first base-kernel of the product-kernel
k2 : Kernel object
The second base-kernel of the product-kernel
"""
def __call__(self, X, Y=None, eval_gradient=False):
"""Return the kernel k(X, Y) and optionally its gradient.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Y : array, shape (n_samples_Y, n_features), (optional, default=None)
Right argument of the returned kernel k(X, Y). If None, k(X, X)
is evaluated instead.
eval_gradient : bool (optional, default=False)
Determines whether the gradient with respect to the kernel
hyperparameter is determined.
Returns
-------
K : array, shape (n_samples_X, n_samples_Y)
Kernel k(X, Y)
K_gradient : array (opt.), shape (n_samples_X, n_samples_X, n_dims)
The gradient of the kernel k(X, X) with respect to the
hyperparameter of the kernel. Only returned when eval_gradient
is True.
"""
if eval_gradient:
K1, K1_gradient = self.k1(X, Y, eval_gradient=True)
K2, K2_gradient = self.k2(X, Y, eval_gradient=True)
return K1 * K2, np.dstack((K1_gradient * K2[:, :, np.newaxis],
K2_gradient * K1[:, :, np.newaxis]))
else:
return self.k1(X, Y) * self.k2(X, Y)
def diag(self, X):
"""Returns the diagonal of the kernel k(X, X).
The result of this method is identical to np.diag(self(X)); however,
it can be evaluated more efficiently since only the diagonal is
evaluated.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Returns
-------
K_diag : array, shape (n_samples_X,)
Diagonal of kernel k(X, X)
"""
return self.k1.diag(X) * self.k2.diag(X)
def __repr__(self):
return "{0} * {1}".format(self.k1, self.k2)
class Exponentiation(Kernel):
"""Exponentiate kernel by given exponent.
The resulting kernel is defined as
k_exp(X, Y) = k(X, Y) ** exponent
.. versionadded:: 0.18
Parameters
----------
kernel : Kernel object
The base kernel
exponent : float
The exponent for the base kernel
"""
def __init__(self, kernel, exponent):
self.kernel = kernel
self.exponent = exponent
def get_params(self, deep=True):
"""Get parameters of this kernel.
Parameters
----------
deep : boolean, optional
If True, will return the parameters for this estimator and
contained subobjects that are estimators.
Returns
-------
params : mapping of string to any
Parameter names mapped to their values.
"""
params = dict(kernel=self.kernel, exponent=self.exponent)
if deep:
deep_items = self.kernel.get_params().items()
params.update(('kernel__' + k, val) for k, val in deep_items)
return params
@property
def hyperparameters(self):
"""Returns a list of all hyperparameter."""
r = []
for hyperparameter in self.kernel.hyperparameters:
r.append(Hyperparameter("kernel__" + hyperparameter.name,
hyperparameter.value_type,
hyperparameter.bounds,
hyperparameter.n_elements))
return r
@property
def theta(self):
"""Returns the (flattened, log-transformed) non-fixed hyperparameters.
Note that theta are typically the log-transformed values of the
kernel's hyperparameters as this representation of the search space
is more amenable for hyperparameter search, as hyperparameters like
length-scales naturally live on a log-scale.
Returns
-------
theta : array, shape (n_dims,)
The non-fixed, log-transformed hyperparameters of the kernel
"""
return self.kernel.theta
@theta.setter
def theta(self, theta):
"""Sets the (flattened, log-transformed) non-fixed hyperparameters.
Parameters
----------
theta : array, shape (n_dims,)
The non-fixed, log-transformed hyperparameters of the kernel
"""
self.kernel.theta = theta
@property
def bounds(self):
"""Returns the log-transformed bounds on the theta.
Returns
-------
bounds : array, shape (n_dims, 2)
The log-transformed bounds on the kernel's hyperparameters theta
"""
return self.kernel.bounds
def __eq__(self, b):
if type(self) != type(b):
return False
return (self.kernel == b.kernel and self.exponent == b.exponent)
def __call__(self, X, Y=None, eval_gradient=False):
"""Return the kernel k(X, Y) and optionally its gradient.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Y : array, shape (n_samples_Y, n_features), (optional, default=None)
Right argument of the returned kernel k(X, Y). If None, k(X, X)
            is evaluated instead.
eval_gradient : bool (optional, default=False)
Determines whether the gradient with respect to the kernel
hyperparameter is determined.
Returns
-------
K : array, shape (n_samples_X, n_samples_Y)
Kernel k(X, Y)
K_gradient : array (opt.), shape (n_samples_X, n_samples_X, n_dims)
The gradient of the kernel k(X, X) with respect to the
hyperparameter of the kernel. Only returned when eval_gradient
is True.
"""
if eval_gradient:
K, K_gradient = self.kernel(X, Y, eval_gradient=True)
K_gradient *= \
self.exponent * K[:, :, np.newaxis] ** (self.exponent - 1)
return K ** self.exponent, K_gradient
else:
K = self.kernel(X, Y, eval_gradient=False)
return K ** self.exponent
def diag(self, X):
"""Returns the diagonal of the kernel k(X, X).
The result of this method is identical to np.diag(self(X)); however,
it can be evaluated more efficiently since only the diagonal is
evaluated.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Returns
-------
K_diag : array, shape (n_samples_X,)
Diagonal of kernel k(X, X)
"""
return self.kernel.diag(X) ** self.exponent
def __repr__(self):
return "{0} ** {1}".format(self.kernel, self.exponent)
def is_stationary(self):
"""Returns whether the kernel is stationary. """
return self.kernel.is_stationary()
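# Illustrative sketch (hypothetical helper, not part of the original module):
# exponentiation raises the base kernel's values to `exponent`, so a constant
# base kernel of 4.0 squared evaluates to 16.0 everywhere.
def _example_exponentiation():
    squared = Exponentiation(ConstantKernel(constant_value=4.0), exponent=2)
    X = np.zeros((3, 1))
    return squared(X)  # 3x3 matrix with every entry equal to 16.0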
class ConstantKernel(StationaryKernelMixin, Kernel):
"""Constant kernel.
Can be used as part of a product-kernel where it scales the magnitude of
the other factor (kernel) or as part of a sum-kernel, where it modifies
the mean of the Gaussian process.
k(x_1, x_2) = constant_value for all x_1, x_2
.. versionadded:: 0.18
Parameters
----------
constant_value : float, default: 1.0
The constant value which defines the covariance:
k(x_1, x_2) = constant_value
constant_value_bounds : pair of floats >= 0, default: (1e-5, 1e5)
The lower and upper bound on constant_value
"""
def __init__(self, constant_value=1.0, constant_value_bounds=(1e-5, 1e5)):
self.constant_value = constant_value
self.constant_value_bounds = constant_value_bounds
@property
def hyperparameter_constant_value(self):
return Hyperparameter(
"constant_value", "numeric", self.constant_value_bounds)
def __call__(self, X, Y=None, eval_gradient=False):
"""Return the kernel k(X, Y) and optionally its gradient.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Y : array, shape (n_samples_Y, n_features), (optional, default=None)
Right argument of the returned kernel k(X, Y). If None, k(X, X)
            is evaluated instead.
eval_gradient : bool (optional, default=False)
Determines whether the gradient with respect to the kernel
hyperparameter is determined. Only supported when Y is None.
Returns
-------
K : array, shape (n_samples_X, n_samples_Y)
Kernel k(X, Y)
K_gradient : array (opt.), shape (n_samples_X, n_samples_X, n_dims)
The gradient of the kernel k(X, X) with respect to the
hyperparameter of the kernel. Only returned when eval_gradient
is True.
"""
X = np.atleast_2d(X)
if Y is None:
Y = X
elif eval_gradient:
raise ValueError("Gradient can only be evaluated when Y is None.")
K = np.full((X.shape[0], Y.shape[0]), self.constant_value,
dtype=np.array(self.constant_value).dtype)
if eval_gradient:
if not self.hyperparameter_constant_value.fixed:
return (K, np.full((X.shape[0], X.shape[0], 1),
self.constant_value,
dtype=np.array(self.constant_value).dtype))
else:
return K, np.empty((X.shape[0], X.shape[0], 0))
else:
return K
def diag(self, X):
"""Returns the diagonal of the kernel k(X, X).
The result of this method is identical to np.diag(self(X)); however,
it can be evaluated more efficiently since only the diagonal is
evaluated.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Returns
-------
K_diag : array, shape (n_samples_X,)
Diagonal of kernel k(X, X)
"""
return np.full(X.shape[0], self.constant_value,
dtype=np.array(self.constant_value).dtype)
def __repr__(self):
return "{0:.3g}**2".format(np.sqrt(self.constant_value))
class WhiteKernel(StationaryKernelMixin, Kernel):
"""White kernel.
The main use-case of this kernel is as part of a sum-kernel where it
explains the noise of the signal as independently and identically
normally-distributed. The parameter noise_level equals the variance of this
noise.
k(x_1, x_2) = noise_level if x_1 == x_2 else 0
.. versionadded:: 0.18
Parameters
----------
noise_level : float, default: 1.0
Parameter controlling the noise level (variance)
noise_level_bounds : pair of floats >= 0, default: (1e-5, 1e5)
The lower and upper bound on noise_level
"""
def __init__(self, noise_level=1.0, noise_level_bounds=(1e-5, 1e5)):
self.noise_level = noise_level
self.noise_level_bounds = noise_level_bounds
@property
def hyperparameter_noise_level(self):
return Hyperparameter(
"noise_level", "numeric", self.noise_level_bounds)
def __call__(self, X, Y=None, eval_gradient=False):
"""Return the kernel k(X, Y) and optionally its gradient.
Parameters
----------
X : array, shape (n_samples_X, n_features)
Left argument of the returned kernel k(X, Y)
Y : array, shape (n_samples_Y, n_features), (optional, default=None)
Right argument of the returned kernel k(X, Y). If None, k(X, X)
            is evaluated instead.
eval_gradient : bool (optional, default=False)
Determines whether the gradient with respect to the kernel
hyperparameter is determined. Only supported when Y is None.
Returns
-------
K : array, shape (n_samples_X, n_samples_Y)
Kernel k(X, Y)
K_gradient : array (opt.), shape (n_samples_X, n_samples_X, n_dims)
The gradient of the kernel k(X, X) with respect to the
hyperparameter of the kernel. Only returned when eval_gradient
is True.
"""
X = np.atleast_2d(X)
if Y is not None and eval_gradient:
raise ValueError("Gradient can only be evaluated when Y is None.")
if Y is None:
K = self.noise_level * np.eye(X.shape[0])
if eval_gradient:
if not self.hyperparameter_noise_level.fixed:
return (K, self.noise_level
* | np.eye(X.shape[0]) | numpy.eye |
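# Illustrative sketch (hypothetical helper, not part of the original module):
# WhiteKernel.__call__ above is cut off at the dataset row boundary, but for
# Y=None and eval_gradient=False it is expected to return noise_level * np.eye(n),
# i.e. the noise variance on the diagonal only.
def _example_white_kernel():
    k = WhiteKernel(noise_level=0.25)
    X = np.zeros((4, 2))
    return k(X)  # assumed result: 0.25 * np.eye(4)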
# -*- coding: utf-8 -*-
import cv2
import time
import sys
import numpy as np
import os
DEVICE_ID = 0
DEFAULT_SIZE = (320, 240)
FLIP = True
class Rect:
def __init__(self, x, y, w, h):
self.x = x
self.y = y
self.w = w
self.h = h
def modify(self):
if self.w < 0:
self.w *= -1
self.x -= self.w
if self.h < 0:
self.h *= -1
self.y -= self.h
class Meta:
def __init__(self, window_name, img, rect):
self.img = img
self.img_bk = | np.copy(img) | numpy.copy |
import os
imdb_dir = '/home/han/code/data/aclImdb'
train_dir = os.path.join(imdb_dir, 'train')
# Processing the labels of the raw IMDB data
labels = []
texts = []
for label_type in ['neg', 'pos']:
dir_name = os.path.join(train_dir, label_type)
for fname in os.listdir(dir_name):
if fname[-4:] == '.txt':
f = open(os.path.join(dir_name, fname))
texts.append(f.read())
f.close()
if label_type == 'neg':
labels.append(0)
else:
labels.append(1)
# Tokenizing the text of the raw IMDB data
from keras.preprocessing.text import Tokenizer
from keras.preprocessing.sequence import pad_sequences
import numpy as np
maxlen = 100
training_samples = 10000
validation_samples = 10000
max_words = 10000
tokenizer = Tokenizer(num_words=max_words)
tokenizer.fit_on_texts(texts)
sequences = tokenizer.texts_to_sequences(texts)
word_index = tokenizer.word_index
data = pad_sequences(sequences, maxlen=maxlen)
labels = np.asarray(labels)
indices = | np.arange(data.shape[0]) | numpy.arange |
import numpy as np
from utils.octree_partition import partition_octree
import time
from glob import glob
import tensorflow as tf
import multiprocessing
from tqdm import tqdm
from pyntcloud import PyntCloud
import pandas as pd
#VOXEL-OCTREE
def timing(f):
def wrap(*args, **kwargs):
time1 = time.time()
ret = f(*args, **kwargs)
time2 = time.time()
print('{:s} function took {:.3f} ms'.format(f.__name__, (time2 - time1) * 1000.0))
return ret
return wrap
def get_bin_stream_blocks(path_to_ply, pc_level, departition_level):
    # e.g. with 10 levels --> binstr of 10 levels, block size = 1
level = int(departition_level)
pc = PyntCloud.from_file(path_to_ply)
points = pc.points.values
no_oc_voxels = len(points)
box = int(2 ** pc_level)
blocks2, binstr2 = timing(partition_octree)(points, [0, 0, 0], [box, box, box], level)
return no_oc_voxels, blocks2, binstr2
def voxel_block_2_octree(box,oct_seq):
box_size=box.shape[0]
child_bbox=int(box_size/2)
if(box_size>2):
for d in range(2):
for h in range(2):
for w in range(2):
child_box=box[d * child_bbox:(d + 1) * child_bbox, h * child_bbox:(h + 1) * child_bbox, w * child_bbox:(w + 1) * child_bbox]
if(np.sum(child_box)!=0):
oct_seq.append(1)
voxel_block_2_octree(child_box, oct_seq)
else:
oct_seq.append(0)
else:
curr_octant=[int(x) for x in box.flatten()]
oct_seq+=curr_octant
return oct_seq
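# Illustrative sketch (hypothetical helper, not part of the original module):
# for a 2x2x2 occupancy block the recursion above hits its base case and
# appends the flattened voxels directly.
def _example_voxel_block_2_octree():
    box = np.zeros((2, 2, 2))
    box[0, 0, 0] = 1
    box[1, 1, 1] = 1
    return voxel_block_2_octree(box, [])  # [1, 0, 0, 0, 0, 0, 0, 1]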
#FOR VOXEL
def input_fn_super_res(points, batch_size, dense_tensor_shape32, data_format, repeat=True, shuffle=True, prefetch_size=1):
# Create input data pipeline.
def gen():
iterator=iter(points)
done=False
while not done:
try:
p = next(iterator)
except StopIteration:
done=True
else:
ds = np.abs(np.round((p - 0.01) / 2))
ds = np.unique(ds,axis=0)
yield (ds, p)
p_max = np.array([64, 64, 64])
dense_tensor_shape64 = np.concatenate([p_max, [1]]).astype('int64')
dense_tensor_shape=[dense_tensor_shape32,dense_tensor_shape64]
dataset = tf.data.Dataset.from_generator(generator=gen, output_types=(tf.int64,tf.int64),output_shapes= (tf.TensorShape([None, 3]),tf.TensorShape([None, 3])))
if shuffle:
dataset = dataset.shuffle(len(points))
if repeat:
dataset = dataset.repeat()
dataset = dataset.map(lambda x,y: pc_to_tf(x,y
, dense_tensor_shape, data_format))
dataset = dataset.map(lambda x,y: process_x(x,y, dense_tensor_shape))
dataset = dataset.batch(batch_size)
dataset = dataset.prefetch(prefetch_size)
return dataset
# Main launcher
def input_fn_voxel_dnn(points, batch_size, dense_tensor_shape, data_format, repeat=True, shuffle=True, prefetch_size=1):
print('point shape: ', points.shape)
# Create input data pipeline.
dataset = tf.data.Dataset.from_generator(lambda: iter(points), tf.int64, tf.TensorShape([None, 3]))
if shuffle:
dataset = dataset.shuffle(len(points))
if repeat:
dataset = dataset.repeat()
dataset = dataset.map(lambda x: pc_to_tf_voxel_dnn(x, dense_tensor_shape, data_format))
dataset = dataset.map(lambda x: process_x_voxel_dnn(x, dense_tensor_shape))
dataset = dataset.batch(batch_size)
dataset = dataset.prefetch(prefetch_size)
return dataset
def df_to_pc(df):
points = df[['x', 'y', 'z']].values
return points
def pa_to_df(points):
cols = ['x', 'y', 'z', 'red', 'green', 'blue']
types = (['float32'] * 3) + (['uint8'] * 3)
d = {}
assert 3 <= points.shape[1] <= 6
for i in range(points.shape[1]):
col = cols[i]
dtype = types[i]
d[col] = points[:, i].astype(dtype)
df = pd.DataFrame(data=d)
return df
def pc_to_df(pc):
points = pc.points
return pa_to_df(points)
def pc_to_tf(x,y, dense_tensor_shape, data_format):
assert data_format in ['channels_last', 'channels_first']
# Add one channel (channels_last convention)
if data_format == 'channels_last':
x = tf.pad(x, [[0, 0], [0, 1]])
else:
x = tf.pad(x, [[0, 0], [1, 0]])
st0 = tf.sparse.SparseTensor(x, tf.ones_like(x[:, 0]), dense_tensor_shape[0])
# Add one channel (channels_last convention)
if data_format == 'channels_last':
y = tf.pad(y, [[0, 0], [0, 1]])
else:
y = tf.pad(y, [[0, 0], [1, 0]])
st1 = tf.sparse.SparseTensor(y, tf.ones_like(y[:, 0]), dense_tensor_shape[1])
return (st0,st1)
def process_x(x,y, dense_tensor_shape):
x = tf.sparse.to_dense(x, default_value=0, validate_indices=False)
x.set_shape(dense_tensor_shape[0])
x = tf.cast(x, tf.float32)
y = tf.sparse.to_dense(y, default_value=0, validate_indices=False)
y.set_shape(dense_tensor_shape[1])
y = tf.cast(y, tf.float32)
return (x,y)
def pc_to_tf_voxel_dnn(points, dense_tensor_shape, data_format):
x = points
assert data_format in ['channels_last', 'channels_first']
# Add one channel (channels_last convention)
if data_format == 'channels_last':
x = tf.pad(x, [[0, 0], [0, 1]])
else:
x = tf.pad(x, [[0, 0], [1, 0]])
st = tf.sparse.SparseTensor(x, tf.ones_like(x[:, 0]), dense_tensor_shape)
# print('st in pc to tf: ',st)
return st
def process_x_voxel_dnn(x, dense_tensor_shape):
x = tf.sparse.to_dense(x, default_value=0, validate_indices=False)
x.set_shape(dense_tensor_shape)
x = tf.cast(x, tf.float32)
# print('x in process x: ',x)
return x
def get_shape_data(resolution, data_format):
assert data_format in ['channels_last', 'channels_first']
bbox_min = 0
bbox_max = resolution
p_max = | np.array([bbox_max, bbox_max, bbox_max]) | numpy.array |
import numpy as np
from tuning.cyMINoncyclic import mc_mean_grad_gaussian
import matplotlib.pyplot as plt
#============Gaussian Noncyclic model with NO constraints============
# -----------Bandit Algorithm helper functions-----------
def gaussian_log_ratio(bin_index, output_value, input_set, input_prob_vec, inverse_cov_matrix):
# bin_index: i = 0,1,...,numBin-1
# output_value: response r (vector with length=numNeuro)
# input_set: {\lambda_{k,j}} (vector shape=(numNeuro,numBin)) (same as tuning_curve) (all nonzero)
# input_prob_vec: {w_j} (list or 1-d vector with length = numBin)
# inverse_cov_matrix: numNeuro-by-numNeuro numpy array.
if input_set.ndim == 1:
input_set = input_set.reshape((1,-1))
numNeuro, numBin = input_set.shape
sum_exp = 0
for l in range(numBin):
vec_l = output_value - input_set[:,l]
vec_bin_index = output_value - input_set[:, bin_index]
quad_l = np.dot(vec_l, np.dot(inverse_cov_matrix, vec_l))
quad_bin_index = np.dot(vec_bin_index, np.dot(inverse_cov_matrix, vec_bin_index))
sum_exp += input_prob_vec[l]*np.exp(-0.5*(quad_l - quad_bin_index))
return - | np.log(sum_exp) | numpy.log |
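# Illustrative sketch (hypothetical values, not from the original module): a
# minimal call of gaussian_log_ratio with one neuron and two bins, mainly to
# show the expected argument shapes.
def _example_gaussian_log_ratio():
    input_set = np.array([[1.0, 2.0]])      # (numNeuro=1, numBin=2) tuning values
    input_prob_vec = np.array([0.5, 0.5])   # bin weights w_j
    inverse_cov_matrix = np.array([[1.0]])  # 1x1 inverse covariance
    r = np.array([1.2])                     # observed response
    return gaussian_log_ratio(0, r, input_set, input_prob_vec, inverse_cov_matrix)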
import math
import numpy as np
from scipy.ndimage.interpolation import rotate
class GridMask(object):
def __init__(self,
k,
D,
theta=360,
mode=['topleft', 'botright'],
always_apply=True,
p_start=0,
p_end=0.8,
policy='linear'):
self.k = k
self.D = D
self.theta = theta
self.mode = mode
self.always_apply = always_apply
self.p_start = p_start
self.p_end = p_end
        self.policy = policy
self.steps = 0
self.p = p_start
def _annealing_cos(self, start, end, pct):
"Cosine anneal from `start` to `end` as pct goes from 0.0 to 1.0."
cos_out = math.cos(math.pi * pct) + 1
return end + (start - end) / 2.0 * cos_out
def _annealing_linear(self, start, end, pct):
"Linearly anneal from `start` to `end` as pct goes from 0.0 to 1.0."
return (end - start) * pct + start
def set_p(self, total_steps):
self.steps += 1
pct = min(1.0, self.steps / float(total_steps))
if self.policy == 'linear':
self.p = self._annealing_linear(self.p_start, self.p_end, pct)
elif self.policy == 'cosine':
self.p = self._annealing_cos(self.p_start, self.p_end, pct)
def apply(self, image):
# Sample k if range is provided
if isinstance(self.k, (tuple,list)):
k = np.random.uniform(self.k[0], self.k[1])
else:
k = self.k
# Sample D if range is provided
if isinstance(self.D, (tuple,list)):
D = np.random.uniform(self.D[0], self.D[1])
else:
D = self.D
if D <= 1:
D = D * np.min(image.shape[:2])
D = int(D)
dx = np.random.randint(D)
dy = np.random.randint(D)
dx = dy = 0
rm = int(D * (1 - (1 - np.sqrt(1 - k))))
_mode = np.random.choice(self.mode)
mask = | np.ones(image.shape[:2]) | numpy.ones |
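# Illustrative sketch (hypothetical helper, not from the original module):
# apply() is cut off at the dataset row boundary above, but the annealing
# schedule is complete -- with the 'linear' policy, the drop probability p
# ramps from p_start to p_end over total_steps calls to set_p().
def _example_gridmask_schedule():
    gm = GridMask(k=0.5, D=(0.4, 0.6), p_start=0.0, p_end=0.8)
    for _ in range(10):
        gm.set_p(total_steps=10)
    return gm.p  # 0.8 once the schedule has finished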
# SAC training code reference
# https://github.com/vitchyr/rlkit/blob/master/rlkit/torch/sac/sac.py
import copy
from math import ceil
import numpy as np
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
from gym import spaces
from rl.dataset import ReplayBuffer, RandomSampler
from rl.base_agent import BaseAgent
from rl.planner_agent import PlannerAgent
from util.logger import logger
from util.mpi import mpi_average
from util.pytorch import (
optimizer_cuda,
count_parameters,
sync_networks,
sync_grads,
to_tensor,
)
from util.gym import action_size, observation_size
class SACAgent(BaseAgent):
def __init__(
self,
config,
ob_space,
ac_space,
actor,
critic,
non_limited_idx=None,
ref_joint_pos_indexes=None,
joint_space=None,
is_jnt_limited=None,
jnt_indices=None,
ac_scale=None,
):
super().__init__(config, ob_space)
self._ob_space = ob_space
self._ac_space = ac_space
self._jnt_indices = jnt_indices
self._ref_joint_pos_indexes = ref_joint_pos_indexes
self._ac_scale = ac_scale
self._log_alpha = torch.tensor(
np.log(config.alpha), requires_grad=True, device=config.device
)
self._alpha_optim = optim.Adam([self._log_alpha], lr=config.lr_actor)
self._joint_space = joint_space
self._is_jnt_limited = is_jnt_limited
if joint_space is not None:
self._jnt_minimum = joint_space["default"].low
self._jnt_maximum = joint_space["default"].high
if config.load_pretrained and config.bc_loss:
self.bc_mse_loss = nn.MSELoss()
else:
self.bc_mse_loss = None
# build up networks
self._build_actor(actor)
self._build_critic(critic)
self._network_cuda(config.device)
self._target_entropy = -action_size(self._actor._ac_space)
self._actor_optim = optim.Adam(self._actor.parameters(), lr=config.lr_actor)
self._critic1_optim = optim.Adam(
self._critic1.parameters(), lr=config.lr_critic
)
self._critic2_optim = optim.Adam(
self._critic2.parameters(), lr=config.lr_critic
)
sampler = RandomSampler()
buffer_keys = ["ob", "ac", "meta_ac", "done", "rew"]
if config.mopa or config.expand_ac_space:
buffer_keys.append("intra_steps")
self._buffer = ReplayBuffer(
buffer_keys, config.buffer_size, sampler.sample_func
)
self._log_creation()
self._planner = None
self._is_planner_initialized = False
if config.mopa:
self._planner = PlannerAgent(
config,
ac_space,
non_limited_idx,
planner_type=config.planner_type,
passive_joint_idx=config.passive_joint_idx,
ignored_contacts=config.ignored_contact_geom_ids,
is_simplified=config.is_simplified,
simplified_duration=config.simplified_duration,
range_=config.range,
)
self._simple_planner = PlannerAgent(
config,
ac_space,
non_limited_idx,
planner_type=config.simple_planner_type,
passive_joint_idx=config.passive_joint_idx,
ignored_contacts=config.ignored_contact_geom_ids,
goal_bias=1.0,
is_simplified=config.simple_planner_simplified,
simplified_duration=config.simple_planner_simplified_duration,
range_=config.simple_planner_range,
)
self._omega = config.omega
def _log_creation(self):
if self._config.is_chef:
logger.info("creating a sac agent")
logger.info("the actor has %d parameters", count_parameters(self._actor))
logger.info(
"the critic1 has %d parameters", count_parameters(self._critic1)
)
logger.info(
"the critic2 has %d parameters", count_parameters(self._critic2)
)
def _build_actor(self, actor):
self._actor = actor(
self._config,
self._ob_space,
self._ac_space,
self._config.tanh_policy,
self._ac_scale,
)
def _build_critic(self, critic):
config = self._config
self._critic1 = critic(config, self._ob_space, self._ac_space)
self._critic2 = critic(config, self._ob_space, self._ac_space)
# build up target networks
self._critic1_target = critic(config, self._ob_space, self._ac_space)
self._critic2_target = critic(config, self._ob_space, self._ac_space)
self._critic1_target.load_state_dict(self._critic1.state_dict())
self._critic2_target.load_state_dict(self._critic2.state_dict())
def store_episode(self, rollouts):
self._buffer.store_episode(rollouts)
def valid_action(self, ac):
return np.all(ac["default"] >= -1.0) and np.all(ac["default"] <= 1.0)
def is_planner_ac(self, ac):
if np.any(
ac["default"][: len(self._ref_joint_pos_indexes)] < -self._omega
) or np.any(ac["default"][: len(self._ref_joint_pos_indexes)] > self._omega):
return True
return False
def isValidState(self, state):
return self._planner.isValidState(state)
def convert2planner_displacement(self, ac, ac_scale):
ac_space_type = self._config.ac_space_type
action_range = self._config.action_range
if ac_space_type == "normal":
return ac * action_range
elif ac_space_type == "piecewise":
return np.where(
| np.abs(ac) | numpy.abs |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Sat Aug 1 12:01:29 2020
@author: oschoppe
"""
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jun 30 09:23:48 2020
@author: oschoppe
"""
"""Dataset class template
This module provides a template for users to implement custom datasets.
You can specify '--dataset_mode template' to use this dataset.
The class name should be consistent with both the filename and its dataset_mode option.
The filename should be <dataset_mode>_dataset.py
The class name should be <Dataset_mode>Dataset.py
You need to implement the following functions:
-- <modify_commandline_options>: Add dataset-specific options and rewrite default values for existing options.
-- <__init__>: Initialize this dataset class.
-- <__getitem__>: Return a data point and its metadata information.
-- <__len__>: Return the number of images.
"""
from data.base_dataset import BaseDataset
import os
import pickle
import torch
import numpy as np
import random
import time
class MetganDataset(BaseDataset):
"""A template dataset class for you to implement custom datasets."""
@staticmethod
def modify_commandline_options(parser, is_train):
"""Add new dataset-specific options, and rewrite default values for existing options.
Parameters:
parser -- original option parser
is_train (bool) -- whether training phase or test phase. You can use this flag to add training-specific or test-specific options.
Returns:
the modified parser.
"""
parser.add_argument('--new_dataset_option', type=float, default=1.0, help='new dataset option')
parser.set_defaults(max_dataset_size=7000, new_dataset_option=2.0) # specify dataset-specific default values
return parser
def __init__(self, opt):
"""Initialize this dataset class.
Parameters:
opt (Option class) -- stores all the experiment flags; needs to be a subclass of BaseOptions
A few things can be done here.
- save the options (have been done in BaseDataset)
- get image paths and meta information of the dataset.
- define the image transformation.
"""
# save the option and dataset root
BaseDataset.__init__(self, opt)
# get the image paths of your dataset;
self.folder_cancer = './data/sample_data/cancer_ch'
self.folder_labels = './data/sample_data/labels'
self.folder_anato = './data/sample_data/anato_channel'
self.image_paths = os.listdir(self.folder_cancer)
#self.split = opt.dm_fold
volumes_test = np.load("./data/balanced_test_set_pids.npy")
samples_for_test = []
print(len(volumes_test))
for sample_test in volumes_test:
samples_for_test.append('data_patch_'+str(sample_test)+'_X_F15.pickledump')
samples_for_test.append('data_patch_'+str(sample_test)+'_Y_F15.pickledump')
samples_for_test.append('data_patch_'+str(sample_test)+'_Z_F15.pickledump')
self.folder_test_cancer = './data/sample_data/cancer_ch'
self.folder_test_labels = './data/sample_data/labels'
self.folder_test_anato = './data/sample_data/anato_channel'
image_paths_test = os.listdir(self.folder_test_cancer)
self.test_samples = [image_name for image_name in self.image_paths if image_name in samples_for_test]
self.train_samples = [image_name for image_name in self.image_paths if image_name not in self.test_samples]
self.phase = opt.phase
assert(self.opt.load_size >= self.opt.crop_size) # crop_size should be smaller than the size of loaded image
self.input_nc = self.opt.output_nc if self.opt.direction == 'BtoA' else self.opt.input_nc
self.output_nc = self.opt.input_nc if self.opt.direction == 'BtoA' else self.opt.output_nc
def __getitem__(self, index):
"""Return a data point and its metadata information.
Parameters:
index -- a random integer for data indexing
Returns:
a dictionary of data with their names. It usually contains the data itself and its metadata information.
Step 1: get a random image path: e.g., path = self.image_paths[index]
Step 2: load your data from the disk: e.g., image = Image.open(path).convert('RGB').
        Step 3: convert your data to a PyTorch tensor. You can use helper functions such as self.transform. e.g., data = self.transform(image)
Step 4: return a data point as a dictionary.
"""
if(self.phase =='train'):
sample_cancer = self.train_samples[index]
folder_cancer = self.folder_cancer
folder_anato= self.folder_anato
folder_label = self.folder_labels
else:
sample_cancer = self.test_samples[index]
folder_cancer = self.folder_test_cancer
folder_anato= self.folder_test_anato
folder_label = self.folder_test_labels
image_anato = pickle.load(open(os.path.join(folder_anato,sample_cancer) , "rb" ))['raw']
image_cancer = pickle.load(open(os.path.join(folder_cancer, sample_cancer) , "rb" ))['raw']
label_name = sample_cancer.replace("data", "label")
label_01 = pickle.load(open(os.path.join(folder_label,label_name) , "rb" ))
#normalize cancer image:
image_anato = 2*(image_anato - np.min(image_anato))/(np.max(image_anato)- np.min(image_anato)+1e-10) -1
image_cancer = 2*(image_cancer - np.min(image_cancer))/(np.max(image_cancer)- np.min(image_cancer)+1e-10)-1
#data is saved normalized between -1 and 1
if(self.phase != 'test'):
t = 1000*time.time() # current time in milliseconds
random.seed(int(t))
rotations = random.randrange(4)
if(rotations):
if(rotations ==1):
image_cancer = np.rot90(image_cancer)
image_anato = np.rot90(image_anato)
label_01 = np.rot90(label_01)
elif (rotations ==2):
image_cancer = np.flipud(image_cancer)
image_anato = | np.flipud(image_anato) | numpy.flipud |
# coding: utf-8
from __future__ import division, print_function
# Standard library
import time
# Third-party
import matplotlib.pyplot as plt
import numpy as np
from scipy.misc import derivative
from astropy.extern.six.moves import cPickle as pickle
import pytest
# Project
from ..io import load
from ..core import CompositePotential
from ....units import UnitSystem, DimensionlessUnitSystem
from ....dynamics import PhaseSpacePosition
from ....integrate import LeapfrogIntegrator
def partial_derivative(func, point, dim_ix=0, **kwargs):
xyz = np.array(point, copy=True)
def wraps(a):
xyz[dim_ix] = a
return func(xyz)
return derivative(wraps, point[dim_ix], **kwargs)
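# Illustrative sketch (hypothetical function, not from the original module):
# partial_derivative differentiates a scalar function of a point along one
# coordinate, e.g. d/dy of x*y**2 at (2, 3) is 12.
def _example_partial_derivative():
    f = lambda xyz: xyz[0] * xyz[1] ** 2
    return partial_derivative(f, np.array([2.0, 3.0]), dim_ix=1, dx=1e-4)  # ~12.0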
class PotentialTestBase(object):
name = None
potential = None # MUST SET THIS
tol = 1E-5
show_plots = False
@classmethod
def setup_class(cls):
if cls.name is None:
cls.name = cls.__name__[4:] # remove Test
print("Testing potential: {}".format(cls.name))
cls.w0 = np.array(cls.w0)
cls.ndim = cls.w0.size // 2
# TODO: need to test also quantity objects and phasespacepositions!
# these are arrays we will test the methods on:
w0_2d = np.repeat(cls.w0[:,None], axis=1, repeats=16)
w0_3d = np.repeat(w0_2d[...,None], axis=2, repeats=8)
w0_list = list(cls.w0)
w0_slice = w0_2d[:,:4]
cls.w0s = [cls.w0, w0_2d, w0_3d, w0_list, w0_slice]
cls._grad_return_shapes = [cls.w0[:cls.ndim].shape + (1,),
w0_2d[:cls.ndim].shape,
w0_3d[:cls.ndim].shape,
cls.w0[:cls.ndim].shape + (1,),
w0_slice[:cls.ndim].shape]
cls._hess_return_shapes = [(cls.ndim,) + cls.w0[:cls.ndim].shape + (1,),
(cls.ndim,) + w0_2d[:cls.ndim].shape,
(cls.ndim,) + w0_3d[:cls.ndim].shape,
(cls.ndim,) + cls.w0[:cls.ndim].shape + (1,),
(cls.ndim,) + w0_slice[:cls.ndim].shape]
cls._valu_return_shapes = [x[1:] for x in cls._grad_return_shapes]
def test_unitsystem(self):
assert isinstance(self.potential.units, UnitSystem)
def test_energy(self):
assert self.ndim == self.potential.ndim
for arr,shp in zip(self.w0s, self._valu_return_shapes):
v = self.potential.energy(arr[:self.ndim])
assert v.shape == shp
g = self.potential.energy(arr[:self.ndim], t=0.1)
g = self.potential.energy(arr[:self.ndim], t=0.1*self.potential.units['time'])
t = np.zeros(np.array(arr).shape[1:]) + 0.1
g = self.potential.energy(arr[:self.ndim], t=t)
g = self.potential.energy(arr[:self.ndim], t=t*self.potential.units['time'])
def test_gradient(self):
for arr,shp in zip(self.w0s, self._grad_return_shapes):
g = self.potential.gradient(arr[:self.ndim])
assert g.shape == shp
g = self.potential.gradient(arr[:self.ndim], t=0.1)
g = self.potential.gradient(arr[:self.ndim], t=0.1*self.potential.units['time'])
t = np.zeros(np.array(arr).shape[1:]) + 0.1
g = self.potential.gradient(arr[:self.ndim], t=t)
g = self.potential.gradient(arr[:self.ndim], t=t*self.potential.units['time'])
def test_hessian(self):
for arr,shp in zip(self.w0s, self._hess_return_shapes):
g = self.potential.hessian(arr[:self.ndim])
assert g.shape == shp
g = self.potential.hessian(arr[:self.ndim], t=0.1)
g = self.potential.hessian(arr[:self.ndim], t=0.1*self.potential.units['time'])
t = np.zeros(np.array(arr).shape[1:]) + 0.1
g = self.potential.hessian(arr[:self.ndim], t=t)
g = self.potential.hessian(arr[:self.ndim], t=t*self.potential.units['time'])
def test_mass_enclosed(self):
for arr,shp in zip(self.w0s, self._valu_return_shapes):
g = self.potential.mass_enclosed(arr[:self.ndim])
assert g.shape == shp
assert np.all(g > 0.)
g = self.potential.mass_enclosed(arr[:self.ndim], t=0.1)
g = self.potential.mass_enclosed(arr[:self.ndim], t=0.1*self.potential.units['time'])
t = np.zeros(np.array(arr).shape[1:]) + 0.1
g = self.potential.mass_enclosed(arr[:self.ndim], t=t)
g = self.potential.mass_enclosed(arr[:self.ndim], t=t*self.potential.units['time'])
def test_circular_velocity(self):
for arr,shp in zip(self.w0s, self._valu_return_shapes):
g = self.potential.circular_velocity(arr[:self.ndim])
assert g.shape == shp
assert np.all(g > 0.)
g = self.potential.circular_velocity(arr[:self.ndim], t=0.1)
g = self.potential.circular_velocity(arr[:self.ndim], t=0.1*self.potential.units['time'])
t = np.zeros(np.array(arr).shape[1:]) + 0.1
g = self.potential.circular_velocity(arr[:self.ndim], t=t)
g = self.potential.circular_velocity(arr[:self.ndim], t=t*self.potential.units['time'])
def test_repr(self):
pot_repr = repr(self.potential)
if isinstance(self.potential.units, DimensionlessUnitSystem):
assert "dimensionless" in pot_repr
else:
assert str(self.potential.units['length']) in pot_repr
assert str(self.potential.units['time']) in pot_repr
assert str(self.potential.units['mass']) in pot_repr
for k in self.potential.parameters.keys():
assert "{}=".format(k) in pot_repr
def test_compare(self):
# skip if composite potentials
if len(self.potential.parameters) == 0:
return
other = self.potential.__class__(units=self.potential.units,
**self.potential.parameters)
assert other == self.potential
pars = self.potential.parameters.copy()
for k in pars.keys():
if k != 0:
pars[k] = 1.1*pars[k]
other = self.potential.__class__(units=self.potential.units, **pars)
assert other != self.potential
# check that comparing to non-potentials works
assert not self.potential == "sup"
assert not self.potential == None
def test_plot(self):
p = self.potential
if self.show_plots:
f = p.plot_contours(grid=(np.linspace(-10., 10., 100), 0., 0.),
labels=["X"])
# f.suptitle("slice off from 0., won't have cusp")
# f.savefig(os.path.join(plot_path, "contour_x.png"))
f = p.plot_contours(grid=(np.linspace(-10., 10., 100),
np.linspace(-10., 10., 100),
0.),
cmap='Blues')
# f.savefig(os.path.join(plot_path, "contour_xy.png"))
f = p.plot_contours(grid=(np.linspace(-10., 10., 100),
1.,
np.linspace(-10., 10., 100)),
cmap='Blues', labels=["X", "Z"])
# f.savefig(os.path.join(plot_path, "contour_xz.png"))
plt.show()
plt.close('all')
def test_save_load(self, tmpdir):
"""
Test writing to a YAML file, and reading back in
"""
fn = str(tmpdir.join("{}.yml".format(self.name)))
self.potential.save(fn)
p = load(fn)
p.energy(self.w0[:self.w0.size//2])
p.gradient(self.w0[:self.w0.size//2])
def test_numerical_gradient_vs_gradient(self):
"""
Check that the value of the implemented gradient function is close to a
numerically estimated value. This is to check the coded-up version.
"""
dx = 1E-3 * np.sqrt(np.sum(self.w0[:self.w0.size//2]**2))
max_x = np.sqrt(np.sum([x**2 for x in self.w0[:self.w0.size//2]]))
grid = np.linspace(-max_x,max_x,8)
grid = grid[grid != 0.]
grids = [grid for i in range(self.w0.size//2)]
xyz = np.ascontiguousarray(np.vstack(map(np.ravel, np.meshgrid(*grids))).T)
def energy_wrap(xyz):
xyz = np.ascontiguousarray(xyz[None])
return self.potential._energy(xyz, t=np.array([0.]))[0]
num_grad = | np.zeros_like(xyz) | numpy.zeros_like |
import numpy as np
from gym import utils
from gym.envs.mujoco import mujoco_env
import os
class StrikerEnv(mujoco_env.MujocoEnv, utils.EzPickle):
def __init__(self):
self.ball = | np.array([0.5, -0.3]) | numpy.array |
import numpy as np
def rotX(theta):
return np.array([[1, 0, 0]
, [0, np.cos(theta), -np.sin(theta)]
, [0, np.sin(theta), np.cos(theta)]])
def rotY(theta):
return np.array([[np.cos(theta), 0, np.sin(theta)]
, [0, 1, 0]
, [-np.sin(theta), 0, np.cos(theta)]])
def rotZ(theta):
return np.array([[np.cos(theta), -np.sin(theta), 0]
, [np.sin(theta), np.cos(theta), 0]
, [0, 0, 1]])
def euler_matrix(x, y, z):
return rotX(x).dot(rotY(y)).dot(rotZ(z))
def vector_slerp(v1, v2, fraction):
perp_v = np.cross(v1, v2)
# perp_v /= np.linalg.norm(perp_v)
angle = np.arccos(np.dot(v1,v2)/(np.linalg.norm(v1)*np.linalg.norm(v2))) * fraction
return rotation_matrix(angle, perp_v).dot(v1)
def unit_vector(v):
return v/ | np.linalg.norm(v) | numpy.linalg.norm |
import numpy as np
import matplotlib.pyplot as plt
import math
import scipy.optimize
import frontend.stock_analytics as salib
import numba as nb
from numba import jit
@jit(nb.types.UniTuple(nb.float64[:],2)(nb.float64,nb.float64,nb.int32,nb.float64), nopython=True, nogil=True, cache=True)
def generate_series_parameters(g_omega, g_beta, K=15, b=5.):
k = np.arange(0,K,1)
omegak = g_omega/(b**k)
a = omegak**g_beta
a /= np.sum(a)
return omegak, a
def c_exp_series_wrap(tau, g, g_omega, g_beta, K=15, b=5.):
omegak, a = generate_series_parameters(g_omega, g_beta, K, b)
return c_exp_series(tau, g, omegak, a)
@jit(nb.float64[:](nb.float64[:], nb.float64, nb.float64[:],nb.float64[:]), nopython=True, nogil=True, cache=True)
def c_exp_series(tau, c, omegak, a):
    return c * np.sum(np.multiply(np.multiply(np.exp(-np.outer(omegak, tau)).T, omegak), a), axis=1)
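# Illustrative sketch (hypothetical values, not from the original module):
# c_exp_series_wrap approximates a slowly decaying kernel by a weighted sum of
# K exponentials with rates g_omega / b**k; the numba signatures above expect
# float64 arrays for tau.
def _example_c_exp_series():
    tau = np.logspace(-1.0, 2.0, 50)
    return c_exp_series_wrap(tau, 0.5, 1.0, 0.6, K=15, b=5.)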
def dobins(ts_array, N = 1000, x_bins=None, useinteger=False, stepsize=None, ignoreabove=False):
ts_array.sort()
if x_bins is None:
if useinteger:
minp = math.floor(ts_array[0])
maxp = math.ceil(ts_array[-1])
steps = stepsize if stepsize is not None else np.ceil((maxp-minp)/N)
x_bins = np.arange(minp, maxp+2, steps)
else:
if stepsize is None:
stepsize = (ts_array[-1]-ts_array[0])/N
x_bins = np.arange(ts_array[0], ts_array[-1]+2*stepsize, stepsize)
stepsize = x_bins[1]-x_bins[0]
N = len(x_bins)-1
dt = x_bins[1]-x_bins[0]
y_bins = np.zeros(len(x_bins))
unique, counts = np.unique(np.floor((ts_array-x_bins[0])/dt), return_counts=True)
if ignoreabove:
for a,b, in zip(unique.astype(int), counts):
if a < len(y_bins):
y_bins[a] = b
else:
y_bins[unique.astype(int)] = counts#[:-1]
while not ignoreabove and x_bins[-1] >= ts_array[-1]:
x_bins = x_bins[:-1]
y_bins = y_bins[:-1]
x_bins += stepsize/2.
E = y_bins.mean()
V = y_bins.var()
return x_bins, y_bins, V/E
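# Illustrative sketch (hypothetical data, not from the original module): dobins
# bins event times and returns bin centres, per-bin counts and the
# variance-to-mean ratio of the counts (close to 1 for Poisson-like arrivals).
def _example_dobins():
    rng = np.random.RandomState(0)
    ts = np.sort(rng.uniform(0., 1000., 5000))
    x_bins, y_bins, ratio = dobins(ts, stepsize=1.0)
    return ratio  # expected to be close to 1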
def print_stats(ats_array, tau = np.logspace(-1,3,20), N=1000, splitpoint=None,stepsize_hist=2.):
if len(ats_array) > 20:
ats_array = [ats_array]
plt.rcParams['figure.figsize'] = (15, 15)
grid = plt.GridSpec(3, 3, wspace=0.4, hspace=0.3)
for kts_array in ats_array:
if type(kts_array) is tuple:
ts_array = kts_array[1]
label = kts_array[0]
else:
ts_array = kts_array
plt.subplot(grid[0, 0:2])
x_bins, y_bins, _ = dobins(ts_array, N = N)
plt.plot(x_bins, y_bins, label=label)
plt.legend()
plt.subplot(grid[0, 2])
if splitpoint is not None:
y_bins1 = y_bins[:int(splitpoint*len(y_bins))]
y_bins2 = y_bins[int(splitpoint*len(y_bins)):]
a_bins1, b_bins1, _ = dobins(y_bins1, useinteger=True, N = 25)
a_bins2, b_bins2, _ = dobins(y_bins2, useinteger=True, N = 25)
plt.plot(b_bins1, a_bins1, label=label)
plt.plot(b_bins2, a_bins2, label=label)
print('(1) V =',y_bins1.var(),'; E =',y_bins1.mean(),'; V/E =', y_bins1.var()/y_bins1.mean())
print('(2) V =',y_bins2.var(),'; E =',y_bins2.mean(),'; V/E =', y_bins2.var()/y_bins2.mean())
a_bins, b_bins, _ = dobins(y_bins, useinteger=True, stepsize=stepsize_hist)
plt.plot(b_bins, a_bins, label=label)
print('V =',y_bins.var(),'; E =',y_bins.mean(),'; V/E =', y_bins.var()/y_bins.mean())
plt.subplot(grid[1, :])
r = calc_r_tau(ts_array, tau)
f = lambda tau,beta,A: A/(tau**beta)
fitted = scipy.optimize.curve_fit(f, tau,np.sqrt(1/r))
plt.loglog(tau,np.sqrt(1/r) , label=label)
plt.loglog(tau,f(tau, fitted[0][0], fitted[0][1]), label=label+' fitted' )
plt.legend()
plt.subplot(grid[2, :])
plt.plot(tau,r , label=label)
plt.legend()
plt.show()
plt.rcParams['figure.figsize'] = (15, 5)
def calc_r_tau(ts_array, tau):
r = np.zeros(len(tau))
for i in range(0,len(tau)):
_,_,rr = dobins(ts_array, stepsize=tau[i])
r[i] = rr
return r
g_cache_dict = {}
@jit(nb.float64(nb.float64, nb.float64[:], nb.int64, nb.float64[:],nb.float64[:]), nopython=True, nogil=True, cache=True)
def c_exp_series_sum(t, tau, uptoi, omegak, a):
    return np.sum(np.multiply(np.multiply(np.exp(-np.outer(omegak, t - tau[:uptoi])).T, omegak), a))
@jit(nb.float64[:](nb.float64, nb.types.UniTuple(nb.float64,3),nb.int64,nb.float64[:,:],nb.int64,nb.boolean,nb.boolean), nopython=True, nogil=True, cache=True)
def simulate_by_thinning_nocache(phi_dash, g_params, K, news_params, N = 250000, reseed=True, status_update=True):
# Initialize parameters
g, g_omega, g_beta = g_params
phi_0 = phi_dash * (1-g)
omegak, a = generate_series_parameters(g_omega, g_beta, K, b=5.)
if reseed:
np.random.seed(124)
#salib.tic()
i = randi1i = randi2i = 0
t = 0.
randpool1 = - np.log(np.random.rand(100*N))
randpool2 = | np.random.rand(100*N) | numpy.random.rand |
"""Data utils functions for pre-processing and data loading."""
import os
import pickle as pkl
import sys
import networkx as nx
import numpy as np
import scipy.sparse as sp
import torch
from scipy import sparse
import logging
def load_data(args, datapath):
## Load data
data = load_data_lp(args.dataset, args.use_feats, datapath)
adj = data['adj_train']
## TAKES a lot of time
if args.node_cluster == 1:
task = 'nc'
else:
task = 'lp'
cached_dir = os.path.join('/root/tmp', task, args.dataset,
f"seed{args.split_seed}-val{args.val_prop}-test{args.test_prop}")
if not os.path.isdir(cached_dir):
logging.info(f"Caching at `{cached_dir}`randomly masked edges")
os.makedirs(cached_dir, exist_ok=True)
adj_train, train_edges, train_edges_false, val_edges, val_edges_false, test_edges, test_edges_false = mask_edges(
adj, args.val_prop, args.test_prop, args.split_seed
)
if args.val_prop + args.test_prop > 0:
torch.save(val_edges, os.path.join(cached_dir, 'val_edges.pth'))
torch.save(val_edges_false, os.path.join(cached_dir, 'val_edges_false.pth'))
torch.save(test_edges, os.path.join(cached_dir, 'test_edges.pth'))
torch.save(test_edges_false, os.path.join(cached_dir, 'test_edges_false.pth'))
torch.save(train_edges, os.path.join(cached_dir, 'train_edges.pth'))
torch.save(train_edges_false, os.path.join(cached_dir, 'train_edges_false.pth'))
sparse.save_npz(os.path.join(cached_dir, "adj_train.npz"), adj_train)
st0 = np.random.get_state()
np.save(os.path.join(cached_dir, 'np_state.npy'), st0)
else:
logging.info(f"Loading from `{cached_dir}` randomly masked edges")
if args.val_prop + args.test_prop > 0:
val_edges = torch.load(os.path.join(cached_dir, 'val_edges.pth'))
val_edges_false = torch.load(os.path.join(cached_dir, 'val_edges_false.pth'))
test_edges = torch.load(os.path.join(cached_dir, 'test_edges.pth'))
test_edges_false = torch.load(os.path.join(cached_dir, 'test_edges_false.pth'))
adj_train = sparse.load_npz(os.path.join(cached_dir, "adj_train.npz"))
train_edges = torch.load(os.path.join(cached_dir, 'train_edges.pth'))
train_edges_false = torch.load(os.path.join(cached_dir, 'train_edges_false.pth'))
        st0 = np.load(os.path.join(cached_dir, 'np_state.npy'), allow_pickle=True)
np.random.set_state(st0)
## TAKES a lot of time
data['adj_train'] = adj_train
data['train_edges'], data['train_edges_false'] = train_edges, train_edges_false
if args.val_prop + args.test_prop > 0:
data['val_edges'], data['val_edges_false'] = val_edges, val_edges_false
data['test_edges'], data['test_edges_false'] = test_edges, test_edges_false
all_info=""
## Adj matrix
adj = data['adj_train']
data['adj_train_enc'], data['features'] = process(
data['adj_train'], data['features'], args.normalize_adj, args.normalize_feats
)
if args.lambda_rec:
data['adj_train_dec'] = rowwise_normalizing(data['adj_train'])
adj_2hop = get_adj_2hop(adj)
data['adj_train_enc_2hop'] = symmetric_laplacian_smoothing(adj_2hop)
# NOTE : Re-adjust labels
# Some data omit `0` class, thus n_classes are wrong with `max(labels)+1`
label_set = set(list(data['labels'].numpy()))
label_convert_table = {list(label_set)[i]:i for i in range(len(label_set))}
for label_prev, label_now in label_convert_table.items():
data['labels'][data['labels']==label_prev] = label_now
args.n_classes = int(data['labels'].max() + 1)
data['idx_all'] = range(data['features'].shape[0])
data_info = "Dataset {} Loaded : dimensions are adj:{}, edges:{}, features:{}, labels:{}\n".format(
args.dataset, data['adj_train'].shape, data['adj_train'].sum(), data['features'].shape, data['labels'].shape)
data['info'] = data_info
return data
def process(adj, features, normalize_adj, normalize_feats):
if sp.isspmatrix(features):
features = np.array(features.todense())
if normalize_feats:
features = normalize(features)
features = torch.Tensor(features)
if normalize_adj:
adj = normalize(adj + sp.eye(adj.shape[0]))
return adj, features
def get_adj_2hop(adj):
adj_self = adj + sp.eye(adj.shape[0])
adj_2hop = adj_self.dot(adj_self)
adj_2hop.data = np.clip(adj_2hop.data, 0, 1)
adj_2hop = adj_2hop - sp.eye(adj.shape[0]) - adj
return adj_2hop
def normalize(mx):
"""Row-normalize sparse matrix."""
rowsum = np.array(mx.sum(1))
r_inv = np.power(rowsum, -1).flatten()
r_inv[np.isinf(r_inv)] = 0.
r_mat_inv = sp.diags(r_inv)
mx = r_mat_inv.dot(mx)
return mx
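# Illustrative sketch (hypothetical matrix, not from the original module):
# row-normalisation rescales each row to sum to 1 and leaves all-zero rows
# untouched.
def _example_normalize():
    mx = sp.csr_matrix(np.array([[1., 3.], [0., 0.]]))
    return normalize(mx).toarray()  # [[0.25, 0.75], [0., 0.]]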
def symmetric_laplacian_smoothing(adj):
"""Symmetrically normalize adjacency matrix."""
adj = adj + sp.eye(adj.shape[0]) # self-loop
adj = sp.coo_matrix(adj)
rowsum = np.array(adj.sum(1))
d_inv_sqrt = np.power(rowsum, -0.5).flatten()
d_inv_sqrt[np.isinf(d_inv_sqrt)] = 0.
d_mat_inv_sqrt = sp.diags(d_inv_sqrt)
return adj.dot(d_mat_inv_sqrt).transpose().dot(d_mat_inv_sqrt).tocoo()
def rowwise_normalizing(adj):
"""Row-wise normalize adjacency matrix."""
adj = adj + sp.eye(adj.shape[0]) # self-loop
adj = sp.coo_matrix(adj)
rowsum = np.array(adj.sum(1))
d_inv = np.power(rowsum, -1.0).flatten()
d_inv[np.isinf(d_inv)] = 0.
d_mat_inv = sp.diags(d_inv)
return adj.dot(d_mat_inv).transpose().tocoo()
def sparse_mx_to_torch_sparse_tensor(sparse_mx):
"""Convert a scipy sparse matrix to a torch sparse tensor."""
sparse_mx = sparse_mx.tocoo()
indices = torch.from_numpy(
np.vstack((sparse_mx.row, sparse_mx.col)).astype(np.int64)
)
values = torch.Tensor(sparse_mx.data)
shape = torch.Size(sparse_mx.shape)
return torch.sparse.FloatTensor(indices, values, shape)
def mask_edges(adj, val_prop, test_prop, seed):
np.random.seed(seed) # get tp edges
x, y = sp.triu(adj).nonzero()
pos_edges = np.array(list(zip(x, y)))
np.random.shuffle(pos_edges)
# get tn edges
x, y = sp.triu(sp.csr_matrix(1. - adj.toarray())).nonzero() # LONG
neg_edges = np.array(list(zip(x, y))) # EVEN LONGER
| np.random.shuffle(neg_edges) | numpy.random.shuffle |
import numpy as np
import os
import re
import requests
import sys
import time
from netCDF4 import Dataset
import pandas as pd
from bs4 import BeautifulSoup
from tqdm import tqdm
# setup constants used to access the data from the different M2M interfaces
BASE_URL = 'https://ooinet.oceanobservatories.org/api/m2m/' # base M2M URL
SENSOR_URL = '12576/sensor/inv/' # Sensor Information
# setup access credentials
AUTH = ['OOIAPI-853A3LA6QI3L62', '<KEY>']
def M2M_Call(uframe_dataset_name, start_date, end_date):
options = '?beginDT=' + start_date + '&endDT=' + end_date + '&format=application/netcdf'
r = requests.get(BASE_URL + SENSOR_URL + uframe_dataset_name + options, auth=(AUTH[0], AUTH[1]))
if r.status_code == requests.codes.ok:
data = r.json()
else:
return None
# wait until the request is completed
print('Waiting for OOINet to process and prepare data request, this may take up to 20 minutes')
url = [url for url in data['allURLs'] if re.match(r'.*async_results.*', url)][0]
check_complete = url + '/status.txt'
with tqdm(total=400, desc='Waiting') as bar:
for i in range(400):
r = requests.get(check_complete)
bar.update(1)
if r.status_code == requests.codes.ok:
bar.n = 400
bar.last_print_n = 400
bar.refresh()
                elapsed = (i * 3) / 60
                print('\nrequest completed in %f minutes.' % elapsed)
break
else:
time.sleep(3)
elapsed = (i * 3) / 60
return data
def M2M_Files(data, tag=''):
"""
Use a regex tag combined with the results of the M2M data request to collect the data from the THREDDS catalog.
    The matching files are collected into a list for further processing.
:param data: JSON object returned from M2M data request with details on where the data is to be found for download
:param tag: regex tag to use in discriminating the data files, so we only collect the correct ones
    :return: list of files in the THREDDS catalog that match the tag
"""
# Create a list of the files from the request above using a simple regex as a tag to discriminate the files
url = [url for url in data['allURLs'] if re.match(r'.*thredds.*', url)][0]
files = list_files(url, tag)
return files
def list_files(url, tag=''):
"""
Function to create a list of the NetCDF data files in the THREDDS catalog created by a request to the M2M system.
:param url: URL to user's THREDDS catalog specific to a data request
:param tag: regex pattern used to distinguish files of interest
:return: list of files in the catalog with the URL path set relative to the catalog
"""
page = requests.get(url).text
soup = BeautifulSoup(page, 'html.parser')
pattern = re.compile(tag)
return [node.get('href') for node in soup.find_all('a', text=pattern)]
def M2M_Data(nclist,variables):
thredds = 'https://opendap.oceanobservatories.org/thredds/dodsC/ooi/'
#nclist is going to contain more than one url eventually
for jj in range(len(nclist)):
url=nclist[jj]
url=url[25:]
dap_url = thredds + url + '#fillmismatch'
openFile = Dataset(dap_url,'r')
for ii in range(len(variables)):
dum = openFile.variables[variables[ii].name]
variables[ii].data = np.append(variables[ii].data, dum[:].data)
tmp = variables[0].data/60/60/24
time_converted = pd.to_datetime(tmp, unit='D', origin=pd.Timestamp('1900-01-01'))
return variables, time_converted
class var(object):
def __init__(self):
"""A Class that generically holds data with a variable name
and the units as attributes"""
self.name = ''
self.data = np.array([])
self.units = ''
def __repr__(self):
return_str = "name: " + self.name + '\n'
return_str += "units: " + self.units + '\n'
return_str += "data: size: " + str(self.data.shape)
return return_str
class structtype(object):
def __init__(self):
""" A class that imitates a Matlab structure type
"""
self._data = []
def __getitem__(self, index):
"""implement index behavior in the struct"""
if index == len(self._data):
self._data.append(var())
return self._data[index]
def __len__(self):
return len(self._data)
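# Illustrative sketch (hypothetical helper, not from the original module):
# structtype grows on demand -- indexing one past the current length appends a
# fresh var() -- which is how M2M_URLs below fills var_list[i].name/.data/.units.
def _example_structtype():
    v = structtype()
    v[0].name = 'time'
    v[0].units = 'seconds since 1900-01-01'
    v[1].name = 'sea_surface_temperature'
    return len(v)  # 2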
def M2M_URLs(platform_name,node,instrument_class,method):
var_list = structtype()
#MOPAK
if platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#METBK
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
#FLORT
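# FLORT: three-wavelength fluorometer (chlorophyll-a, CDOM, and optical backscatter)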
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/06-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/06-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/04-FLORTK000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
#FDCHP
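# FDCHP: direct covariance flux package; only the timestamp is parsed from this stream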
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/08-FDCHPA000/telemetered/fdchp_a_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#DOSTA
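# DOSTA: dissolved oxygen (optode-based sensors on the fixed moorings)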
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/02-DOFSTK000/telemetered/dofst_k_wfp_instrument'
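# Note: the CE09OSPM wire-following profiler carries a fast-response DOFST oxygen sensor rather than an optode;
# dofst_k_oxygen is the raw sensor output (Hz) and dofst_k_oxygen_l2 is the derived concentration (umol/kg)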
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
#ADCP
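# ADCP: acoustic Doppler current profiler (earth-referenced velocity profiles plus heading, pitch, and roll)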
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/01-ADCPTA000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/01-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/01-ADCPTA000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/01-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/04-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/04-ADCPSJ000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
#ZPLSC
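# ZPLSC: bio-acoustic sonar; only the timestamp is parsed from the zplsc_c streams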
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#WAVSS
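# WAVSS: surface wave spectra (bulk wave statistics: heights, periods, direction, and spread)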
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
#VELPT
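# VELPT: single point velocity meter (point velocities plus heading, pitch, roll, temperature, and pressure)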
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
#PCO2W
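# PCO2W: partial pressure of CO2 in seawater (thermistor temperature and pCO2)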
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
#PHSEN
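# PHSEN: seawater pH (thermistor temperature and pH)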
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
#SPKIR
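# SPKIR: multispectral downwelling irradiance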
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
#PRESF
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/02-PRESFA000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/02-PRESFA000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/02-PRESFB000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/02-PRESFC000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
#CTDBP
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/06-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/06-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/03-CTDBPE000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
#VEL3D
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
#VEL3DK
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/01-VEL3DK000/telemetered/vel3d_k_wfp_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/03-CTDPFK000/telemetered/ctdpf_ckl_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
#PCO2A
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
#PARAD
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/05-PARADK000/telemetered/parad_k__stc_imodem_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
#OPTAA
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/01-OPTAAC000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#NUTNR
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
##
#MOPAK
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/SBD17/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSPM/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#METBK
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'mm'
var_list[12].units = 'W/m2'
var_list[13].units = 'W/m2'
var_list[14].units = 'W/m2'
var_list[15].units = 'W/m2'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])  # api: numpy.array
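# --- Added illustration (not part of the original dataset row) ---------------
# The elif chain above maps a (platform_name, node, instrument_class, method)
# tuple onto a uFrame stream path plus a set of pre-allocated variable slots.
# A minimal sketch of the same mapping as a lookup table is shown below. The
# two stream strings, variable names and units are copied from branches above;
# the table layout and the VarSpec helper are assumptions made only for
# illustration, not part of the original module.
from collections import namedtuple
VarSpec = namedtuple('VarSpec', ['name', 'units'])
STREAM_TABLE = {
    ('CE01ISSM', 'NSIF', 'SPKIR', 'Telemetered'): (
        'CE01ISSM/RID16/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument',
        [VarSpec('time', 'seconds since 1900-01-01'),
         VarSpec('spkir_abj_cspp_downwelling_vector', 'uW cm-2 nm-1')]),
    ('CE01ISSM', 'MFN', 'PRESF', 'Telemetered'): (
        'CE01ISSM/MFD35/02-PRESFA000/telemetered/presf_abc_dcl_tide_measurement',
        [VarSpec('time', 'seconds since 1900-01-01'),
         VarSpec('abs_seafloor_pressure', 'dbar'),
         VarSpec('seawater_temperature', 'degC')]),
}
stream, variables = STREAM_TABLE[('CE01ISSM', 'NSIF', 'SPKIR', 'Telemetered')]
for v in variables:
    print(stream, v.name, v.units)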
#!/usr/bin/env python3
"""
Created on Tue Jun 16 15:19:24 2020
@author: Barney
"""
import os
import geopandas as gpd
import pandas as pd
from numpy import linspace, zeros, diff, where, NaN
from shapely.geometry import Point, LineString
from shutil import copyfile
import sys
from glob import glob
def aggregate(root: str,
partition_root: str,
partition: str,
catchment: str = "cranbrook",
dt_sim = 5,
demo = False):
""" Aggregates partitions into compartments.
Parameters: catchment can be "cranbrook" or "norwich"
Returns a file
"""
E = 2.71828
DT_ARC = pd.Timedelta('1s') #i.e. assumes arc flows are given in average m3/s over timestep
DT_RAW = pd.Timedelta('1m') #Timestep of the infoworks files (both shp and results)
DT_SIM = pd.Timedelta(value = dt_sim, unit = 'm') #Needs to match the same DT_SIM in orchestration
M3_S_TO_M3_DT = DT_SIM / pd.Timedelta('1s')
S_TO_DT = 1 / DT_SIM.total_seconds() #Convert seconds to timestep
SURFACE_TRAVEL_TIME = 5 * 60 # seconds
TIMEAREA_RESOLUTION = 1000 #Resolution (s) at which timearea diagrams of individual nodes are superimposed at
CRS = "EPSG:27700"
# catchment = "cranbrook" # can be 'cranbrook' or 'norwich'
driver = 'GeoJSON'
extension = '.geojson'
sim_start = '2000-01-01' #Misc start date that detailed simulation results are assumed to start at if actual date isn't given
sim_rains = ["january","august"]
"""Load data
"""
data_root = os.path.join(root, catchment,"processed")
raw_root = os.path.join(root, catchment,"raw")
if not demo:
edges_fid = os.path.join(data_root,"edges_clean.geojson")
else:
edges_fid = os.path.join(data_root,"edges_gdf_scrambled.geojson")
edges_gdf = gpd.read_file(edges_fid)
edges_gdf.capacity = edges_gdf.capacity.astype(float)
nodes_fid = os.path.join(partition_root,"partitions.geojson")
nodes_gdf = gpd.read_file(nodes_fid).rename(columns={'area' : 'surf_area'})
info_dir = "info_sims"
info_dir = os.path.join(raw_root,info_dir)
if not demo:
#InfoWorks node simulation for initial storage only
rain = "january"
info_volume_fid = os.path.join(info_dir,rain, "volume.csv")
info_volume_df = pd.read_csv(info_volume_fid)
del nodes_fid, edges_fid
edges_gdf['travel_time'] = 0
ind = edges_gdf.capacity > 0
edges_gdf.loc[ind,'travel_time'] = edges_gdf.loc[ind,'length'] * edges_gdf.loc[ind, 'cross_sec'] / edges_gdf.loc[ind,'capacity'] #s
edges_gdf.capacity *= M3_S_TO_M3_DT
"""Initial storage
"""
if not demo:
nodes_gdf = nodes_gdf.set_index('node_id').join(info_volume_df.iloc[0]).rename(columns={info_volume_df.iloc[0].name : 'initial_storage'}).reset_index()
nodes_gdf.initial_storage = nodes_gdf.initial_storage.fillna(0)
else:
nodes_gdf['initial_storage'] = 0
# nodes_gdf.storage = nodes_gdf.storage - nodes_gdf.initial_storage
# nodes_gdf = nodes_gdf.drop('initial_storage', axis=1)
"""Classify edges within compartments
"""
edges_gdf['storage'] = edges_gdf['length'] * edges_gdf['cross_sec']
for partition_type in [partition]:
partition_folder = os.path.join(partition_root, partition_type)
if not os.path.exists(partition_folder):
os.mkdir(partition_folder)
partition_folder = os.path.join(partition_folder, "_".join(["sim","dt",str(int(DT_SIM.total_seconds())),"s"]))
if not os.path.exists(partition_folder):
os.mkdir(partition_folder)
edges_partition = pd.merge(edges_gdf, nodes_gdf[['node_id',partition_type]],left_on='us_node_id',right_on='node_id')
edges_partition = edges_partition.drop('node_id',axis=1).rename(columns={partition_type : 'start_comp'})
edges_partition = pd.merge(edges_partition, nodes_gdf[['node_id',partition_type]],left_on='ds_node_id',right_on='node_id')
edges_partition = edges_partition.drop('node_id',axis=1).rename(columns={partition_type : 'end_comp'})
ind = edges_partition.end_comp == edges_partition.start_comp
edges_partition.loc[ind, 'compart_id'] = edges_partition.loc[ind, 'start_comp']
edges_between = edges_partition.loc[~ind]
"""Aggregate nodes to compartment
"""
def f(x):
d = {}
d['node_stor'] = x['storage'].sum()
d['initial_storage'] = x['initial_storage'].sum()
d['surf_area'] = x['surf_area'].sum()
d['run_coef'] = (x['surf_area'] * x['run_coef'] / d['surf_area']).sum()
d['geometry'] = Point((x.geometry.x).mean(),(x.geometry.y).mean())
d['chamber_ar'] = x['chamber_ar'].sum()
return pd.Series(d)
compartment_nodes = nodes_gdf.groupby(partition_type).apply(f).reset_index().rename(columns={partition_type : 'compart_id'})
#Add in pipe storage
pipe_stor = edges_partition.groupby('compart_id').sum().storage.reset_index()
compartment_nodes = pd.merge(compartment_nodes, pipe_stor.rename(columns={'storage':'pipe_stor'}), on = 'compart_id', how="outer")
compartment_nodes.pipe_stor = compartment_nodes.pipe_stor.fillna(0)
compartment_nodes['storage'] = compartment_nodes[['node_stor','pipe_stor']].sum(axis=1)
del pipe_stor
"""Calculate time area graph for each compartment
"""
compartment_nodes['timearea'] = None
compartment_nodes['timearea_pipe'] = None
compartment_nodes['timearea_surface'] = None
compartment_nodes['pipe_time'] = 0
def update_timearea(timeareas, cumul_pipe_travel, outfall, nodes, edges):
#Recursive function to trace upstream and create timearea graph
node = outfall.us_node_id
row = outfall
if node not in timeareas.keys():
cumul_pipe_travel += row.travel_time # s
timeareas[node] = {}
timeareas[node]['pipe_time'] = cumul_pipe_travel
timeareas[node]['surface_time'] = SURFACE_TRAVEL_TIME
timeareas[node]['surf_area'] = nodes.loc[node,['run_coef','surf_area']].prod()
outfalls = edges.loc[edges.ds_node_id == node]
for idx, edge in outfalls.iterrows():
update_timearea(timeareas, cumul_pipe_travel, edge, nodes, edges)
for compart_id in compartment_nodes.compart_id:
in_elev = None
out_elev = None
nodes_in_compartment = nodes_gdf.loc[nodes_gdf[partition_type] == compart_id].set_index('node_id')
edges_in_compartment = edges_partition.loc[edges_partition.start_comp == compart_id]
edges_in_compartment_ = edges_partition.loc[edges_partition.end_comp == compart_id]
if edges_in_compartment.size > 0:
outfalls = edges_between.loc[edges_between.info_id.isin(edges_in_compartment.info_id)].copy()
if out_elev is None:
out_elev = nodes_in_compartment.loc[outfalls.us_node_id, 'chamber_fl'].mean()
compartment_nodes.loc[compart_id,'out_elev'] = out_elev
outfalls_ = edges_between.loc[edges_between.info_id.isin(edges_in_compartment_.info_id)].copy()
if in_elev is None:
in_elev = nodes_in_compartment.loc[outfalls_.ds_node_id,'chamber_fl'].mean()
compartment_nodes.loc[compart_id,'in_elev'] = in_elev
outfalls.travel_time = 0 #Ignore edges between travel time (since they will be modelled live)
timeareas = {}
for idx, edge in outfalls.iterrows():
cumul_pipe_travel = 0
update_timearea(timeareas, cumul_pipe_travel, edge, nodes_in_compartment, edges_in_compartment)
timeareas = pd.DataFrame(timeareas).T
times = linspace(0,timeareas.pipe_time.max() + timeareas.surface_time.max(),TIMEAREA_RESOLUTION)
areas = zeros(times.shape)
"""Pipe flow timearea diagram
"""
#Superimpose timearea diagram
for idx, row in timeareas.iterrows():
ind_start = where(times >= row.pipe_time)[0][0]
ind_end = where(times >= (row.pipe_time + row.surface_time))[0][0]
areas[ind_start : ind_end] += linspace(0,row.surf_area, ind_end - ind_start)
areas[ind_end:] += row.surf_area
#Calculate timearea diagram for entry to pipe network
surface_areas = zeros(times.shape)  # api: numpy.zeros
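# --- Added illustration -------------------------------------------------------
# The loop above builds the pipe-flow time-area diagram by superimposing one
# linear ramp per upstream node onto a shared time axis; `surface_areas` on the
# line above is presumably filled the same way using only the surface travel
# time. The standalone sketch below demonstrates that superposition technique
# with made-up numbers; it is not the original function's continuation.
import numpy as _np
def _superimpose_timearea(times_s, ramps):
    """ramps: iterable of (start_s, duration_s, area_m2) contributions."""
    total = _np.zeros(times_s.shape)
    for start, duration, area in ramps:
        i0 = _np.searchsorted(times_s, start)
        i1 = _np.searchsorted(times_s, start + duration)
        total[i0:i1] += _np.linspace(0, area, i1 - i0)
        total[i1:] += area
    return total
# Two subcatchments (100 m2 and 50 m2) reaching the outfall 0 s and 120 s later:
_t = _np.linspace(0, 900, 1000)
_ta = _superimpose_timearea(_t, [(0, 300, 100.0), (120, 300, 50.0)])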
import numpy as np
class Quaternion:
""" Classe pour les rotations 3D via les quaternions """
@classmethod
def from_v_theta(cls, v, theta):
""" Fonction pour passer de v et thêta au quaternion normalisé """
v = np.asarray(v) # Convertis l'entrée (int) en (array)
v = v * np.sin(theta / 2) / np.sqrt(np.sum(v * v)) # v = v*sin(x/2) / ||v||
x = np.ones(4) # x = [1, 1, 1, 1]
x[0], x[1], x[2], x[3] = np.cos(theta / 2), v[0], v[1], v[2] # x = [x[0], v[0], v[1], v[2]]
return cls(x)
def __init__(self, x):
self.x = np.asarray(x) # Convert the input to an array
''' Multiply two quaternions.
Used to rotate the cube with the mouse '''
def __mul__(self, other):
sxr = self.x.reshape(4, 1) # sxr is x laid out as a column
oxr = other.x.reshape(1, 4) # oxr is x laid out as a row
prod = sxr * oxr # [4,4] matrix: product of sxr and oxr
# Final product
ret = np.array([(prod[0, 0] - prod[1, 1] - prod[2, 2] - prod[3, 3]),
(prod[0, 1] + prod[1, 0] + prod[2, 3] - prod[3, 2]),
(prod[0, 2] - prod[1, 3] + prod[2, 0] + prod[3, 1]),
(prod[0, 3] + prod[1, 2] - prod[2, 1] + prod[3, 0])]).T
return self.__class__(ret)
''' Convert the normalized quaternion back to v and theta '''
def as_v_theta(self):
x = self.x.reshape(4, -1) # Reshape x so each of its four components sits on its own row
# Compute theta
norm = np.sqrt((x ** 2).sum(0)) # norm of x
theta = 2 * np.arccos(x[0] / norm) # theta itself
# Compute v
v = np.array(x[1:]) # Build v from the last three components of x
v /= np.sqrt(np.sum(v ** 2)) # Normalize v (v is a row matrix)
v = v.T # Transpose v (v is now a column)
return v, theta
""" Renvoie la matrice de rotation du quaternion normalisé """
def as_rotation_matrix(self):
v, theta = self.as_v_theta() # Convertis x en v et thêta
v = v.T # Transpose v
c = np.cos(theta) # cos(theta)
c1 = 1. - c # 1 - cos(theta)
s = np.sin(theta) # sin(theta)
# Matrice de rotation
mat = np.array([[v[0] ** 2 * c1 + c, v[0] * v[1] * c1 - v[2] * s, v[0] * v[2] * c1 + v[1] * s],
[v[1] * v[0] * c1 + v[2] * s, v[1] ** 2 * c1 + c, v[1] * v[2] * c1 - v[0] * s],
[v[2] * v[0] * c1 - v[1] * s, v[2] * v[1] * c1 + v[0] * s, v[2] ** 2 * c1 + c]]).T
return mat.reshape(3, 3)
def rotate(self, points):
M = self.as_rotation_matrix() # M is the rotation matrix
return np.dot(points, M.T) # Dot product of the points with the rotation matrix
def project_points(points, q, view, vertical=None):
if vertical is None:
vertical = [0, 1, 0] # Default vertical axis
view = np.asarray(view) # Convert the input to an array
xdir = np.cross(vertical, view).astype(float) # New unit vector for the horizontal direction
xdir /= np.sqrt(np.dot(xdir, xdir)) # Normalize
ydir = np.cross(view, xdir) # New unit vector for the vertical direction
ydir /= np.sqrt(np.dot(ydir, ydir)) # Normalize
zdir = view / np.sqrt(np.dot(view, view)) # New unit vector along the viewing direction
R = q.as_rotation_matrix() # Rotation matrix for the points
# Project the points onto the view
dpoint = np.dot(points, R.T) - view
dpoint_view = np.dot(dpoint, view)  # api: numpy.dot
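# --- Added usage sketch ---------------------------------------------------------
# A minimal check of the Quaternion class above: rotate the x-axis by 90 degrees
# about z. `project_points` is cut off above, so it is not exercised here, and the
# sketch assumes the class definition above is complete.
if __name__ == '__main__':
    q90 = Quaternion.from_v_theta([0, 0, 1], np.pi / 2)
    pts = np.array([[1.0, 0.0, 0.0]])
    # Maps the x-axis onto the ±y-axis; the sign follows the active/passive
    # convention implied by the transposes in as_rotation_matrix above.
    print(q90.rotate(pts).round(6))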
# -*- coding: utf-8 -*-
'''codetovec.py
PyCodeVectors converts Python code to encoded vectors using multiprocessing
Todo:
*
'''
import glob
import io
import os
import numpy as np
import string
import multiprocessing
class PyCodeVectors():
'''PyCodeVectors object that converts Python code to one-hot-encoded vectors
Parameters:
source_directory -- directory of the Python code data
encoding -- text file encoding (default ascii)
decode_errors -- decoding error handling (default ignore)
vocabulary -- string containing characters to consider (default string.printable)
sequence_length -- length of each sequence (default 100)
step_size -- number of characters to step to create the next sequence (default 1)
file_extension -- the file extension of the files to use as data (default .py)
pad_token -- token to use as padding (default \x0c)
Attributes:
vocabulary_length -- number of characters in vocabulary
char_to_idx -- dictionary that maps character to one-hot-encoding index
idx_to_char -- dictionary that maps one-hot-encoding index to character
file_list -- list of all files used as data
n_files -- number of files used
source_length -- total number of characters in all the files
'''
def __init__(self,
encoding='ascii',
decode_errors='ignore',
vocabulary=string.printable,
sequence_length=100,
step_size=1,
file_extension='.py',
pad_token='\x0c'):
'''Create a PyCodeVectors object'''
self.source_directory = None
self.encoding = encoding
self.decode_errors = decode_errors
self.vocabulary = vocabulary
self.sequence_length = sequence_length
self.step_size = step_size
self.file_extension = file_extension
self.pad_token = pad_token
self.vocabulary_length = len(self.vocabulary)
self.char_to_idx, self.idx_to_char = self._generate_mapping(
self.vocabulary)
self.file_list = None
self.n_files = None
self.source_length = None
def fit(self, source_directory):
'''Set the object's data directory'''
self.file_list = self._generate_filelist(source_directory)
self.n_files = len(self.file_list)
self.source = self.concatenate_source_code_parallel(self.file_list)
self.source_length = len(self.source)
def transform(self, source_directory, outfile=None, p=1.0):
'''Convert .py files in source directory to feature and target numpy arrays
Save serialized numpy arrays to specified outfile'''
self.source_directory = source_directory
self.file_list = self._generate_filelist(self.source_directory)
# self.n_files = len(self.file_list)
self.n_files = int(p * len(self.file_list))
code_string = self.concatenate_source_code(
self.file_list[:self.n_files])
self.source_length = len(code_string)
X, y = self.generate_dataset(code_string)
if outfile:
if os.path.isdir(os.path.dirname(outfile)):
np.save(outfile + '_X', X)
np.save(outfile + '_y', y)
return X, y
def _generate_filelist(self, directory):
'''Create list of .py files in a specified directory'''
if os.path.isdir(directory):
file_list = glob.iglob(os.path.join(
directory, '**', '*' + self.file_extension), recursive=True)
else:
raise FileNotFoundError(
0, 'Folder %s does not exist' % (directory))
return [f for f in file_list if os.path.isfile(f)]
def _generate_mapping(self, vocab):
'''Create mapping of character to index and index to character'''
idx_to_char = dict(zip(range(len(vocab)), vocab))
char_to_idx = dict(zip(vocab, range(len(vocab))))
return char_to_idx, idx_to_char
def concatenate_source_code(self, file_list):
'''Concatenate all .py files into a single string'''
code_string = ''
for file in file_list:
with io.open(file, 'r', encoding=self.encoding, errors=self.decode_errors) as infile:
code_string += self.pad_token * self.sequence_length + infile.read()
return code_string
def read_source_code_parallel(self, file):
'''Helper for parallel concatenation of all .py files into a single string'''
code_string = ''
with io.open(file, 'r', encoding=self.encoding, errors=self.decode_errors) as infile:
code_string += self.pad_token * self.sequence_length + infile.read()
# return infile.read()
return code_string
def concatenate_source_code_parallel(self, file_list):
'''Concatenate all .py files into a single string using multiple processors'''
pool = multiprocessing.Pool(multiprocessing.cpu_count())
return ''.join(pool.map(self.read_source_code_parallel, file_list))
def vectorize(self, code_string):
'''Non parallel, encode all files as feature and target numpy arrays'''
source_length = len(code_string)
n_samples = source_length - self.sequence_length
X = np.zeros((n_samples, self.sequence_length,
self.vocabulary_length), dtype=bool)
y = np.zeros((n_samples, self.vocabulary_length), dtype=bool)  # api: numpy.zeros
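# --- Added usage sketch -----------------------------------------------------------
# The `vectorize` method is cut off above; `fit`/`transform` are shown in full but
# rely on the rest of the class (e.g. `generate_dataset`) from the original module.
# The call below is only a hypothetical example: the corpus path is a placeholder,
# not a value from the original project.
if __name__ == '__main__':
    vectorizer = PyCodeVectors(sequence_length=100, step_size=1)
    X, y = vectorizer.transform('/path/to/python/corpus', outfile=None, p=0.1)
    # With the default string.printable vocabulary these are one-hot arrays of
    # shape (n_samples, 100, 100) and (n_samples, 100).
    print(X.shape, y.shape)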
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# SPDX-License-Identifier: Apache-2.0
# SPDX-FileCopyrightText: © 2021 Massachusetts Institute of Technology.
# SPDX-FileCopyrightText: © 2021 <NAME> <<EMAIL>>
# NOTICE: authors should document their contributions concisely in NOTICE
# with details inline in source files, comments, and docstrings.
"""
"""
import numpy as np
from ..utilities import ensure_aid
# from .. import fitters_ZPK
def sign_check_flip(fitter):
""" """
xfer = fitter.xfer_fit
data = fitter.data
rat = data / xfer
rat_ang = np.exp(1j * np.angle(rat))
ang_avg_fit = np.sum(rat_ang * fitter.W ** 2) / np.sum(fitter.W ** 2)
if ang_avg_fit.real < 0:
fitter.gain = -fitter.gain
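# --- Added illustration -------------------------------------------------------------
# sign_check_flip above flips the fitted gain whenever the weighted average of
# exp(1j*angle(data/fit)) has a negative real part. The standalone check below
# reproduces that test on synthetic arrays without a fitter object.
if __name__ == '__main__':
    _data = -1.0 * np.ones(8) + 0.01j        # data roughly 180 degrees out of phase
    _xfer_fit = np.ones(8, dtype=complex)    # current fit
    _W = np.ones(8)                          # fit weights
    _rat_ang = np.exp(1j * np.angle(_data / _xfer_fit))
    _ang_avg = np.sum(_rat_ang * _W ** 2) / np.sum(_W ** 2)
    print(_ang_avg.real < 0)                 # True -> the gain sign should be flipped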
def flip_mindelay_opt(aid):
"""
Attempts to flip each non-mindelay zero and then reoptimize
"""
aid = ensure_aid(aid)
# TODO, also deal with real roots
min_idx = 0
while True:
coding_lst = list(aid.fitter.num_codings)
for idx, coding in enumerate(aid.fitter.num_codings):
if idx <= min_idx:
# TODO, change logic to not need this and be faster
continue
zeros = coding.roots_c_Sf()
if zeros:
z = zeros[0]
if z.real > 0:
aid.log("trying flipping", z)
# using a coding which cannot flip over,
# since that affects the sign of the gain
coding_ins = aid.fitter.coding_map.num_c(aid.fitter)
# flip the root over, but also reduce its effect to help convergence
coding_ins.update_roots_Sf((-z).conjugate())
coding_lst[idx] = coding_ins
min_idx = idx
break
else:
# breaks from the while loop only if for-loop break doesn't occur
break
fitter_new = aid.fitter.__class__(
parent=aid.fitter,
num_codings=coding_lst,
)
# TODO, note, should this only be flipped in s-domain?
# flip the gain,
# print("GAIN: ", aid.fitter.xfer_fit / fitter_new.xfer_fit)
# fitter_new.gain = -fitter_new.gain
# print("GAIN: ", aid.fitter.xfer_fit / fitter_new.xfer_fit)
with fitter_new.with_codings_only([coding_ins]):
fitter_new.optimize(aid=aid)
fitter_new.optimize(aid=aid)
# print("GAIN3: ", aid.fitter.xfer_fit / fitter_new.xfer_fit)
aid.fitter_check(
fitter_new,
hint_name="mindelay_opt",
variant="OrdC",
)
return
def insert_triplets(aid):
"""
Inserts a complex and a real root into both the numerator (zeros) and denominator (poles), each with bandwidth 2x the data
"""
aid = ensure_aid(aid)
cplx_t = aid.fitter.coding_map.num_c
real_t = aid.fitter.coding_map.num_r
F_l_Hz = aid.fitter.F_max_Hz
BW_2x_Hz = 2 * F_l_Hz
F_high_Hz = 0.90 * F_l_Hz
coding_num_p = cplx_t(aid.fitter)
coding_num_p.update_roots_Sf(-BW_2x_Hz + 1j * F_high_Hz)
coding_num_p2 = real_t(aid.fitter)
coding_num_p2.update_roots_Sf(-BW_2x_Hz)
coding_den_p2 = real_t(aid.fitter)
coding_den_p2.update_roots_Sf(-BW_2x_Hz)
coding_den_p = cplx_t(aid.fitter)
coding_den_p.update_roots_Sf(-BW_2x_Hz + 1j * F_high_Hz)
fitter_new = aid.fitter.__class__(
parent=aid.fitter,
num_codings=aid.fitter.num_codings + [coding_num_p2],
den_codings=aid.fitter.den_codings + [coding_den_p2],
)
res_pre = fitter_new.residuals_average
with fitter_new.with_codings_only(
[coding_num_p, coding_num_p2] + [coding_den_p, coding_den_p2]
):
fitter_new.optimize(aid=aid)
res_mid = fitter_new.residuals_average
fitter_new.optimize(aid=aid)
res_post = fitter_new.residuals_average
res_ratio = fitter_new.residuals_average / aid.fitter.residuals_average
aid.log(
"TRIPLETS (rat = {0}, pre = {1}, mid = {2}, post = {3})".format(
res_ratio, res_pre, res_mid, res_post
)
)
return aid.fitter_check(
fitter_new,
hint_name="insert_triplets2",
variant="OrdUp",
)
def set_min_BW(aid):
aid = ensure_aid(aid)
def modify_codings(codings):
for coding in codings:
roots = coding.roots_c_Sf()
if roots:
(r,) = roots
root_F_Hz = r.imag
F_idx = np.searchsorted(aid.fitter.F_Hz, root_F_Hz)  # api: numpy.searchsorted
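# --- Added illustration ---------------------------------------------------------------
# set_min_BW is cut off above right after locating each complex root's frequency on
# the data grid. The snippet below only illustrates that np.searchsorted step and the
# local frequency spacing one could read off around the returned index; how the
# original function actually uses F_idx is not shown in this excerpt.
if __name__ == '__main__':
    _F_Hz = np.linspace(1.0, 1000.0, 200)     # stand-in for aid.fitter.F_Hz
    _root_F_Hz = 123.4
    _idx = np.searchsorted(_F_Hz, _root_F_Hz)
    _local_dF = _F_Hz[min(_idx + 1, len(_F_Hz) - 1)] - _F_Hz[max(_idx - 1, 0)]
    print(_idx, _local_dF)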
import numpy as np
import matplotlib.pyplot as pt
from q1pulse.instrument import Q1Instrument
from init_pulsars import qcm0, qrm1
from plot_util import plot_output
instrument = Q1Instrument('q1')
instrument.add_qcm(qcm0)
instrument.add_qrm(qrm1)
instrument.add_control('q1', qcm0.name, [0,1], nco_frequency=200e6)
instrument.add_control('q2', qrm1.name, [0,1], nco_frequency=200e6)
instrument.add_control('P1', qcm0.name, [2])
instrument.add_control('P2', qcm0.name, [3])
instrument.add_readout('R1', qrm1.name, [], nco_frequency=200e6)
qrm1.in0_gain(0)
qrm1.in1_gain(0)
p = instrument.new_program('nco_sync')
p.repetitions = 1
q1 = p.q1
q2 = p.q2
P1 = p.P1
P2 = p.P2
R1 = p['R1']
N = 10000
R1.add_acquisition_bins('default', N)
R1.integration_length_acq = 400
rabi_amplitude = 0.1
with p.loop_range(N):
q1.block_pulse(100, rabi_amplitude)
q2.block_pulse(100, rabi_amplitude)
with p.parallel():
q1.block_pulse(600, rabi_amplitude)
R1.acquire('default', 'increment', t_offset=120)
p.wait(1000)
p.wait(50_000-1620)
#p.describe()
p.compile(listing=True)
instrument.run_program(p)
plot_output([qcm0, qrm1])
data = instrument.get_acquisition_bins('R1', 'default')
#pprint(data)
I = np.array(data['integration']['path0'])
Q = np.array(data['integration']['path1'])  # api: numpy.array
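# --- Added illustration (not part of the original snippet) ---
# The two arrays above hold per-bin accumulated I/Q integration results. A common
# next step is to normalize by the integration length and convert to amplitude and
# phase; whether the original script does exactly this is not shown here.
amplitude = np.sqrt(I**2 + Q**2) / R1.integration_length_acq
phase = np.degrees(np.arctan2(Q, I))
print(amplitude[:5], phase[:5])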
#Importing Builtins
import os
import argparse
def parse_arguments():
#Initilizing ArgumentParser() object
parser = argparse.ArgumentParser(description='Train using either Alexnet Architecture or Nvidia Architecture')
#Give the model architecture name , default is "alexnet"
parser.add_argument('--model_arch',type=str,default='alexnet',help="Either 'nvidia' or 'alexnet' architecture")
#Give the path of the data directory,default is the working directory
parser.add_argument('--datadir',type=str,default='.',help='Specify your data path')
#Specify the size of validation set,default is 0.2
parser.add_argument('--validation_size',type=float,default=0.2,help='Size of validation set , default is 0.2')
parser.add_argument('--batch_size',type=int,default=128,help='Specify your batch size,default 128.')
#Specify the number of epochs you want to train, default is 5
parser.add_argument('--epochs',type=int,default=5,help='Number of epochs you want to train. Default is 5.')
#Specify the activation function default is 'elu'
parser.add_argument('--activation',type=str,default='elu',help='Give your activation function. Default is elu.')
return parser
arg_parser = parse_arguments()
args = arg_parser.parse_args()
#Retreiving the parameters from the command line arguments
activation = args.activation
EPOCHS = args.epochs
BATCH_SIZE = args.batch_size
DATA_DIR = args.datadir
model_arch = args.model_arch
saved_model_path = getattr(args, 'saved_model', None)  # no --saved_model flag is defined above, so fall back to None
RETRAIN = saved_model_path is not None  # retrain only when a saved model path is supplied (RETRAIN is otherwise undefined below)
test_size = args.validation_size
#Function for checking the arguments
def argument_checker():
if model_arch not in ['nvidia','alexnet']:
raise ValueError('Model architecture must be one of "nvidia" or "alexnet"')
if RETRAIN:
if not os.path.exists(saved_model_path):
raise FileNotFoundError('Specified Path not Found')
if not os.path.exists(os.path.join(DATA_DIR,'IMG')) or not os.path.exists(os.path.join(DATA_DIR,'driving_log.csv')):
raise FileNotFoundError('Data directory path must contain IMG directory and driving_log.csv file')
if activation not in ['elu','relu','sigmoid','tanh','linear']:
raise ValueError('Activation must be one of elu,relu,sigmoid,tanh,linear')
#Cheeck the arguments passed
argument_checker()
#import libraries for number crunching and data exploration
import numpy as np
import pandas as pd
#import libraries for data visualization
import matplotlib.image as mpimg
import matplotlib.pyplot as plt
#importing the deep learning library
import keras
#keras version checker
if keras.__version__!= '2.0.6':
print('This model is developed using keras version 2.0.6')
print('Previous versions may not work properly and versions 1.x.x will not work.')
from keras import backend as K
from keras.optimizers import Adam
from keras.models import Sequential
from keras.models import Model
from keras.models import load_model
from keras.layers import Dense
from keras.layers import Activation
from keras.layers import Conv2D
from keras.layers import Dropout
from keras.layers import MaxPooling2D
from keras.layers import Flatten
from keras.layers import Lambda
from keras.layers import Cropping2D
from keras.layers import BatchNormalization
from keras.callbacks import ModelCheckpoint
from keras.callbacks import TensorBoard
from sklearn.utils import shuffle
from sklearn.model_selection import train_test_split
IMG_DIR = os.path.join(DATA_DIR,'IMG')
CSV_LOG_PATH = os.path.join(DATA_DIR,'driving_log.csv')
#Callback functions to be implemented later at training step
#Tensorboard visualization features added.
model_checkpt = ModelCheckpoint('weights.{epoch:02d}-{val_loss:.2f}.hdf5')
tensor_board = TensorBoard(log_dir='./logdir/',histogram_freq=1,batch_size=32,write_grads=True)
#Correction Factor for left and right images
CF = 0.27
#Learning rate
LR = 1e-3
#Learning rate decay
DECAY = 0.99
#Reading the csv file
samples = pd.read_csv(CSV_LOG_PATH,header=None)  # the driving log has no header row, so read it once with header=None
columns = ['center','left','right','steering','throttle','brake','speed']
samples.columns = columns
# as the recorded image names in driving log file contains the whole directory path
#function for removing the directory path
def path_remover(path):
return path.split('/')[-1]
samples.center = list(map(path_remover,samples.center))
samples.left = list(map(path_remover,samples.left))
samples.right = list(map(path_remover,samples.right))
print(samples.head())
steering_center = samples.steering
steering_left = steering_center + CF
steering_right = steering_center - CF
steering_angle = np.concatenate((steering_center,steering_left,steering_right))  # api: numpy.concatenate
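# --- Added illustration (not part of the original script) ---
# The concatenated steering angles above are ordered center, then left, then right.
# A matching image-path array needs the same ordering; the original script's version
# of this step is not shown, so the lines below are only a sketch of one way to do it.
image_paths = np.concatenate((
    samples.center.apply(lambda f: os.path.join(IMG_DIR, f)).values,
    samples.left.apply(lambda f: os.path.join(IMG_DIR, f)).values,
    samples.right.apply(lambda f: os.path.join(IMG_DIR, f)).values,
))
assert len(image_paths) == len(steering_angle)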
from collections import defaultdict
import re
import numpy as np
"""
A python script to transform documents by replacing all mentions of co-referent clusters with first non-pronominal
mention. Importantly, it explicitly handles nested coreferent mentions, which is very common and not handled by most
libraries. This script, as of yet, does NOT handle syntax conflicts when replacing text. I.e. if a possessive noun
is the head mention, it will be resolved indiscriminately regardless of contexts in which it is placed. Please see
AllenNLP's function for guidance: https://docs.allennlp.org/models/master/models/coref/predictors/coref/#replace_corefs.
"""
PRONOUNS = {
'all', 'another', 'any', 'anybody', 'anyone', 'anything', 'as', 'aught', 'both', 'each other', 'each', 'either',
'enough', 'everybody', 'everyone', 'everything', 'few', 'he', 'her', 'hers', 'herself', 'him', 'himself', 'his',
'i', 'idem', 'it', 'its', 'itself', 'many', 'me', 'mine', 'most', 'my', 'myself', 'naught', 'neither', 'no one',
'nobody', 'none', 'nothing', 'nought', 'one another', 'one', 'other', 'others', 'ought', 'our', 'ours', 'ourself',
'ourselves', 'several', 'she', 'some', 'somebody', 'someone', 'something', 'somewhat', 'such', 'suchlike', 'that',
'thee', 'their', 'theirs', 'theirself', 'theirselves', 'them', 'themself', 'themselves', 'there', 'these', 'they',
'thine', 'this', 'those', 'thou', 'thy', 'thyself', 'us', 'we', 'what', 'whatever', 'whatnot', 'whatsoever',
'whence', 'where', 'whereby', 'wherefrom', 'wherein', 'whereinto', 'whereof', 'whereon', 'wheresoever', 'whereto',
'whereunto', 'wherever', 'wherewith', 'wherewithal', 'whether', 'which', 'whichever', 'whichsoever', 'who',
'whoever', 'whom', 'whomever', 'whomso', 'whomsoever', 'whose', 'whosesoever', 'whosever', 'whoso', 'whosoever',
'ye', 'yon', 'yonder', 'you', 'your', 'yours', 'yourself', 'yourselves'
}
def build_doc(resolved, spans, document):
"""
    :param resolved: dictionary mapping span strings ("{start}_{end}") to their resolved token lists
    :param spans: list of [start, end] token spans to replace; each must be a key in `resolved`
    :param document: list of tokens of the original document
    :return: list of tokens of the rebuilt document with every span replaced by its resolved tokens
"""
curr_idx = 0
toks = []
while curr_idx < len(document):
copy_to = -1
for span in spans:
if span[0] == curr_idx:
copy_to = span[1]
break
if copy_to > -1:
copy_str = span2str([curr_idx, copy_to])
toks += resolved[copy_str]
curr_idx = copy_to + 1
else:
toks.append(document[curr_idx])
curr_idx += 1
return toks
def build_str(main_span_str, resolved, dependencies, document, is_target):
"""
:param span_str: string representation of span: {start}_{end}
:param resolved: dictionary where keys are span_str's that have already been resolved to their final string
representation
:param dependencies: set of span_str's on which the resolution of main_span_str depends.
There are 2 types of dependencies:
1. replacement - if is_tgt, the span in dependencies (of which there should be just 1) is the head mention of
the coreferent cluster which includes both
2. subsumed - an entity (possibly part of a different cluster) is a subset of the main_span_str
:param document:
:param is_target: a boolean that is True if main_span_str is a non-head coreferent entity
(i.e. it is a target of replacement). This indicates its dependency is replacement, not subsumed
:return: list of tokens.
Resolves tokens from main_span_str in document according to the resolved tokens of spans on which it depends.
"""
span = str2span(main_span_str)
if is_target:
copy_span_str = list(dependencies)[0] # should only be one dependent (the head entity of the cluster)
if copy_span_str in resolved:
return resolved[copy_span_str]
else:
# They co-depend (for a few various reasons).
# This is an artifact of coreference toolkits.
# Just use target replacement from original document
print('Warning. Circular dependency detected!')
copy_span = str2span(copy_span_str)
return document[copy_span[0]:copy_span[1] + 1]
s = span[0]
e = span[1]
# remove sub-dependencies (sub-spans that are subsumed by larger spans which are also dependents.
# The transformations are already resolved in spanning span)
dep_spans = list(map(str2span, dependencies))
dep_span_lens = list(map(span2len, dep_spans))
    dep_order = np.argsort(dep_span_lens)
# Copyright 2019 Xanadu Quantum Technologies Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r""" Tests for various multi mode state preparation operations"""
import itertools
import pytest
import numpy as np
from strawberryfields import ops
from strawberryfields.utils import random_covariance, displaced_squeezed_state
# make test deterministic
np.random.seed(42)
MAG_ALPHAS = np.linspace(0, 0.8, 3)
PHASE_ALPHAS = np.linspace(0, 2 * np.pi, 3, endpoint=False)
NBARS = np.linspace(0, 5)
@pytest.mark.backends("tf", "fock")
class TestFockBasisMultimode:
"""Testing preparing multimode states on the Fock backends"""
def test_multimode_ket_mode_permutations(self, setup_backend, pure, cutoff, tol):
"""Test multimode ket preparation when modes are permuted"""
backend = setup_backend(4)
random_ket0 = np.random.uniform(-1, 1, cutoff) + 1j * np.random.uniform(
-1, 1, cutoff
)
random_ket0 = random_ket0 / np.linalg.norm(random_ket0)
random_ket1 = np.random.uniform(-1, 1, cutoff) + 1j * np.random.uniform(
-1, 1, cutoff
)
random_ket1 = random_ket1 / np.linalg.norm(random_ket1)
random_ket = np.outer(random_ket0, random_ket1)
rho = np.einsum("ij,kl->ikjl", random_ket, random_ket.conj())
backend.prepare_ket_state(random_ket, modes=[3, 1])
state = backend.state([3, 1])
multi_mode_preparation_dm = state.dm()
assert np.allclose(multi_mode_preparation_dm, rho, atol=tol, rtol=0)
def test_compare_single_mode_and_multimode_ket_preparation(
self, setup_backend, batch_size, pure, cutoff, tol
):
"""Test single and multimode ket preparation"""
backend = setup_backend(4)
random_ket0 = np.random.uniform(-1, 1, cutoff) + 1j * np.random.uniform(
-1, 1, cutoff
)
random_ket0 = random_ket0 / np.linalg.norm(random_ket0)
random_ket1 = np.random.uniform(-1, 1, cutoff) + 1j * np.random.uniform(
-1, 1, cutoff
)
random_ket1 = random_ket1 / np.linalg.norm(random_ket1)
random_ket = np.outer(random_ket0, random_ket1)
backend.prepare_ket_state(random_ket0, 0)
backend.prepare_ket_state(random_ket1, 1)
state = backend.state([0, 1])
single_mode_preparation_dm = state.dm()
single_mode_preparation_probs = np.array(state.all_fock_probs())
backend.reset(pure=pure)
backend.prepare_ket_state(random_ket, [0, 1])
state = backend.state([0, 1])
multi_mode_preparation_dm = state.dm()
multi_mode_preparation_probs = np.array(state.all_fock_probs())
assert np.allclose(
single_mode_preparation_dm, multi_mode_preparation_dm, atol=tol, rtol=0
)
assert np.allclose(
single_mode_preparation_probs,
multi_mode_preparation_probs,
atol=tol,
rtol=0,
)
if batch_size is not None:
single_mode_preparation_dm = single_mode_preparation_dm[0]
multi_mode_preparation_dm = multi_mode_preparation_dm[0]
assert np.all(
single_mode_preparation_dm.shape == multi_mode_preparation_dm.shape
)
def test_compare_single_mode_and_multimode_dm_preparation(
self, setup_backend, batch_size, pure, cutoff, tol
):
"""Compare the results of a successive single mode preparations
and a multi mode preparation of a product state."""
backend = setup_backend(4)
random_rho0 = np.random.normal(size=[cutoff] * 2) + 1j * np.random.normal(
size=[cutoff] * 2
)
random_rho0 = np.dot(random_rho0.conj().T, random_rho0)
random_rho0 = random_rho0 / random_rho0.trace()
random_rho1 = np.random.normal(size=[cutoff] * 2) + 1j * np.random.normal(
size=[cutoff] * 2
)
random_rho1 = np.dot(random_rho1.conj().T, random_rho1)
random_rho1 = random_rho1 / random_rho1.trace()
random_dm = np.outer(random_rho0, random_rho1)
random_dm = random_dm.reshape([cutoff] * 4)
backend.prepare_dm_state(random_rho0, 0)
backend.prepare_dm_state(random_rho1, 1)
state = backend.state([0, 1])
single_mode_preparation_dm = state.dm()
single_mode_preparation_probs = np.array(state.all_fock_probs())
# first we do a preparation from random_dm, with shape [cutoff]*4
backend.reset(pure=pure)
backend.prepare_dm_state(random_dm, [0, 1])
state = backend.state(modes=[0, 1])
multi_mode_preparation_dm = state.dm()
multi_mode_preparation_probs = np.array(state.all_fock_probs())
# second we do a preparation from the corresponding matrix with shape [cutoff**2]*2
backend.reset(pure=pure)
backend.prepare_dm_state(random_dm.reshape([cutoff ** 2] * 2), [0, 1])
state = backend.state(modes=[0, 1])
multi_mode_preparation_from_matrix_dm = state.dm()
multi_mode_preparation_from_matrix_probs = np.array(state.all_fock_probs())
# third we do a preparation from random_dm on modes 3 and 1 (in that order!) and test if the states end up in the correct modes
backend.reset(pure=pure)
backend.prepare_dm_state(random_dm, [3, 1])
multi_mode_preparation_31_mode_0 = backend.state(modes=0).dm()
multi_mode_preparation_31_mode_1 = backend.state(modes=1).dm()
multi_mode_preparation_31_mode_2 = backend.state(modes=2).dm()
multi_mode_preparation_31_mode_3 = backend.state(modes=3).dm()
multi_mode_preparation_31_probs = np.array(
backend.state(modes=[3, 1]).all_fock_probs()
)
        single_mode_vac = np.zeros((cutoff, cutoff), dtype=np.complex128)
## -------------------------------------------------------------------------------------------------
## -- Project : MLPro - A Synoptic Framework for Standardized Machine Learning Tasks
## -- Package : mlpro.wrappers
## -- Module : pettingzoo.py
## -------------------------------------------------------------------------------------------------
## -- History :
## -- yyyy-mm-dd Ver. Auth. Description
## -- 2021-08-27 0.0.0 SY Creation
## -- 2021-09-23 1.0.0 SY Release of first version
## -- 2021-09-28 1.0.1 SY WrEnvPZoo: implementation of method get_cycle_limits()
## -- 2021-09-29 1.1.0 SY Change name:WrEnvPZoo to WrEnvPZOO2MLPro
## -- 2021-10-02 1.2.0 SY New classes: WrEnvMLPro2PZoo, update _recognize_space() in WrEnvGYM2MLPro
## -- 2021-10-05 1.2.1 SY Update following new attributes done and broken in State
## -- 2021-10-06 1.2.2 DA Minor fixes
## -- 2021-10-07 1.2.3 SY Update WrEnvMLPro2PZoo()
## -- 2021-11-03 1.2.4 SY Remove reset() on WrEnvPZOO2MLPro and WrEnvMLPro2PZoo to avoid double reset
## -- 2021-11-13 1.2.5 DA Minor adjustments
## -- 2021-11-16 1.2.6 DA Refactoring
## -- 2021-11-16 1.2.7 SY Refactoring
## -- 2021-12-09 1.2.8 SY Update process action procedure in WrEnvMLPro2PZoo()
## -- 2021-12-11 1.2.9 SY Update WrEnvPZOO2MLPro() in setting up done flag
## -- 2021-12-21 1.3.0 DA - Replaced 'done' by 'success' on mlpro functionality
## -- - Optimized 'done' detection in both classes
## -- 2021-12-23 1.3.1 MRD Remove adding self._num_cycle on simulate_reaction() due to
## -- EnvBase.process_actions() is already adding self._num_cycle
## -- 2022-01-20 1.3.2 SY - Update PettingZoo2MLPro's reward type to C_TYPE_EVERY_AGENT
## -- - Update Wrapper MLPro2PettingZoo - Method step()
## -- 2022-01-21 1.3.3 SY Class WrEnvPZOO2MLPro:
## -- - replace variable _reward to _last_reward
## -- Class WrEnvMLPro2PZoo:
## -- - refactored done detection
## -- - removed artifacts of cycle counting
## -- 2022-02-27 1.3.4 SY Refactoring due to auto generated ID in class Dimension
## -- 2022-03-21 1.3.5 SY Refactoring due to PettingZoo version 1.17.0
## -------------------------------------------------------------------------------------------------
"""
Ver. 1.3.5 (2022-03-21)
This module provides wrapper classes for reinforcement learning tasks.
"""
import gym
import numpy as np
from mlpro.rl.models import *
from mlpro.wrappers.openai_gym import WrEnvMLPro2GYM
from pettingzoo import AECEnv
from pettingzoo.utils import agent_selector
from pettingzoo.utils import wrappers
## -------------------------------------------------------------------------------------------------
## -------------------------------------------------------------------------------------------------
class WrEnvPZOO2MLPro(Environment):
"""
This class is a ready to use wrapper class for Petting Zoo environments.
Objects of this type can be treated as an environment object. Encapsulated
petting zoo environment must be compatible to class pettingzoo.env.
"""
C_TYPE = 'Petting Zoo Env'
## -------------------------------------------------------------------------------------------------
def __init__(self, p_zoo_env, p_state_space:MSpace=None, p_action_space:MSpace=None, p_logging=Log.C_LOG_ALL):
"""
Parameters:
p_pzoo_env Petting Zoo environment object
p_state_space Optional external state space object that meets the
state space of the gym environment
p_action_space Optional external action space object that meets the
state space of the gym environment
p_logging Switch for logging
"""
self._zoo_env = p_zoo_env
self.C_NAME = 'Env "' + self._zoo_env.metadata['name'] + '"'
Environment.__init__(self, p_mode=Environment.C_MODE_SIM, p_logging=p_logging)
if p_state_space is not None:
self._state_space = p_state_space
else:
self._state_space = self._recognize_space(self._zoo_env.observation_spaces, "observation")
if p_action_space is not None:
self._action_space = p_action_space
else:
self._action_space = self._recognize_space(self._zoo_env.action_spaces, "action")
## -------------------------------------------------------------------------------------------------
def __del__(self):
try:
self._zoo_env.close()
except:
pass
self.log(self.C_LOG_TYPE_I, 'Closed')
## -------------------------------------------------------------------------------------------------
def _recognize_space(self, p_zoo_space, dict_name) -> ESpace:
space = ESpace()
if dict_name == "observation":
space.add_dim(Dimension(p_name_short='0', p_base_set='DO'))
elif dict_name == "action":
for k in p_zoo_space:
space.add_dim(Dimension(p_name_short=k, p_base_set='DO'))
return space
## -------------------------------------------------------------------------------------------------
@staticmethod
def setup_spaces():
return None, None
## -------------------------------------------------------------------------------------------------
def _reset(self, p_seed=None):
# 1 Reset Zoo environment and determine initial state
self._zoo_env.seed(p_seed)
self._zoo_env.reset()
observation, _, _, _ = self._zoo_env.last()
obs = DataObject(observation)
# 2 Create state object from Zoo observation
state = State(self._state_space)
if isinstance(observation, dict):
state.set_values(obs.get_data()['observation'])
else:
state.set_values(obs.get_data())
self._set_state(state)
## -------------------------------------------------------------------------------------------------
def simulate_reaction(self, p_state:State, p_action:Action) -> State:
new_state = State(self._state_space)
# 1 Convert action to Zoo syntax
action_sorted = p_action.get_sorted_values()
agent_num = 0
for k in self._zoo_env.action_spaces:
dtype = self._zoo_env.action_spaces[k].dtype
if ( dtype == np.int32 ) or ( dtype == np.int64 ):
action_sorted_agent = action_sorted[agent_num].round(0)
else:
action_sorted_agent = action_sorted[agent_num]
action_zoo = action_sorted_agent.astype(self._zoo_env.action_spaces[k].dtype)
# 2 Process step of Zoo environment that automatically switches control to the next agent.
observation, reward_zoo, done, info = self._zoo_env.last()
obs = DataObject(observation)
if done:
self._zoo_env.step(None)
new_state.set_terminal(True)
else:
try:
self._zoo_env.step(action_zoo)
except:
self._zoo_env.step(np.atleast_1d(action_zoo))
agent_num += 1
# 3 Create state object from Zoo observation
if isinstance(observation, dict):
new_state.set_values(obs.get_data()['observation'])
else:
new_state.set_values(obs.get_data())
# 4 Create and store reward object
self._last_reward = Reward(Reward.C_TYPE_EVERY_AGENT)
for key in self._zoo_env.rewards.keys():
self._last_reward.add_agent_reward(key, self._zoo_env.rewards.get(key))
return new_state
## -------------------------------------------------------------------------------------------------
def compute_reward(self, p_state_old:State=None, p_state_new:State=None) -> Reward:
if ( p_state_old is not None ) or ( p_state_new is not None ):
raise NotImplementedError
return self._last_reward
## -------------------------------------------------------------------------------------------------
def compute_success(self, p_state:State) -> bool:
return self.get_success()
## -------------------------------------------------------------------------------------------------
def compute_broken(self, p_state:State) -> bool:
return self.get_broken()
## -------------------------------------------------------------------------------------------------
def init_plot(self, p_figure=None):
self._zoo_env.render()
## -------------------------------------------------------------------------------------------------
def update_plot(self):
self._zoo_env.render()
## -------------------------------------------------------------------------------------------------
def get_cycle_limit(self):
try:
return self._zoo_env.env.env.max_cycles
except:
return self.C_CYCLE_LIMIT
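# Illustrative usage sketch (hypothetical, not part of the wrapper itself): any PettingZoo AEC
# environment can be wrapped so that it behaves like an MLPro Environment, e.g.
#   from pettingzoo.classic import connect_four_v3   # placeholder environment choice
#   env = WrEnvPZOO2MLPro(connect_four_v3.env(), p_logging=Log.C_LOG_ALL)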
## -------------------------------------------------------------------------------------------------
## -------------------------------------------------------------------------------------------------
class WrEnvMLPro2PZoo():
"""
This class is a ready to use wrapper class for MLPro to PettingZoo environments.
Objects of this type can be treated as an AECEnv object. Encapsulated
MLPro environment must be compatible to class Environment.
To be noted, this wrapper is not capable for parallel environment yet.
"""
C_TYPE = 'MLPro to PZoo Env'
## -------------------------------------------------------------------------------------------------
def __init__(self, p_mlpro_env, p_num_agents, p_state_space:MSpace=None, p_action_space:MSpace=None):
"""
Parameters:
p_mlpro_env MLPro's Environment object
p_num_agents Number of Agents
p_state_space Optional external state space object that meets the
state space of the MLPro environment
p_action_space Optional external action space object that meets the
state space of the MLPro environment
"""
self.pzoo_env = self.raw_env(p_mlpro_env, p_num_agents, p_state_space, p_action_space)
self.pzoo_env = wrappers.CaptureStdoutWrapper(self.pzoo_env)
self.pzoo_env = wrappers.OrderEnforcingWrapper(self.pzoo_env)
## -------------------------------------------------------------------------------------------------
class raw_env(AECEnv):
metadata = {'render_modes': ['human', 'ansi'], "name": "pzoo_custom"}
## -------------------------------------------------------------------------------------------------
def __init__(self, p_mlpro_env, p_num_agents, p_state_space:MSpace=None, p_action_space:MSpace=None):
self._mlpro_env = p_mlpro_env
self.possible_agents = [str(r) for r in range(p_num_agents)]
self.agent_name_mapping = dict(zip(self.possible_agents, list(range(len(self.possible_agents)))))
if p_state_space is not None:
self.observation_spaces = p_state_space
else:
self.observation_spaces = self._recognize_space(self._mlpro_env.get_state_space())
if p_action_space is not None:
self.action_spaces = p_action_space
else:
self.action_spaces = self._recognize_space(self._mlpro_env.get_action_space())
self.first_refresh = True
## -------------------------------------------------------------------------------------------------
def _recognize_space(self, p_mlpro_space):
space = WrEnvMLPro2GYM.recognize_space(p_mlpro_space)
setup_space = {agent: space for agent in self.possible_agents}
return setup_space
## -------------------------------------------------------------------------------------------------
def step(self, action):
if self.dones[self.agent_selection]:
return self._was_done_step(action)
agent = self.agent_selection
self._cumulative_rewards[agent] = 0
cycle_limit = self._mlpro_env.get_cycle_limit()
self.state[self.agent_selection] = action[int(self.agent_selection)]
if agent == self.possible_agents[-1]:
_action = Action()
idx = self._mlpro_env.get_action_space().get_num_dim()
if isinstance(self.observation_spaces, gym.spaces.Discrete):
                    action = np.array([action])
# std
import os
import sys
import argparse
import logging
from collections import defaultdict
# 3rd party
import numpy as np
import torch
from torch.optim.lr_scheduler import ExponentialLR
# internal
from load_data import Data
from models import *
logger = logging.getLogger(__name__)
logger.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s:%(name)s:%(levelname)s:%(message)s')
stream_handler = logging.StreamHandler(sys.stdout)
stream_handler.setLevel(logging.INFO)
stream_handler.setFormatter(formatter)
logger.addHandler(stream_handler)
file_handler = logging.FileHandler('hntn_train_validate_and_test_fb15k_237_200d_hypothesis.log')
file_handler.setLevel(logging.INFO)
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
class Experiment:
def __init__(self,
model_name,
learning_rate=0.001,
ent_vec_dim=200,
rel_vec_dim=200,
epochs=100,
batch_size=128,
decay_rate=0.,
cuda=False,
input_dropout=0.,
hidden_dropout=0.,
feature_map_dropout=0.,
in_channels=1,
out_channels=32,
filt_h=3,
filt_w=3,
label_smoothing=0.):
self.model_name = model_name
self.learning_rate = learning_rate
self.ent_vec_dim = ent_vec_dim
self.rel_vec_dim = rel_vec_dim
self.epochs = epochs
self.batch_size = batch_size
self.decay_rate = decay_rate
self.label_smoothing = label_smoothing
self.cuda = cuda
self.kwargs = {"input_dropout": input_dropout,
"hidden_dropout": hidden_dropout,
"feature_map_dropout": feature_map_dropout,
"in_channels": in_channels,
"out_channels": out_channels,
"filt_h": filt_h,
"filt_w": filt_w}
def get_data_idxs(self, data):
data_idxs = [(self.entity_idxs[data[i][0]],
self.relation_idxs[data[i][1]],
self.entity_idxs[data[i][2]]) for i in range(len(data))]
return data_idxs
@staticmethod
def get_er_vocab(data):
er_vocab = defaultdict(list)
for triple in data:
er_vocab[(triple[0], triple[1])].append(triple[2])
return er_vocab
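    # Illustrative example (hypothetical ids, not from the dataset): get_er_vocab groups all
    # observed tail entities per (head, relation) pair, e.g.
    #   [(0, 5, 7), (0, 5, 9), (3, 2, 1)]  ->  {(0, 5): [7, 9], (3, 2): [1]}
    # so each row of the training target built in get_batch() is a multi-hot vector over all entities.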
def get_batch(self, er_vocab, triple_idxs, triple_size, idx):
batch = triple_idxs[idx:min(idx + self.batch_size, triple_size)]
targets = np.zeros((len(batch), len(d.entities)))
for idx, pair in enumerate(batch):
targets[idx, er_vocab[pair]] = 1.
targets = torch.FloatTensor(targets)
if self.cuda:
targets = targets.cuda()
return np.array(batch), targets
def evaluate_costs(self, evaluate_triple_idxs, model):
costs = []
er_vocab = self.get_er_vocab(evaluate_triple_idxs)
er_vocab_pairs = list(er_vocab.keys())
er_vocab_pairs_size = len(er_vocab_pairs)
logger.info(f'Number of entity-relational pairs: {er_vocab_pairs_size}')
for i in range(0, er_vocab_pairs_size, self.batch_size):
if i % (128 * 100) == 0:
logger.info(f'Batch: {i + 1} ...')
triples, targets = self.get_batch(er_vocab, er_vocab_pairs, er_vocab_pairs_size, i)
e1_idx = torch.tensor(triples[:, 0])
r_idx = torch.tensor(triples[:, 1])
if self.cuda:
e1_idx = e1_idx.cuda()
r_idx = r_idx.cuda()
predictions = model.forward(e1_idx, r_idx)
if self.label_smoothing:
targets = ((1.0 - self.label_smoothing) * targets) + (1.0 / targets.size(1))
cost = model.loss(predictions, targets)
costs.append(cost.item())
return costs
def evaluate(self, model, data, epoch, data_type=None):
data_type_map = {'training': 'TRAINING', 'validation': 'VALIDATION', 'testing': 'TESTING'}
data_type = data_type_map[data_type] if data_type else 'TRAINING'
logger.info(f'Starting {data_type} evaluation {epoch}')
hits = []
ranks = []
for i in range(10):
hits.append([])
evaluate_triple_idxs = self.get_data_idxs(data)
evaluation_triple_size = len(evaluate_triple_idxs)
logger.info(f'Number of evaluation data points: {evaluation_triple_size}')
logger.info(f'Starting evaluate costs ...')
costs = self.evaluate_costs(evaluate_triple_idxs, model)
logger.info(f'Evaluate costs complete!')
er_vocab = self.get_er_vocab(self.get_data_idxs(d.data)) if data_type == 'TESTING' else \
self.get_er_vocab(self.get_data_idxs(d.data_train_and_valid))
for i in range(0, evaluation_triple_size, self.batch_size):
if i % (128 * 100) == 0:
logger.info(f'Batch: {i + 1} ...')
triples, _ = self.get_batch(er_vocab, evaluate_triple_idxs, evaluation_triple_size, i)
e1_idx = torch.tensor(triples[:, 0])
r_idx = torch.tensor(triples[:, 1])
e2_idx = torch.tensor(triples[:, 2])
if self.cuda:
e1_idx = e1_idx.cuda()
r_idx = r_idx.cuda()
e2_idx = e2_idx.cuda()
predictions = model.forward(e1_idx, r_idx)
for j in range(triples.shape[0]):
filt = er_vocab[(triples[j][0], triples[j][1])]
target_value = predictions[j, e2_idx[j]].item()
predictions[j, filt] = 0.0
predictions[j, e2_idx[j]] = target_value
sort_values, sort_idxs = torch.sort(predictions, dim=1, descending=True)
for j in range(triples.shape[0]):
rank = np.where(sort_idxs[j].cpu() == e2_idx[j].cpu())[0][0]
ranks.append(rank + 1)
for hits_level in range(10):
if rank <= hits_level:
hits[hits_level].append(1.0)
else:
hits[hits_level].append(0.0)
logger.info(f'Epoch: {epoch}, Mean evaluation cost_{data_type.lower()}: {np.mean(costs)}')
logger.info(f'Epoch: {epoch}, Hits @10_{data_type.lower()}: {np.mean(hits[9])}')
logger.info(f'Epoch: {epoch}, Hits @3_{data_type.lower()}: {np.mean(hits[2])}')
logger.info(f'Epoch: {epoch}, Hits @1_{data_type.lower()}: {np.mean(hits[0])}')
        logger.info(f'Epoch: {epoch}, Mean rank_{data_type.lower()}: {np.mean(ranks)}')
"""DEM coregistration classes and functions."""
from __future__ import annotations
import copy
import concurrent.futures
import json
import os
import subprocess
import tempfile
import warnings
from enum import Enum
from typing import Any, Callable, Optional, overload, Union, Sequence, TypeVar
try:
import cv2
_has_cv2 = True
except ImportError:
_has_cv2 = False
import fiona
import geoutils as gu
from geoutils.georaster import RasterType
import numpy as np
import rasterio as rio
import rasterio.warp # pylint: disable=unused-import
import rasterio.windows # pylint: disable=unused-import
import scipy
import scipy.interpolate
import scipy.ndimage
import scipy.optimize
import skimage.transform
from rasterio import Affine
from tqdm import trange, tqdm
import pandas as pd
import xdem
try:
import richdem as rd
_has_rd = True
except ImportError:
_has_rd = False
try:
from pytransform3d.transform_manager import TransformManager
import pytransform3d.transformations
_HAS_P3D = True
except ImportError:
_HAS_P3D = False
def filter_by_range(ds: rio.DatasetReader, rangelim: tuple[float, float]):
"""
Function to filter values using a range.
"""
print('Excluding values outside of range: {0:f} to {1:f}'.format(*rangelim))
out = np.ma.masked_outside(ds, *rangelim)
out.set_fill_value(ds.fill_value)
return out
def filtered_slope(ds_slope, slope_lim=(0.1, 40)):
print("Slope filter: %0.2f - %0.2f" % slope_lim)
print("Initial count: %i" % ds_slope.count())
flt_slope = filter_by_range(ds_slope, slope_lim)
print(flt_slope.count())
return flt_slope
def apply_xy_shift(ds: rio.DatasetReader, dx: float, dy: float) -> np.ndarray:
"""
Apply horizontal shift to rio dataset using Transform affine matrix
:param ds: DEM
:param dx: dx shift value
:param dy: dy shift value
Returns:
Rio Dataset with updated transform
"""
print("X shift: ", dx)
print("Y shift: ", dy)
# Update geotransform
ds_meta = ds.meta
gt_orig = ds.transform
gt_align = Affine(gt_orig.a, gt_orig.b, gt_orig.c+dx,
gt_orig.d, gt_orig.e, gt_orig.f+dy)
print("Original transform:", gt_orig)
print("Updated transform:", gt_align)
# Update ds Geotransform
ds_align = ds
meta_update = ds.meta.copy()
    meta_update.update({"driver": "GTiff", "height": ds.shape[1],
                        "width": ds.shape[2], "transform": gt_align, "crs": ds.crs})
# to split this part in two?
with rasterio.open(ds_align, "w", **meta_update) as dest:
dest.write(ds_align)
return ds_align
def apply_z_shift(ds: rio.DatasetReader, dz: float):
"""
Apply vertical shift to rio dataset using Transform affine matrix
:param ds: DEM
:param dx: dz shift value
"""
src_dem = rio.open(ds)
a = src_dem.read(1)
ds_shift = a + dz
return ds_shift
def rio_to_rda(ds: rio.DatasetReader) -> rd.rdarray:
"""
Get georeferenced richDEM array from rasterio dataset
:param ds: DEM
:return: DEM
"""
arr = ds.read(1)
rda = rd.rdarray(arr, no_data=ds.get_nodatavals()[0])
rda.geotransform = ds.get_transform()
rda.projection = ds.get_gcps()
return rda
def get_terrainattr(ds: rio.DatasetReader, attrib='slope_degrees') -> rd.rdarray:
"""
Derive terrain attribute for DEM opened with rasterio. One of "slope_degrees", "slope_percentage", "aspect",
"profile_curvature", "planform_curvature", "curvature" and others (see richDEM documentation)
:param ds: DEM
:param attrib: terrain attribute
:return:
"""
rda = rio_to_rda(ds)
terrattr = rd.TerrainAttribute(rda, attrib=attrib)
return terrattr
def get_horizontal_shift(elevation_difference: np.ndarray, slope: np.ndarray, aspect: np.ndarray,
min_count: int = 20) -> tuple[float, float, float]:
"""
Calculate the horizontal shift between two DEMs using the method presented in Nuth and Kääb (2011).
:param elevation_difference: The elevation difference (reference_dem - aligned_dem).
:param slope: A slope map with the same shape as elevation_difference (units = pixels?).
:param aspect: An aspect map with the same shape as elevation_difference (units = radians).
:param min_count: The minimum allowed bin size to consider valid.
:raises ValueError: If very few finite values exist to analyse.
:returns: The pixel offsets in easting, northing, and the c_parameter (altitude?).
"""
input_x_values = aspect
with np.errstate(divide="ignore", invalid="ignore"):
input_y_values = elevation_difference / slope
# Remove non-finite values
x_values = input_x_values[np.isfinite(input_x_values) & np.isfinite(input_y_values)]
y_values = input_y_values[np.isfinite(input_x_values) & np.isfinite(input_y_values)]
assert y_values.shape[0] > 0
# Remove outliers
lower_percentile = np.percentile(y_values, 1)
upper_percentile = np.percentile(y_values, 99)
valids = np.where((y_values > lower_percentile) & (y_values < upper_percentile) & (np.abs(y_values) < 200))
x_values = x_values[valids]
y_values = y_values[valids]
# Slice the dataset into appropriate aspect bins
step = np.pi / 36
slice_bounds = np.arange(start=0, stop=2 * np.pi, step=step)
y_medians = np.zeros([len(slice_bounds)])
count = y_medians.copy()
for i, bound in enumerate(slice_bounds):
y_slice = y_values[(bound < x_values) & (x_values < (bound + step))]
if y_slice.shape[0] > 0:
y_medians[i] = np.median(y_slice)
count[i] = y_slice.shape[0]
# Filter out bins with counts below threshold
y_medians = y_medians[count > min_count]
slice_bounds = slice_bounds[count > min_count]
if slice_bounds.shape[0] < 10:
raise ValueError("Less than 10 different cells exist.")
# Make an initial guess of the a, b, and c parameters
initial_guess: tuple[float, float, float] = (3 * np.std(y_medians) / (2 ** 0.5), 0.0, np.mean(y_medians))
def estimate_ys(x_values: np.ndarray, parameters: tuple[float, float, float]) -> np.ndarray:
"""
Estimate y-values from x-values and the current parameters.
y(x) = a * cos(b - x) + c
:param x_values: The x-values to feed the above function.
:param parameters: The a, b, and c parameters to feed the above function
:returns: Estimated y-values with the same shape as the given x-values
"""
return parameters[0] * np.cos(parameters[1] - x_values) + parameters[2]
def residuals(parameters: tuple[float, float, float], y_values: np.ndarray, x_values: np.ndarray):
"""
Get the residuals between the estimated and measured values using the given parameters.
err(x, y) = est_y(x) - y
:param parameters: The a, b, and c parameters to use for the estimation.
:param y_values: The measured y-values.
:param x_values: The measured x-values
:returns: An array of residuals with the same shape as the input arrays.
"""
err = estimate_ys(x_values, parameters) - y_values
return err
# Estimate the a, b, and c parameters with least square minimisation
plsq = scipy.optimize.leastsq(func=residuals, x0=initial_guess, args=(y_medians, slice_bounds), full_output=1)
a_parameter, b_parameter, c_parameter = plsq[0]
# Calculate the easting and northing offsets from the above parameters
east_offset = a_parameter * np.sin(b_parameter)
north_offset = a_parameter * np.cos(b_parameter)
return east_offset, north_offset, c_parameter
def calculate_slope_and_aspect(dem: np.ndarray) -> tuple[np.ndarray, np.ndarray]:
"""
Calculate the slope and aspect of a DEM.
:param dem: A numpy array of elevation values.
:returns: The slope (in pixels??) and aspect (in radians) of the DEM.
"""
# TODO: Figure out why slope is called slope_px. What unit is it in?
# TODO: Change accordingly in the get_horizontal_shift docstring.
# Calculate the gradient of the slope
gradient_y, gradient_x = np.gradient(dem)
slope_px = np.sqrt(gradient_x ** 2 + gradient_y ** 2)
aspect = np.arctan2(-gradient_x, gradient_y)
aspect += np.pi
return slope_px, aspect
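# Illustrative usage sketch (hypothetical variable names, not part of the public API):
#   slope_px, aspect = calculate_slope_and_aspect(reference_dem_array)
#   ddem = reference_dem_array - aligned_dem_array
#   east_px, north_px, _ = get_horizontal_shift(ddem, slope_px, aspect)
# In the Nuth and Kääb (2011) scheme these pixel offsets are typically applied and re-estimated
# iteratively until they converge.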
def deramping(elevation_difference, x_coordinates: np.ndarray, y_coordinates: np.ndarray,
degree: int, verbose: bool = False,
metadata: Optional[dict[str, Any]] = None) -> Callable[[np.ndarray, np.ndarray], np.ndarray]:
"""
Calculate a deramping function to account for rotational and non-rigid components of the elevation difference.
:param elevation_difference: The elevation difference array to analyse.
:param x_coordinates: x-coordinates of the above array (must have the same shape as elevation_difference)
:param y_coordinates: y-coordinates of the above array (must have the same shape as elevation_difference)
:param degree: The polynomial degree to estimate the ramp.
:param verbose: Print the least squares optimization progress.
:param metadata: Optional. A metadata dictionary that will be updated with the key "deramp".
:returns: A callable function to estimate the ramp.
"""
#warnings.warn("This function is deprecated in favour of the new Coreg class.", DeprecationWarning)
# Extract only the finite values of the elevation difference and corresponding coordinates.
valid_diffs = elevation_difference[np.isfinite(elevation_difference)]
valid_x_coords = x_coordinates[np.isfinite(elevation_difference)]
valid_y_coords = y_coordinates[np.isfinite(elevation_difference)]
# Randomly subsample the values if there are more than 500,000 of them.
if valid_x_coords.shape[0] > 500_000:
random_indices = np.random.randint(0, valid_x_coords.shape[0] - 1, 500_000)
valid_diffs = valid_diffs[random_indices]
valid_x_coords = valid_x_coords[random_indices]
valid_y_coords = valid_y_coords[random_indices]
# Create a function whose residuals will be attempted to minimise
def estimate_values(x_coordinates: np.ndarray, y_coordinates: np.ndarray,
coefficients: np.ndarray, degree: int) -> np.ndarray:
"""
Estimate values from a 2D-polynomial.
:param x_coordinates: x-coordinates of the difference array (must have the same shape as elevation_difference).
:param y_coordinates: y-coordinates of the difference array (must have the same shape as elevation_difference).
:param coefficients: The coefficients (a, b, c, etc.) of the polynomial.
:param degree: The degree of the polynomial.
:raises ValueError: If the length of the coefficients list is not compatible with the degree.
:returns: The values estimated by the polynomial.
"""
# Check that the coefficient size is correct.
coefficient_size = (degree + 1) * (degree + 2) / 2
if len(coefficients) != coefficient_size:
raise ValueError()
# Do Amaury's black magic to estimate the values.
estimated_values = np.sum([coefficients[k * (k + 1) // 2 + j] * x_coordinates ** (k - j) *
y_coordinates ** j for k in range(degree + 1) for j in range(k + 1)], axis=0)
return estimated_values # type: ignore
    # Create the error function
def residuals(coefficients: np.ndarray, values: np.ndarray, x_coordinates: np.ndarray,
y_coordinates: np.ndarray, degree: int) -> np.ndarray:
"""
Calculate the difference between the estimated and measured values.
:param coefficients: Coefficients for the estimation.
:param values: The measured values.
:param x_coordinates: The x-coordinates of the values.
:param y_coordinates: The y-coordinates of the values.
:param degree: The degree of the polynomial to estimate.
:returns: An array of residuals.
"""
error = estimate_values(x_coordinates, y_coordinates, coefficients, degree) - values
error = error[np.isfinite(error)]
return error
# Run a least-squares minimisation to estimate the correct coefficients.
# TODO: Maybe remove the full_output?
initial_guess = np.zeros(shape=((degree + 1) * (degree + 2) // 2))
if verbose:
print("Deramping...")
coefficients = scipy.optimize.least_squares(
fun=residuals,
x0=initial_guess,
args=(valid_diffs, valid_x_coords, valid_y_coords, degree),
verbose=2 if verbose and degree > 1 else 0
).x
# Generate the return-function which can correctly estimate the ramp
def ramp(x_coordinates: np.ndarray, y_coordinates: np.ndarray) -> np.ndarray:
"""
Get the values of the ramp that corresponds to given coordinates.
:param x_coordinates: x-coordinates of interest.
:param y_coordinates: y-coordinates of interest.
:returns: The estimated ramp offsets.
"""
return estimate_values(x_coordinates, y_coordinates, coefficients, degree)
if metadata is not None:
metadata["deramp"] = {
"coefficients": coefficients,
"nmad": xdem.spatialstats.nmad(residuals(coefficients, valid_diffs, valid_x_coords, valid_y_coords, degree))
}
# Return the function which can be used later.
return ramp
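# Illustrative usage sketch (hypothetical variable names, not part of the public API):
#   x_coords, y_coords = _get_x_and_y_coords(ddem.shape, transform)
#   ramp = deramping(ddem, x_coords, y_coords, degree=1)
#   ddem_deramped = ddem - ramp(x_coords, y_coords)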
def mask_as_array(reference_raster: gu.georaster.Raster, mask: Union[str, gu.geovector.Vector, gu.georaster.Raster]) -> np.ndarray:
"""
Convert a given mask into an array.
:param reference_raster: The raster to use for rasterizing the mask if the mask is a vector.
:param mask: A valid Vector, Raster or a respective filepath to a mask.
:raises: ValueError: If the mask path is invalid.
:raises: TypeError: If the wrong mask type was given.
:returns: The mask as a squeezed array.
"""
# Try to load the mask file if it's a filepath
if isinstance(mask, str):
# First try to load it as a Vector
try:
mask = gu.geovector.Vector(mask)
        # If the format is unsupported, try loading as a Raster
except fiona.errors.DriverError:
try:
mask = gu.georaster.Raster(mask)
# If that fails, raise an error
except rio.errors.RasterioIOError:
raise ValueError(f"Mask path not in a supported Raster or Vector format: {mask}")
# At this point, the mask variable is either a Raster or a Vector
# Now, convert the mask into an array by either rasterizing a Vector or by fetching a Raster's data
if isinstance(mask, gu.geovector.Vector):
mask_array = mask.create_mask(reference_raster)
elif isinstance(mask, gu.georaster.Raster):
# The true value is the maximum value in the raster, unless the maximum value is 0 or False
true_value = np.nanmax(mask.data) if not np.nanmax(mask.data) in [0, False] else True
mask_array = (mask.data == true_value).squeeze()
else:
raise TypeError(
f"Mask has invalid type: {type(mask)}. Expected one of: "
f"{[gu.georaster.Raster, gu.geovector.Vector, str, type(None)]}"
)
return mask_array
def _transform_to_bounds_and_res(shape: tuple[int, ...],
transform: rio.transform.Affine) -> tuple[rio.coords.BoundingBox, float]:
"""Get the bounding box and (horizontal) resolution from a transform and the shape of a DEM."""
bounds = rio.coords.BoundingBox(
*rio.transform.array_bounds(shape[0], shape[1], transform=transform))
resolution = (bounds.right - bounds.left) / shape[1]
return bounds, resolution
def _get_x_and_y_coords(shape: tuple[int, ...], transform: rio.transform.Affine):
"""Generate center coordinates from a transform and the shape of a DEM."""
bounds, resolution = _transform_to_bounds_and_res(shape, transform)
x_coords, y_coords = np.meshgrid(
np.linspace(bounds.left + resolution / 2, bounds.right - resolution / 2, num=shape[1]),
np.linspace(bounds.bottom + resolution / 2, bounds.top - resolution / 2, num=shape[0])[::-1]
)
return x_coords, y_coords
CoregType = TypeVar("CoregType", bound="Coreg")
class Coreg:
"""
Generic Coreg class.
Made to be subclassed.
"""
_fit_called: bool = False # Flag to check if the .fit() method has been called.
_is_affine: Optional[bool] = None
def __init__(self, meta: Optional[dict[str, Any]] = None, matrix: Optional[np.ndarray] = None):
"""Instantiate a generic Coreg method."""
self._meta: dict[str, Any] = meta or {} # All __init__ functions should instantiate an empty dict.
if matrix is not None:
with warnings.catch_warnings():
# This error is fixed in the upcoming 1.8
warnings.filterwarnings("ignore", message="`np.float` is a deprecated alias for the builtin `float`")
valid_matrix = pytransform3d.transformations.check_transform(matrix)
self._meta["matrix"] = valid_matrix
def fit(self: CoregType, reference_dem: np.ndarray | np.ma.masked_array | RasterType,
dem_to_be_aligned: np.ndarray | np.ma.masked_array | RasterType,
inlier_mask: Optional[np.ndarray] = None,
transform: Optional[rio.transform.Affine] = None,
weights: Optional[np.ndarray] = None,
subsample: Union[float, int] = 1.0,
verbose: bool = False) -> CoregType:
"""
Estimate the coregistration transform on the given DEMs.
:param reference_dem: 2D array of elevation values acting reference.
:param dem_to_be_aligned: 2D array of elevation values to be aligned.
:param inlier_mask: Optional. 2D boolean array of areas to include in the analysis (inliers=True).
:param transform: Optional. Transform of the reference_dem. Mandatory in some cases.
:param weights: Optional. Per-pixel weights for the coregistration.
:param subsample: Subsample the input to increase performance. <1 is parsed as a fraction. >1 is a pixel count.
:param verbose: Print progress messages to stdout.
"""
if weights is not None:
raise NotImplementedError("Weights have not yet been implemented")
# Validate that both inputs are valid array-like (or Raster) types.
if not all(hasattr(dem, "__array_interface__") for dem in (reference_dem, dem_to_be_aligned)):
raise ValueError(
"Both DEMs need to be array-like (implement a numpy array interface)."
f"'reference_dem': {reference_dem}, 'dem_to_be_aligned': {dem_to_be_aligned}"
)
# If both DEMs are Rasters, validate that 'dem_to_be_aligned' is in the right grid. Then extract its data.
if isinstance(dem_to_be_aligned, gu.Raster) and isinstance(reference_dem, gu.Raster):
dem_to_be_aligned = dem_to_be_aligned.reproject(reference_dem, silent=True).data
# If any input is a Raster, use its transform if 'transform is None'.
# If 'transform' was given and any input is a Raster, trigger a warning.
# Finally, extract only the data of the raster.
for name, dem in [("reference_dem", reference_dem), ("dem_to_be_aligned", dem_to_be_aligned)]:
if hasattr(dem, "transform"):
if transform is None:
transform = getattr(dem, "transform")
elif transform is not None:
warnings.warn(f"'{name}' of type {type(dem)} overrides the given 'transform'")
"""
if name == "reference_dem":
reference_dem = dem.data
else:
dem_to_be_aligned = dem.data
"""
if transform is None:
raise ValueError("'transform' must be given if both DEMs are array-like.")
ref_dem, ref_mask = xdem.spatial_tools.get_array_and_mask(reference_dem)
tba_dem, tba_mask = xdem.spatial_tools.get_array_and_mask(dem_to_be_aligned)
# Make sure that the mask has an expected format.
if inlier_mask is not None:
inlier_mask = np.asarray(inlier_mask).squeeze()
assert inlier_mask.dtype == bool, f"Invalid mask dtype: '{inlier_mask.dtype}'. Expected 'bool'"
if np.all(~inlier_mask):
raise ValueError("'inlier_mask' had no inliers.")
ref_dem[~inlier_mask] = np.nan
tba_dem[~inlier_mask] = np.nan
if np.all(ref_mask):
raise ValueError("'reference_dem' had only NaNs")
if np.all(tba_mask):
raise ValueError("'dem_to_be_aligned' had only NaNs")
# If subsample is not equal to one, subsampling should be performed.
if subsample != 1.0:
# The full mask (inliers=True) is the inverse of the above masks and the provided mask.
full_mask = (~ref_mask & ~tba_mask & (np.asarray(inlier_mask) if inlier_mask is not None else True)).squeeze()
# If subsample is less than one, it is parsed as a fraction (e.g. 0.8 => retain 80% of the values)
if subsample < 1.0:
subsample = int(np.count_nonzero(full_mask) * (1 - subsample))
# Randomly pick N inliers in the full_mask where N=subsample
random_falses = np.random.choice(np.argwhere(full_mask.flatten()).squeeze(), int(subsample), replace=False)
# Convert the 1D indices to 2D indices
cols = (random_falses // full_mask.shape[0]).astype(int)
rows = random_falses % full_mask.shape[0]
# Set the N random inliers to be parsed as outliers instead.
full_mask[rows, cols] = False
# Run the associated fitting function
self._fit_func(ref_dem=ref_dem, tba_dem=tba_dem, transform=transform, weights=weights, verbose=verbose)
# Flag that the fitting function has been called.
self._fit_called = True
return self
@overload
def apply(self, dem: RasterType, transform: rio.transform.Affine | None) -> RasterType: ...
@overload
def apply(self, dem: np.ndarray, transform: rio.transform.Affine | None) -> np.ndarray: ...
@overload
def apply(self, dem: np.ma.masked_array, transform: rio.transform.Affine | None) -> np.ma.masked_array: ...
def apply(self, dem: np.ndarray | np.ma.masked_array | RasterType,
transform: rio.transform.Affine | None = None) -> RasterType | np.ndarray | np.ma.masked_array:
"""
Apply the estimated transform to a DEM.
:param dem: A DEM array or Raster to apply the transform on.
:param transform: The transform object of the DEM. Required if 'dem' is an array and not a Raster.
:returns: The transformed DEM.
"""
if not self._fit_called and self._meta.get("matrix") is None:
raise AssertionError(".fit() does not seem to have been called yet")
if isinstance(dem, gu.Raster):
if transform is None:
transform = dem.transform
else:
warnings.warn(f"DEM of type {type(dem)} overrides the given 'transform'")
else:
if transform is None:
raise ValueError("'transform' must be given if DEM is array-like.")
# The array to provide the functions will be an ndarray with NaNs for masked out areas.
dem_array, dem_mask = xdem.spatial_tools.get_array_and_mask(dem)
if np.all(dem_mask):
raise ValueError("'dem' had only NaNs")
# See if a _apply_func exists
try:
# Run the associated apply function
applied_dem = self._apply_func(dem_array, transform) # pylint: disable=assignment-from-no-return
# If it doesn't exist, use apply_matrix()
except NotImplementedError:
if self.is_affine: # This only works on it's affine, however.
# Apply the matrix around the centroid (if defined, otherwise just from the center).
applied_dem = apply_matrix(
dem_array,
transform=transform,
matrix=self.to_matrix(),
centroid=self._meta.get("centroid"),
dilate_mask=True
)
else:
raise ValueError("Coreg method is non-rigid but has no implemented _apply_func")
# If the DEM was a masked_array, copy the mask to the new DEM
if hasattr(dem, "mask"):
            applied_dem = np.ma.masked_array(applied_dem, mask=dem.mask)
import cv2
import keras
from keras.datasets import mnist, cifar10
import numpy as np
def img_2_dct(images, input_size, rgb=True):
final_images = np.zeros((input_size[0], input_size[1], input_size[2]))
output_images = np.zeros((input_size[0], input_size[1], input_size[2]))
for i in range(len(images)):
if rgb:
final_images[i,:,:] = cv2.cvtColor(images[i,:,:],cv2.COLOR_RGB2GRAY)/255.0
else:
final_images[i,:,:] = images[i,:,:]/255.0
output_images[i,:,:] = cv2.dct(final_images[i,:,:])
return (final_images, output_images)
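# Illustrative check (not part of the loader): cv2.dct is invertible, so a normalized grayscale
# image can be recovered from its transform with cv2.idct, e.g. for a single MNIST-sized image:
#   gray, dct = img_2_dct(images[:1], (1, 28, 28), rgb=False)
#   recon = cv2.idct(dct[0])   # recon should be close to gray[0]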
def load_dataset(data_string, flatten):
if data_string =='mnist':
(x_train_temp, _ ), (x_test_temp, _ ) = mnist.load_data()
train_shape = np.shape(x_train_temp)
test_shape = np.shape(x_test_temp)
#load the final mnist images inputs and ouputs(dcts)
(x_train, y_train) = img_2_dct(x_train_temp, train_shape, rgb= len(train_shape)>3)
(x_test, y_test) = img_2_dct(x_test_temp, test_shape, rgb= len(test_shape)>3)
if flatten == True:
x_train = np.reshape(x_train, [train_shape[0], -1])
y_train = np.reshape(y_train, [train_shape[0], -1])
x_test = np.reshape(x_test, [test_shape[0], -1])
y_test = np.reshape(y_test, [test_shape[0], -1])
elif data_string =='cifar10':
        (x_train_temp, _), (x_test_temp, _) = cifar10.load_data()
train_shape = np.shape(x_train_temp)
test_shape = np.shape(x_test_temp)
#load the final cifar10 images inputs and ouputs(dcts)
(x_train, y_train) = img_2_dct(x_train_temp, train_shape, rgb= len(train_shape)>3)
(x_test, y_test) = img_2_dct(x_test_temp, test_shape, rgb= len(test_shape)>3)
if flatten == True:
x_train = np.reshape(x_train, [train_shape[0], -1])
y_train = np.reshape(y_train, [train_shape[0], -1])
x_test = np.reshape(x_test, [test_shape[0], -1])
            y_test = np.reshape(y_test, [test_shape[0], -1])
from sklearn.datasets import load_diabetes
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_absolute_error, mean_squared_error
import numpy as np
import matplotlib.pyplot as plt
dataset_diabetes = load_diabetes()
feature_names = dataset_diabetes['feature_names']
data = dataset_diabetes['data']
target = dataset_diabetes['target']
model = LinearRegression()
model.fit(data, target)
prediction = model.predict(data)
# for i, y_i in enumerate(target):
# print('Target', target[i], 'Prediction', prediction[i], 'Error', prediction[i] - target[i])
mse = np.mean((target - prediction) ** 2)
print('Mse Value: ' , mse)
mae = np.mean(np.abs(target - prediction))
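print('Mae Value: ', mae)
# Cross-check with scikit-learn's metrics (already imported above); these should match the
# manual computations up to floating-point error.
print('sklearn MSE:', mean_squared_error(target, prediction))
print('sklearn MAE:', mean_absolute_error(target, prediction))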
#!/usr/bin/env python3
import pytest
import numpy as np
np.seterr(all='raise')
import ss_generator as ssg
def test_transformations():
print("test transformations.")
mean_theta = np.radians(91.8)
std_theta = np.radians(3.35)
mean_tau = np.radians(49.5)
std_tau = np.radians(7.1)
coef = [-1, 0, 1]
for c1 in coef:
for c2 in coef:
theta = mean_theta + c1 * std_theta
tau = mean_tau + c2 * std_tau
axis, xi = ssg.geometry.rotation_matrix_to_axis_and_angle(
ssg.ca_tracing.alpha_helix.theta_tau_to_rotation_matrix(theta, tau))
c_theta, c_tau = ssg.ca_tracing.alpha_helix.axis_to_theta_tau(axis)
print("theta = {0:.2f}\ttau = {1:.2f}\txi = {2:.2f}\taxis = {3}\tc_theta = {4:.2f}\tc_tau = {5:.2f}".format(
np.degrees(theta), np.degrees(tau), np.degrees(xi), axis, np.degrees(c_theta), np.degrees(c_tau)))
def test_build_nexus():
print("test build nexus.")
theta = np.radians(91.8)
tau = np.radians(49.5)
axis = ssg.geometry.rotation_matrix_to_axis_and_angle(
ssg.ca_tracing.alpha_helix.theta_tau_to_rotation_matrix(theta, tau))[0]
c_theta, c_tau = ssg.ca_tracing.alpha_helix.theta_tau_for_nexus(axis, axis)
print("theta = {0:.2f}\ttau = {1:.2f}\taxis = {2}\tc_theta = {3:.2f}\tc_tau = {4:.2f}".format(
        np.degrees(theta), np.degrees(tau), axis, np.degrees(c_theta), np.degrees(c_tau)))
"""
CompareHistograms - compare count vs a probability map
Arguments:
in_count_field: name of field containing histogram of event counts
in_prob_field: name of field containing probability map
out_field: field to contain diff/err
Input payload (alert):
in_prob_field: probability map
Consumes alert.
Input payload (report):
in_count_field: histogram of event counts
in_prob_field (optional): probability map
Output payload (report only):
out_field: diff/err map
"""
import logging
import numpy as np
from snewpdag.dag import Node
class CompareHistograms(Node):
def __init__(self, in_count_field, in_prob_field, out_field, **kwargs):
self.in_count_field = in_count_field
self.in_prob_field = in_prob_field
self.out_field = out_field
self.prob = np.array([]) # null array
super().__init__(**kwargs)
def alert(self, data):
logging.debug('{}: alert'.format(self.name))
# probability map may come in on alert or report
if self.in_prob_field in data:
logging.debug('{}: probability map registered'.format(self.name))
self.prob = np.array(data[self.in_prob_field], copy=True)
return False # count map will come with report
def report(self, data):
logging.debug('{}: report'.format(self.name))
# probability map may come in on alert or report
if self.in_prob_field in data:
            self.prob = np.array(data[self.in_prob_field], copy=True)
"""
Classify cell identities using scNym
scnym_api() is the main API endpoint for users.
This function allows for training and prediction using scnym_train()
and scnym_predict(). Both of these functions will be infrequently
accessed by users.
scnym_tune() provides an API for hyperparameter tuning of scNym models
using reverse cross-validation.
get_pretrained_weights() is a wrapper function that downloads pretrained
weights from our cloud storage bucket.
atlas2target() downloads preprocessed reference datasets and concatenates
them onto a user supplied target dataset.
"""
from typing import Optional, Union, List
from anndata import AnnData
import scanpy as sc
import numpy as np
import pandas as pd
import torch
import os
import os.path as osp
import copy
import pickle
import warnings
import itertools
import pprint
# for fetching pretrained weights, all in standard lib
import requests
import json
import urllib
# for data splits
from sklearn.model_selection import StratifiedKFold
# from scnym
from . import utils
from . import model
from . import main
from . import predict
from . import dataprep
# Define constants
TEST_URL = 'https://storage.googleapis.com/calico-website-mca-storage/kang_2017_stim_pbmc.h5ad'
WEIGHTS_JSON = 'https://storage.googleapis.com/calico-website-scnym-storage/link_tables/pretrained_weights.json'
REFERENCE_JSON = 'https://storage.googleapis.com/calico-website-scnym-storage/link_tables/cell_atlas.json'
ATLAS_ANNOT_KEYS = {
'human': 'celltype',
'mouse': 'cell_ontology_class',
'rat': 'cell_ontology_class',
}
TASKS = (
'train',
'predict',
)
# Define configurations
CONFIGS = {
'default' : {
'n_epochs': 100,
'patience': 40,
'lr': 1.0,
'optimizer_name': 'adadelta',
'weight_decay': 1e-4,
'batch_size': 256,
'mixup_alpha': 0.3,
'unsup_max_weight': 1.,
'unsup_mean_teacher': False,
'ssl_method': 'mixmatch',
'ssl_kwargs': {
'augment_pseudolabels': False,
'augment': 'log1p_drop',
'unsup_criterion': 'mse',
'n_augmentations': 1,
'T': 0.5,
'ramp_epochs': 100,
'burn_in_epochs': 0,
'dan_criterion': True,
'dan_ramp_epochs': 20,
'dan_max_weight': 0.1,
'min_epochs': 20,
},
'model_kwargs' : {
'n_hidden': 256,
'n_layers': 2,
'init_dropout': 0.0,
'residual': False,
},
'tensorboard': False,
},
}
CONFIGS['no_new_identity'] = copy.deepcopy(CONFIGS['default'])
CONFIGS['no_new_identity']['description'] = (
'Train scNym models with MixMatch and a domain adversary, assuming no new cell types in the target data.'
)
CONFIGS['new_identity_discovery'] = copy.deepcopy(CONFIGS['default'])
CONFIGS['new_identity_discovery']['ssl_kwargs']['pseudolabel_min_confidence'] = 0.9
CONFIGS['new_identity_discovery']['ssl_kwargs']['dan_use_conf_pseudolabels'] = True
CONFIGS['new_identity_discovery']['description'] = (
'Train scNym models with MixMatch and a domain adversary, using pseudolabel thresholding to allow for new cell type discoveries.'
)
CONFIGS['no_dan'] = copy.deepcopy(CONFIGS['default'])
CONFIGS['no_dan']['ssl_kwargs']['dan_max_weight'] = 0.0
CONFIGS['no_dan']['ssl_kwargs']['dan_ramp_epochs'] = 1
CONFIGS['no_dan']['description'] = (
'Train scNym models with MixMatch but no domain adversary. May be useful if class imbalance is very large.'
)
CONFIGS['no_ssl'] = copy.deepcopy(CONFIGS['default'])
CONFIGS['no_ssl']['ssl_kwargs']['dan_max_weight'] = 0.0
CONFIGS['no_ssl']['ssl_kwargs']['dan_ramp_epochs'] = 1
CONFIGS['no_ssl']['ssl_kwargs']['unsup_max_weight'] = 0.0
CONFIGS['no_ssl']['description'] = (
    'Train scNym models without MixMatch or a domain adversary; fully supervised training on the labeled data only.'
)
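# Illustrative example (hypothetical values): a dict passed as `config` to `scnym_api` only needs
# the keys to override; all other keys fall back to CONFIGS['default'], e.g.
#   custom_config = {'n_epochs': 150, 'batch_size': 128}
#   scnym_api(adata=adata, task='train', groupby='annotations', config=custom_config)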
UNLABELED_TOKEN = 'Unlabeled'
def scnym_api(
adata: AnnData,
task: str='train',
groupby: str=None,
domain_groupby: str=None,
out_path: str='./scnym_outputs',
trained_model: str=None,
config: Union[dict, str]='new_identity_discovery',
key_added: str='scNym',
copy: bool=False,
) -> Optional[AnnData]:
"""
scNym: Semi-supervised adversarial neural networks for
single cell classification [Kimmel2020]_.
scNym is a cell identity classifier that transfers annotations from one
single cell experiment to another. The model is implemented as a neural
network that employs MixMatch semi-supervision and a domain adversary to
take advantage of unlabeled data during training. scNym offers superior
performance to many baseline single cell identity classification methods.
Parameters
----------
adata
Annotated data matrix used for training or prediction.
If `"scNym_split"` in `.obs_keys()`, uses the cells annotated
`"train", "val"` to select data splits.
task
Task to perform, either "train" or "predict".
If "train", uses `adata` as labeled training data.
If "predict", uses `trained_model` to infer cell identities for
observations in `adata`.
groupby
Column in `adata.obs` that contains cell identity annotations.
Values of `"Unlabeled"` indicate that a given cell should be used
only as unlabeled data during training.
domain_groupby
Column in `adata.obs` that contains domain labels as integers.
Each domain of origin (e.g. batch, species) should be given a unique
domain label.
If `domain_groupby is None`, train and target data are each considered
a unique domain.
out_path
Path to a directory for saving scNym model weights and training logs.
trained_model
Path to the output directory of an scNym training run
or a string specifying a pretrained model.
If provided while `task == "train"`, used as an initialization.
config
Configuration name or dictionary of configuration of parameters.
Pre-defined configurations:
"new_identity_discovery" - Default. Employs pseudolabel thresholding to
allow for discovery of new cell identities in the target dataset using
scNym confidence scores.
"no_new_identity" - Assumes all cells in the target data belong to one
of the classes in the training data. Recommended to improve performance
when this assumption is valid.
key_added
Key added to `adata.obs` with scNym predictions if `task=="predict"`.
copy
copy the AnnData object before predicting cell types.
Returns
-------
Depending on `copy`, returns or updates `adata` with the following fields.
`X_scnym` : :class:`~numpy.ndarray`, (:attr:`~anndata.AnnData.obsm`, shape=(n_samples, n_hidden), dtype `float`)
scNym embedding coordinates of data.
`scNym` : (`adata.obs`, dtype `str`)
scNym cell identity predictions for each observation.
`scNym_train_results` : :class:`~dict`, (:attr:`~anndata.AnnData.uns`)
results of scNym model training.
Examples
--------
>>> import scanpy as sc
>>> from scnym.api import scnym_api, atlas2target
**Loading Data and preparing labels**
>>> adata = sc.datasets.kang17()
>>> target_bidx = adata.obs['stim']=='stim'
>>> adata.obs['cell'] = np.array(adata.obs['cell'])
>>> adata.obs.loc[target_bidx, 'cell'] = 'Unlabeled'
**Train an scNym model**
>>> scnym_api(
... adata=adata,
... task='train',
... groupby='clusters',
... out_path='./scnym_outputs',
... config='no_new_identity',
... )
**Predict cell identities with the trained scNym model**
>>> path_to_model = './scnym_outputs/'
>>> scnym_api(
... adata=adata,
... task='predict',
... groupby='scNym',
... trained_model=path_to_model,
... config='no_new_identity',
... )
**Perform semi-supervised training with an atlas**
>>> joint_adata = atlas2target(
... adata=adata,
... species='mouse',
... key_added='annotations',
... )
>>> scnym_api(
... adata=joint_adata,
... task='train',
... groupby='annotations',
... out_path='./scnym_outputs',
... config='no_new_identity',
... )
"""
if task not in TASKS:
msg = f'{task} is not a valid scNym task.\n'
msg += f'must be one of {TASKS}'
raise ValueError(msg)
# check configuration arguments and choose a config
if type(config) == str:
if config not in CONFIGS.keys():
msg = f'{config} is not a predefined configuration.\n'
msg += f'must be one of {CONFIGS.keys()}.'
raise ValueError(msg)
else:
config = CONFIGS[config]
elif type(config) != dict:
msg = f'`config` was a {type(config)}, must be dict or str.'
raise TypeError(msg)
else:
# config is a dictionary of parameters
# add or update default parameters based on these
dconf = CONFIGS['default']
for k in config.keys():
dconf[k] = config[k]
config = dconf
# check for CUDA
if torch.cuda.is_available():
print('CUDA compute device found.')
else:
print('No CUDA device found.')
print('Computations will be performed on the CPU.')
print('Add a CUDA compute device to improve speed dramatically.\n')
if not osp.exists(out_path):
os.makedirs(out_path, exist_ok=True)
# add args to `config`
config['out_path'] = out_path
config['groupby'] = groupby
config['key_added'] = key_added
config['trained_model'] = trained_model
config['domain_groupby'] = domain_groupby
################################################
# check that there are no duplicate genes in the input object
################################################
n_genes = adata.shape[1]
n_unique_genes = len(np.unique(adata.var_names))
if n_genes != n_unique_genes:
msg = 'Duplicate Genes Error\n'
msg += 'Not all genes passed to scNym were unique.\n'
msg += f'{n_genes} genes are present but only {n_unique_genes} unique genes were detected.\n'
msg += 'Please use unique gene names in your input object.\n'
msg += 'This can be achieved by running `adata.var_names_make_unique()`'
raise ValueError(msg)
################################################
# check that `adata.X` are log1p(CPM) counts
################################################
# we can't directly check if cells were normalized to CPM because
# users may have filtered out genes *a priori*, so the cell sum
# may no longer be ~= 1e6.
# however, we can check that our assumptions about log normalization
# are true.
# check that the min/max are within log1p(CPM) range
x_max = np.max(adata.X) > np.log1p(1e6)
x_min = np.min(adata.X) < 0.
# check to see if a user accidently provided raw counts
if type(adata.X) == np.ndarray:
        int_counts = np.all(np.equal(np.mod(adata.X, 1), 0))
import numpy as np
from matplotlib import pyplot as plt
plt.style.use("../template.mplstyle")
# purple - green - darkgoldenrod - blue - red
colors = ['purple', '#306B37', 'darkgoldenrod', '#3F7BB6', '#BF4145', "#cf630a"]
linestyles = [(0, (1,1.05)), (0, (3, 1, 1, 1)), (0, (1,3)), (0, (3,3.65)), (0, (3,2.772)), (0, (3, 1, 1, 1, 1, 1))]
#########################################################################################
h = 0.6732
h_theory = 0.6736
TT = np.loadtxt("measurements_TT.txt")
EE = np.loadtxt("measurements_EE.txt")
import numpy as np
class Real():
def __init__(self, value: float = 0):
self.value = np.array([value], dtype=float)
def __add__(self, rhs):
out = Real()
if isinstance(rhs, Real):
out.value = self.value + rhs.value
else:
out.value = self.value + rhs
return out
def __radd__(self, lhs):
out = Real()
if isinstance(lhs, Real):
            out.value = lhs.value + self.value
else:
out.value = lhs + self.value
return out
def __sub__(self, rhs):
out = Real()
if isinstance(rhs, Real):
out.value = self.value - rhs.value
else:
out.value = self.value - rhs
return out
def __rsub__(self, lhs):
out = Real()
if isinstance(lhs, Real):
out.value = lhs.value - self.value
else:
out.value = lhs - self.value
return out
def __mul__(self, rhs):
out = Real()
if isinstance(rhs, (Real, Complex, RealMatrix, ComplexMatrix)):
out.value = self.value*rhs.value
elif isinstance(rhs, (float, int, complex)):
out.value = self.value*rhs
return out
def __rmul__(self, lhs):
out = Real()
if isinstance(lhs, (Real, Complex, RealMatrix, ComplexMatrix)):
out.value = lhs.value*self.value
elif isinstance(lhs, (float, int, complex)):
out.value = lhs*self.value
return out
def __pow__(self, n):
out = Real()
if isinstance(n, (float, int)):
out.value = self.value**n
else:
out.value = self.value**n.value
return out
class Complex(Real):
def __init__(self, value: complex = 1j):
super().__init__()
self.value = np.array([value], dtype=complex)
def re(self):
out = Real()
out.value = np.real(self.value)
return out
def im(self):
out = Real()
out.value = np.imag(self.value)
return out
def conj(self):
out = Complex()
out.value = np.conj(self.value)
return out
class RealMatrix():
def __init__(self, N: int = None, value: np.ndarray = None):
if N != None:
self.N = N
self.value = np.zeros((N, N), dtype=float)
else:
self.N = len(value)
self.value = value
def transpose(self):
out = RealMatrix(self.N)
out.value = np.transpose(self.value)
return out
def trace(self):
tr = np.trace(self.value)
return Real(tr)
def det(self):
d = np.linalg.det(self.value)
return Real(d)
def inv(self):
out = RealMatrix(self.N)
out.value = np.linalg.inv(self.value)
return out
def __add__(self, rhs):
        # check ComplexMatrix first, since it subclasses RealMatrix
        if isinstance(rhs, ComplexMatrix):
            out = ComplexMatrix(self.N)
        elif isinstance(rhs, RealMatrix):
            out = RealMatrix(self.N)
assert(self.value.shape == rhs.value.shape)
out.value = self.value + rhs.value
return out
def __radd__(self, lhs):
if isinstance(lhs, RealMatrix):
out = RealMatrix(self.N)
if isinstance(lhs, ComplexMatrix):
out = ComplexMatrix(self.N)
assert(self.value.shape == lhs.value.shape)
out.value = self.value + lhs.value
return out
def __sub__(self, rhs):
if isinstance(rhs, RealMatrix):
out = RealMatrix(self.N)
if isinstance(rhs, ComplexMatrix):
out = ComplexMatrix(self.N)
assert(self.value.shape == rhs.value.shape)
out.value = self.value - rhs.value
return out
def __rsub__(self, lhs):
if isinstance(lhs, RealMatrix):
out = RealMatrix(self.N)
if isinstance(lhs, ComplexMatrix):
out = ComplexMatrix(self.N)
assert(self.value.shape == lhs.value.shape)
out.value = lhs.value - self.value
return out
def __mul__(self, rhs):
if isinstance(rhs, RealMatrix):
out = RealMatrix(self.N)
assert(self.value.shape[1] == rhs.value.shape[0])
out.value = np.dot(self.value, rhs.value)
elif isinstance(rhs, Real):
out = RealMatrix(self.N)
out.value = self.value*rhs.value
elif isinstance(rhs, Complex):
out = ComplexMatrix(self.N)
out.value = self.value*rhs.value
elif isinstance(rhs, VectorComplex):
out = VectorComplex(Nd=self.N)
assert(self.value.shape[1] == rhs.value.shape[0])
out.value = np.dot(self.value, rhs.value)
elif isinstance(rhs, VectorReal):
out = VectorReal(Nd=self.N)
assert(self.value.shape[1] == rhs.value.shape[0])
out.value = np.dot(self.value, rhs.value)
return out
class Identity(RealMatrix):
def __init__(self, N: int):
super().__init__(N)
self.value = np.diag([1]*self.N)
class ComplexMatrix(RealMatrix):
def __init__(self, N: int = None, value: np.ndarray = None):
if N != None:
self.N = N
self.value = np.zeros((N, N), dtype=complex)
else:
self.N = len(value)
self.value = value
def transpose(self):
out = ComplexMatrix(self.N)
out.value = np.transpose(self.value)
return out
def conj(self):
out = ComplexMatrix(self.N)
out.value = np.conj(self.value)
return out
def adj(self):
tmp = ComplexMatrix(self.N)
tmp = self.conj()
return tmp.transpose()
def re(self):
out = RealMatrix(self.N)
out.value = np.real(self.value)
return out
def im(self):
out = RealMatrix(self.N)
out.value = np.imag(self.value)
return out
def trace(self):
tr = np.trace(self.value)
return Complex(tr)
def det(self):
d = np.linalg.det(self.value)
return Complex(d)
def inv(self):
out = ComplexMatrix(self.N)
out.value = np.linalg.inv(self.value)
return out
def __add__(self, rhs):
out = ComplexMatrix(self.N)
if isinstance(rhs, (RealMatrix, ComplexMatrix)):
assert(self.value.shape == rhs.value.shape)
out.value = self.value + rhs.value
return out
def __radd__(self, lhs):
out = ComplexMatrix(self.N)
if isinstance(lhs, (RealMatrix, ComplexMatrix)):
assert(self.value.shape == lhs.value.shape)
out.value = self.value + lhs.value
return out
def __sub__(self, rhs):
out = ComplexMatrix(self.N)
if isinstance(rhs, (RealMatrix, ComplexMatrix)):
assert(self.value.shape == rhs.value.shape)
out.value = self.value - rhs.value
return out
def __rsub__(self, lhs):
out = ComplexMatrix(self.N)
if isinstance(lhs, (RealMatrix, ComplexMatrix)):
assert(self.value.shape == lhs.value.shape)
out.value = lhs.value - self.value
return out
def __mul__(self, rhs):
        if isinstance(rhs, RealMatrix):
            # product of a complex matrix with a real matrix stays complex
            out = ComplexMatrix(self.N)
            assert(self.value.shape[1] == rhs.value.shape[0])
            out.value = np.dot(self.value, rhs.value)
        elif isinstance(rhs, (Complex, Real)):
            out = ComplexMatrix(self.N)
            out.value = self.value*rhs.value
elif isinstance(rhs, VectorComplex):
out = VectorComplex(Nd=self.N)
assert(self.value.shape[1] == rhs.value.shape[0])
out.value = np.dot(self.value, rhs.value)
return out
class VectorReal():
def __init__(self, Nd: int = None, value: np.ndarray = None):
if Nd != None:
self.Nd = Nd
self.value = np.array([0.]*self.Nd, dtype=float)
else:
self.Nd = len(value)
self.value = value
def __getitem__(self, mu: int):
return Real(self.value[mu])
def poke_component(self, mu: int, m):
if isinstance(m, Real):
self.value[mu] = m.value
elif isinstance(m, (int, float)):
self.value[mu] = m
def __add__(self, rhs):
out = VectorReal(Nd=self.Nd)
if isinstance(rhs, VectorReal):
assert(self.value.shape == rhs.value.shape)
out.value = self.value + rhs.value
elif isinstance(rhs, Real):
out.value = self.value + rhs.value
return out
def __radd__(self, lhs):
out = VectorReal(Nd=self.Nd)
if isinstance(lhs, VectorReal):
assert(self.value.shape == lhs.value.shape)
out.value = self.value + lhs.value
elif isinstance(lhs, Real):
out.value = self.value + lhs.value
return out
def __sub__(self, rhs):
out = VectorReal(Nd=self.Nd)
if isinstance(rhs, VectorReal):
assert(self.value.shape == rhs.value.shape)
out.value = self.value - rhs.value
elif isinstance(rhs, Real):
out.value = self.value - rhs.value
return out
def __rsub__(self, lhs):
out = VectorReal(Nd=self.Nd)
if isinstance(lhs, VectorReal):
assert(self.value.shape == lhs.value.shape)
out.value = lhs.value - self.value
elif isinstance(lhs, Real):
out.value = lhs.value - self.value
return out
def __mul__(self, rhs):
out = VectorReal(Nd=self.Nd)
if isinstance(rhs, VectorReal):
assert(self.value.shape == rhs.value.shape)
out.value = self.value * rhs.value
elif isinstance(rhs, Real):
out.value = self.value * rhs.value
return out
def dot(self, rhs):
out = VectorReal(Nd=self.Nd)
if isinstance(rhs, VectorReal):
assert(self.value.shape == rhs.value.shape)
out.value = np.dot(self.value, rhs.value)
elif isinstance(rhs, Real):
out.value = self.value*rhs.value
return out
def transpose(self):
out = VectorReal(Nd=self.Nd)
out.value = self.value[:]
return out
class VectorComplex():
def __init__(self, Nd: int = None, value: np.ndarray = None):
if Nd != None:
self.Nd = Nd
self.value = np.array([1j]*self.Nd, dtype=complex)
else:
self.Nd = len(value)
self.value = value
def __getitem__(self, mu: int):
return Complex(self.value[mu])
def poke_component(self, mu: int, m):
if isinstance(m, Complex):
self.value[mu] = m.value
elif isinstance(m, (int, float)):
self.value[mu] = m
def __add__(self, rhs):
out = VectorComplex(Nd=self.Nd)
if isinstance(rhs, VectorComplex):
assert(self.value.shape == rhs.value.shape)
out.value = self.value + rhs.value
elif isinstance(rhs, (Real, Complex)):
out.value = self.value + rhs.value
return out
def __radd__(self, lhs):
out = VectorComplex(Nd=self.Nd)
if isinstance(lhs, VectorComplex):
assert(self.value.shape == lhs.value.shape)
out.value = self.value + lhs.value
elif isinstance(lhs, (Real, Complex)):
out.value = self.value + lhs.value
return out
def __sub__(self, rhs):
out = VectorComplex(Nd=self.Nd)
if isinstance(rhs, VectorComplex):
assert(self.value.shape == rhs.value.shape)
out.value = self.value - rhs.value
elif isinstance(rhs, (Real, Complex)):
out.value = self.value - rhs.value
return out
def __rsub__(self, lhs):
out = VectorComplex(Nd=self.Nd)
if isinstance(lhs, VectorComplex):
assert(self.value.shape == lhs.value.shape)
out.value = lhs.value - self.value
elif isinstance(lhs, (Real, Complex)):
out.value = lhs.value - self.value
return out
def __mul__(self, rhs):
out = VectorComplex(Nd=self.Nd)
if isinstance(rhs, VectorComplex):
assert(self.value.shape == rhs.value.shape)
out.value = self.value * rhs.value
elif isinstance(rhs, (Real, Complex)):
out.value = self.value * rhs.value
return out
def dot(self, rhs):
out = VectorComplex(Nd=self.Nd)
if isinstance(rhs, VectorComplex):
assert(self.value.shape == rhs.value.shape)
out.value = np.dot(self.value, rhs.value)
elif isinstance(rhs, (Real, Complex)):
out.value = self.value*rhs.value
return out
def transpose(self):
out = VectorComplex(Nd=self.Nd)
out.value = self.value[:]
return out
class VectorRealMatrix():
def __init__(self, Nd: int = None, N: int = None, value: np.ndarray = None):
self.Nd = Nd
self.N = N
if N != None and Nd != None:
self.value = np.zeros(shape=(Nd, N, N), dtype=float)
else:
self.value = value
self.Nd = value.shape[0]
self.N = value.shape[1]
def __getitem__(self, mu: int):
out = RealMatrix(N=self.N)
out.value = self.value[mu]
return out
def poke_component(self, mu: int, m):
if isinstance(m, RealMatrix):
self.value[mu] = m.value
elif isinstance(m, np.ndarray):
self.value[mu] = m
def __add__(self, rhs):
out = VectorRealMatrix(Nd=self.Nd, N=self.N)
if isinstance(rhs, VectorRealMatrix):
assert(self.value.shape == rhs.value.shape)
for mu in range(self.Nd):
out.value[mu] = self.value[mu] + rhs.value[mu]
elif isinstance(rhs, RealMatrix):
for mu in range(self.Nd):
out.value[mu] = self.value[mu] + rhs.value
elif isinstance(rhs, Real):
out.value = self.value + rhs.value
elif isinstance(rhs, (int, float)):
out.value = self.value + rhs
return out
def __radd__(self, lhs):
out = VectorRealMatrix(Nd=self.Nd, N=self.N)
if isinstance(lhs, VectorRealMatrix):
assert(self.value.shape == lhs.value.shape)
for mu in range(self.Nd):
out.value[mu] = self.value[mu] + lhs.value[mu]
elif isinstance(lhs, RealMatrix):
for mu in range(self.Nd):
out.value[mu] = self.value[mu] + lhs.value
elif isinstance(lhs, Real):
out.value = self.value + lhs.value
elif isinstance(lhs, (float, int)):
out.value = self.value + lhs
return out
def __sub__(self, rhs):
out = VectorRealMatrix(Nd=self.Nd, N=self.N)
if isinstance(rhs, VectorRealMatrix):
assert(self.value.shape == rhs.value.shape)
for mu in range(self.Nd):
out.value[mu] = self.value[mu] - rhs.value[mu]
elif isinstance(rhs, RealMatrix):
for mu in range(self.Nd):
out.value[mu] = self.value[mu] - rhs.value
elif isinstance(rhs, Real):
out.value = self.value - rhs.value
elif isinstance(rhs, (int, float)):
out.value = self.value - rhs
return out
def __rsub__(self, lhs):
out = VectorRealMatrix(Nd=self.Nd, N=self.N)
if isinstance(lhs, VectorRealMatrix):
assert(self.value.shape == lhs.value.shape)
for mu in range(self.Nd):
out.value[mu] = lhs.value[mu] - self.value[mu]
if isinstance(lhs, RealMatrix):
for mu in range(self.Nd):
out.value[mu] = lhs.value - self.value[mu]
elif isinstance(lhs, Real):
out.value = lhs.value - self.value
elif isinstance(lhs, (float, int)):
out.value = lhs - self.value
return out
def __mul__(self, rhs):
out = VectorRealMatrix(Nd=self.Nd, N=self.N)
if isinstance(rhs, VectorRealMatrix):
assert(self.value.shape == rhs.value.shape)
for mu in range(self.Nd):
out.value[mu] = np.dot(self.value[mu], rhs.value[mu])
elif isinstance(rhs, RealMatrix):
for mu in range(self.Nd):
out.value[mu] = np.dot(self.value[mu], rhs.value)
elif isinstance(rhs, Real):
out.value = self.value * rhs.value
elif isinstance(rhs, (float, int)):
out.value = self.value * rhs
return out
def transpose(self):
out = VectorRealMatrix(Nd=self.Nd, N=self.N)
for i in range(self.Nd):
out.value[i] = np.transpose(self.value[i, :, :])
return out
def trace(self):
out = VectorReal(Nd=self.Nd)
for i in range(self.Nd):
out.value[i] = np.trace(self.value[i, :, :])
return out
def det(self):
out = VectorReal(Nd=self.Nd)
for i in range(self.Nd):
out.value[i] = np.linalg.det(self.value[i, :, :])
return out
def inv(self):
out = VectorRealMatrix(Nd=self.Nd, N=self.N)
for i in range(self.Nd):
            out.value[i] = np.linalg.inv(self.value[i, :, :])
        return out
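# Illustrative usage sketch (not part of the original module): exercises the
# scalar and matrix wrappers defined above with arbitrary example values.
if __name__ == "__main__":
    a = Real(2.0)
    b = Complex(1.0 + 2.0j)
    c = a + 3.0                    # Real + float -> Real holding [5.0]
    mag2 = b.re()**2 + b.im()**2   # |b|^2 built from the real and imaginary parts
    m = Identity(3) * Real(2.0)    # scaled 3x3 identity matrix
    print(c.value, mag2.value, m.value)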
from ..panels import boxPanel
import numpy as np
NAME = 'Threshold'
DESCRIPTION = 'Identify the CP by thresholding it'
DOI = ''
import numpy as np
class CP(boxPanel): # Threshold
def create(self):
self.addParameter('Athreshold','float','Align Threshold [nN]',10.0)
self.addParameter('deltaX','float','Align left step [nm]',2000.0)
self.addParameter('Fthreshold','float','AVG area [nm]',100.0)
self.addParameter('shift','float','shift CP [nm]',0)
def calculate(self, x,y):
yth = self.getValue('Athreshold')*1e-9
if yth > np.max(y) or yth < np.min(y):
return False
jrov = 0
for j in range(len(y)-1, 1, -1):
if y[j] > yth and y[j-1] < yth:
jrov = j
break
if jrov==0 or jrov==len(y)-1:
return False
x0 = x[jrov]
dx = self.getValue('deltaX')*1e-9
ddx = self.getValue('Fthreshold')*1e-9
if ddx <= 0:
            jxalign = np.argmin((x - (x0 - dx)) ** 2)
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Tue Jul 27 08:44:52 2021
@author: gianni
"""
from scipy import constants,optimize
import numpy as np
import matplotlib.pyplot as plt
import os
from astropy.io import fits
import h5py
this_folder = os.path.dirname(os.path.abspath(__file__))
R_Sun = 6.955e8
L_Sun = 3.828e26
Rydberg_J = constants.physical_constants['Rydberg constant times hc in J'][0] #J
ionisation_potential = {'C':11.26030*constants.eV, 'O':13.61806*constants.eV} #J
class RadiationSpectrum():
def flux(self,wavelength,**kwargs):
#W/m2/m
raise NotImplementedError
class DraineISF(RadiationSpectrum):
#interstellar radiation field, original from Draine (1978),
#here in the form of Lee (1984)
#(https://ui.adsabs.harvard.edu/abs/1984ApJ...282..172L/abstract)
lambda_min = 91.2*constants.nano
lambda_max = 200*constants.nano
lambda_grid = np.linspace(lambda_min,lambda_max,1000)
def __init__(self,scaling=(lambda wavelength: 1)):
self.scaling = scaling
def flux(self,wavelength):
#for the power law, the wavelenght has to be in nm
#photons/m2/s/m:
photon_flux= 3.2e13*((wavelength/constants.nano)**-3\
- 1.61e2*(wavelength/constants.nano)**-4\
+ 6.41e3*(wavelength/constants.nano)**-5)\
* constants.centi**-2*constants.nano**-1
photon_energy = constants.h*constants.c/wavelength
flux = photon_flux*photon_energy
valid_region = (wavelength>=self.lambda_min) & (wavelength<=self.lambda_max)
flux = np.where(valid_region,flux,0)
return flux*self.scaling(wavelength=wavelength)
class HabingField(RadiationSpectrum):
def __init__(self,scaling=(lambda wavelength: 1)):
self.scaling = scaling
data_filepath = os.path.join(this_folder,'habing_field.txt')
data = np.loadtxt(data_filepath)
self.lambda_grid = data[:,0]*constants.nano
photon_energy = constants.h*constants.c/self.lambda_grid
self.flux_grid = data[:,1]/constants.centi**2/constants.nano * photon_energy #W/m2/m
def flux(self,wavelength):
return np.interp(x=wavelength,xp=self.lambda_grid,fp=self.flux_grid,
left=0,right=0) * self.scaling(wavelength=wavelength)
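# Illustrative sketch (not part of the original module): both field classes above
# expose `flux(wavelength)` in W/m2/m, so the total energy flux can be obtained by
# integrating over the wavelength grid, e.g.
#   isrf = DraineISF()
#   total_flux = np.trapz(isrf.flux(isrf.lambda_grid), isrf.lambda_grid)  # W/m2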
class StellarAtmosphere(RadiationSpectrum):
def plot_model(self,label=None):
fig,ax = plt.subplots()
ax.plot(self.lambda_grid/constants.nano,self.modelflux,'.-',label=label)
ax.set_xscale('log')
ax.set_yscale('log')
ax.set_xlabel('lambda [nm]')
ax.set_ylabel('flux at {:g} au [W/m2/m]'.format(self.ref_distance/constants.au))
if label is not None:
ax.legend(loc='best')
return ax
def flux(self,wavelength,distance):
return np.interp(wavelength,self.lambda_grid,self.modelflux,left=0,right=0)\
* (self.ref_distance/distance)**2
def luminosity(self):
flux_at_ref_distance = self.flux(wavelength=self.lambda_grid,
distance=self.ref_distance)
return np.trapz(flux_at_ref_distance,self.lambda_grid)\
* 4*np.pi*self.ref_distance**2
def _scale_spectrum(self,scaling):
self.modelflux *= scaling(wavelength=self.lambda_grid)
def write_modelflux_to_file(self,filepath,distance):
flux = self.flux(wavelength=self.lambda_grid,distance=distance)
np.savez(filepath,wavelength=self.lambda_grid,flux=flux)
class ATLASModelAtmosphere(StellarAtmosphere):
Teff_low_grid = np.arange(3000,12999,250)
Teff_high_grid = np.arange(13000,50001,1000)
Teff_grid = np.concatenate((Teff_low_grid,Teff_high_grid))
metallicity_grid = np.array((-2.5,-2.0,-1.5,-1.0,-0.5,0.0,0.2,0.5))
logg_grid = np.arange(0,5.1,0.5)
model_folder = os.path.join(this_folder,'ck04models')
max_RJ_wavelength = 3*constants.milli
@staticmethod
def assert_within_grid(value,grid):
assert np.min(grid) <= value <= np.max(grid)
@staticmethod
def get_closest_grid_value(value,grid):
index = np.argmin(np.abs(grid-value))
return grid[index]
def __init__(self,Teff,metallicity,logg,Rstar=None,obs_luminosity=None,
calibration_spec=None,verbose=False,scaling=None):
'''There are three ways to set the luminosity of the star:
1) define Rstar
2) define obs_luminosity, so that the model flux will be scaled
3) define calibration_spec (i.e. a spectrum, to which the model spectrum
will be scaled to)'''
self.assert_within_grid(value=Teff,grid=self.Teff_grid)
self.assert_within_grid(value=metallicity,grid=self.metallicity_grid)
self.assert_within_grid(value=logg,grid=self.logg_grid)
self.verbose = verbose
self.read_model(metallicity=metallicity,Teff=Teff,logg=logg)
self.extrapolate_RJ()
if Rstar is not None:
assert obs_luminosity is None and calibration_spec is None
self.ref_distance = Rstar
elif obs_luminosity is not None:
assert Rstar is None and calibration_spec is None
self.obs_luminosity = obs_luminosity
if self.verbose:
print('Rstar not specified, going to scale with luminosity')
self.calibrate_with_luminosity()
elif calibration_spec is not None:
assert Rstar is None and obs_luminosity is None
self.calibration_spec = calibration_spec
if self.verbose:
print('going to calibrate with provided spectrum')
self.calibrate_with_spectrum()
else:
raise ValueError('unable to define absolute flux and/or reference distance')
#now that modelflux is calibrated, I can apply the scaling:
if scaling is not None:
self._scale_spectrum(scaling=scaling)
def read_model(self,metallicity,Teff,logg):
self.metallicity = self.get_closest_grid_value(
value=metallicity,grid=self.metallicity_grid)
self.Teff = self.get_closest_grid_value(value=Teff,grid=self.Teff_grid)
self.logg = self.get_closest_grid_value(value=logg,grid=self.logg_grid)
if self.verbose:
print('input metallicity = {:g}, grid metallicity = {:g}'\
.format(metallicity,self.metallicity))
print('input Teff = {:g} K, grid Teff = {:g} K'.format(Teff,self.Teff))
print('input logg = {:g}, grid logg = {:g}'.format(logg,self.logg))
self.metallicity_str = 'ck'
if self.metallicity < 0:
sign_str = 'm'
else:
sign_str = 'p'
self.metallicity_str += '{:s}{:02d}'.format(
sign_str,np.abs(int(10*self.metallicity)))
if self.verbose:
print('metallicity ID: {:s}'.format(self.metallicity_str))
#this string is the key to access the flux for the specified log(g);
#for example for log(g)=4, it would be "g40"; for log(g)=4.5 it would be "g45":
logg_string = ('g%.1f'%self.logg).replace('.','')
filename = self.metallicity_str+'_{:d}.fits'.format(int(self.Teff))
if self.verbose:
print('filename: {:s}'.format(filename))
filepath = os.path.join(self.model_folder,self.metallicity_str,filename)
hdulist = fits.open(filepath)
modeldata = hdulist[1].data
hdulist.close()
self.lambda_grid = modeldata['WAVELENGTH'].astype(np.float64)*constants.angstrom
#flux in [W/m2/m] at the stellar surface:
self.modelflux = modeldata[logg_string].astype(np.float64)\
*constants.erg/constants.centi**2/constants.angstrom
def extrapolate_RJ(self):
max_wavelength = self.lambda_grid[-1]
prop_constant = max_wavelength**4*self.modelflux[-1]
RJ_wavelength = np.logspace(np.log10(max_wavelength*1.05),
np.log10(self.max_RJ_wavelength),100)
RJ_flux = prop_constant/RJ_wavelength**4
self.original_lambda_grid = self.lambda_grid.copy()
self.lambda_grid = np.concatenate((self.lambda_grid,RJ_wavelength))
self.modelflux = np.concatenate((self.modelflux,RJ_flux))
def calibrate_with_luminosity(self):
self.ref_distance = 1*constants.au
uncalibrated_luminosity = self.luminosity()
self.modelflux *= self.obs_luminosity/uncalibrated_luminosity
assert np.isclose(self.obs_luminosity,self.luminosity(),rtol=1e-6,atol=0)
def calibrate_with_spectrum(self):
cal_wave = self.calibration_spec['wave']
cal_flux = self.calibration_spec['flux']
self.ref_distance = self.calibration_spec['ref_distance']
try:
cal_errors = self.calibration_spec['error']
except KeyError:
cal_errors = np.ones_like(cal_flux)
def residual2(scaling):
flux = self.flux(wavelength=cal_wave,
distance=self.calibration_spec['ref_distance'])
scaled_model_flux = scaling*flux
res = cal_flux-scaled_model_flux
return np.sum(res**2/cal_errors**2)
x0 = 1
optimisation = optimize.minimize(residual2,x0,method='Nelder-Mead')
assert optimisation.success
self.spec_calibration_scaling = optimisation.x[0]
if self.verbose:
print('optimal calibration scaling: {:g}'.format(
self.spec_calibration_scaling))
self.modelflux *= self.spec_calibration_scaling
def plot_model(self,title=None):
ax = StellarAtmosphere.plot_model(self,label='final flux')
if title is not None:
ax.set_title(title)
        if hasattr(self,'spec_calibration_scaling'):
            ax.plot(self.lambda_grid/constants.nano,self.modelflux,'.-',
                    label='before calibration')
            plot_cal_flux = self.calibration_spec['flux']\
                            *(self.calibration_spec['ref_distance']/self.ref_distance)**2
            ax.plot(self.calibration_spec['wave']/constants.nano,plot_cal_flux,
                    label='calibration')
for lamb,lab in zip((self.original_lambda_grid[-1],self.max_RJ_wavelength),
('RJ region',None)):
ax.axvline(lamb/constants.nano,color='black',linestyle='dashed',label=lab)
ax.legend(loc='best')
return ax
class betaPicObsSpectrum(StellarAtmosphere):
#from Alexis email
this_folder = os.path.dirname(os.path.abspath(__file__))
model_filepath = os.path.join(this_folder,'bPicNormFlux1AU.txt')
cutoff_flux = 15832
max_cutoff_wavelength = 1*constants.micro
def __init__(self,dilution=1,scaling=None):
self.ref_distance = 1*constants.au
model_data = np.loadtxt(self.model_filepath)
data_wave = model_data[:,0]*constants.angstrom
data_flux = 10**model_data[:,1]*constants.erg/constants.centi**2\
/constants.angstrom #W/m2/m
self.min_betaPic_data_wave = np.min(data_wave)
self.max_betaPic_data_wave = np.max(data_wave)
betaPic_ATLAS_atm = ATLASModelAtmosphere(Teff=8052,metallicity=0.05,logg=4.15,
obs_luminosity=8.7*L_Sun)
left_ATLAS_region = betaPic_ATLAS_atm.lambda_grid < self.min_betaPic_data_wave
left_ATLAS_wave = betaPic_ATLAS_atm.lambda_grid[left_ATLAS_region]
right_ATLAS_region = betaPic_ATLAS_atm.lambda_grid > self.max_betaPic_data_wave
right_ATLAS_wave = betaPic_ATLAS_atm.lambda_grid[right_ATLAS_region]
self.lambda_grid = np.concatenate((left_ATLAS_wave,data_wave,right_ATLAS_wave))
left_ATLAS_flux = betaPic_ATLAS_atm.flux(wavelength=left_ATLAS_wave,
distance=self.ref_distance)
right_ATLAS_flux = betaPic_ATLAS_atm.flux(wavelength=right_ATLAS_wave,
distance=self.ref_distance)
self.modelflux = np.concatenate((left_ATLAS_flux,data_flux,right_ATLAS_flux))
#apply dilution:
self.lambda_grid = self.lambda_grid[::dilution]
self.modelflux = self.modelflux[::dilution]
#the original spectrum has some unphysical plateaus, so just put those to 0
#(although to be honest, who knows if that's better...)
cutoff_region = (self.modelflux<self.cutoff_flux)\
& (self.lambda_grid<self.max_cutoff_wavelength)
self.modelflux[cutoff_region] = 0
if scaling is not None:
self._scale_spectrum(scaling=scaling)
def plot_model(self,title=None):
ax = StellarAtmosphere.plot_model(self,label='final beta Pic flux')
if title is not None:
ax.set_title(title)
for lamb,lab in zip((self.min_betaPic_data_wave,self.max_betaPic_data_wave),
('beta Pic data region',None)):
ax.axvline(lamb/constants.nano,linestyle='dashed',color='red',label=lab)
ax.legend(loc='best')
return ax
class CrossSection():
def crosssection(self,wavelength):
raise NotImplementedError
def plot(self,wavelength=None,title=None):
lamb = self.lambda_grid if wavelength is None else wavelength
fig,ax = plt.subplots()
if title is not None:
ax.set_title(title)
ax.plot(lamb/constants.micro,self.crosssection(lamb))
ax.set_xlabel('wavelength [um]')
ax.set_ylabel('cs [m2]')
ax.set_xscale('log')
ax.set_yscale('log')
return ax
class PhotodissociationCrossSection(CrossSection):
data_folderpath = os.path.join(this_folder,'crosssections')
def __init__(self,filename):
data = h5py.File(os.path.join(self.data_folderpath,filename),'r')
self.lambda_grid = data['wavelength'][()] * constants.nano
self.data_cs = data['photodissociation'][()] *constants.centi**2
self.min_data_wavelength = np.min(self.lambda_grid)
self.max_data_wavelength = np.max(self.lambda_grid)
def crosssection(self,wavelength):
return np.interp(wavelength,self.lambda_grid,self.data_cs,left=0,
right=0)
Osterbrock_a_T = {'C':12.2e-18*constants.centi**2,'O':2.94e-18*constants.centi**2}
Osterbrock_s = {'C':2,'O':1}
Osterbrock_beta = {'C':3.32,'O':2.66}
Osterbrock_alpha_R = {'C':4.66e-13*constants.centi**3,'O':3.31e-13*constants.centi**3}#m3/s
Osterbrock_f = {'C':0.5,'O':0.26}
Osterbrock_alpha_d = {'C':np.array([1.91e-13,1.84e-14,1.63e-13])*constants.centi**3,
'O':np.array([7.35e-14,7.62e-14,7.74e-14])*constants.centi**3}
Osterbrock_alpha_d_T = np.array([7500,10000,15000])
class OsterbrockIonisationCrossSection(CrossSection):
def __init__(self,element):
self.element = element
self.a_T = Osterbrock_a_T[self.element]
self.s = Osterbrock_s[self.element]
self.beta = Osterbrock_beta[self.element]
self.nu_T = ionisation_potential[self.element]/constants.h #Hz
def crosssection(self,wavelength):
nu = constants.c/wavelength
cs = self.a_T * (self.beta*(nu/self.nu_T)**-self.s
+ (1-self.beta)*(nu/self.nu_T)**(-self.s-1))
return np.where(nu>=self.nu_T,cs,0) #m2
class OsterbrockRecombination():
def __init__(self,element):
self.element = element
self.alpha_R = Osterbrock_alpha_R[self.element]
self.f = Osterbrock_f[self.element]
self.alpha_d = Osterbrock_alpha_d[self.element]
self.min_T,self.max_T = np.min(Osterbrock_alpha_d_T),np.max(Osterbrock_alpha_d_T)
@staticmethod
def phi(T):
return np.interp(T,[5000,7500,10000,15000,20000],[1.317,1.13,1,.839,.732])
def interpolated_alpha_d(self,T):
assert np.all(self.min_T <= T) and np.all(T <= self.max_T),\
'requested temperature out of interpolation range'
return np.interp(T,Osterbrock_alpha_d_T,self.alpha_d)
def recombination_coeff(self,T):
return self.alpha_R*np.sqrt(10000./T)*(self.f+(1-self.f)*self.phi(T))\
+ self.interpolated_alpha_d(T) #m3/s
Nahar_data_folderpath = os.path.join(this_folder,'Nahar_atomic_data')
class NaharIonisationCrossSection(CrossSection):
def __init__(self,element):
self.element = element
filepath = os.path.join(
Nahar_data_folderpath,
'{:s}I_photoionisation_cs_groundstate.txt'.format(self.element))
cs_data = np.loadtxt(filepath)
#invert with ::-1 to have increasing wavelength
self.energy = cs_data[:,0][::-1] * Rydberg_J
self.cs = cs_data[:,1][::-1] * constants.mega*1e-28 #m2
self.lambda_grid = constants.h*constants.c/self.energy
assert np.all(np.diff(self.lambda_grid)>0)
def crosssection(self,wavelength):
E = constants.h*constants.c/wavelength
above_ionisation = E >= ionisation_potential[self.element]
interp_cs = np.interp(wavelength,self.lambda_grid,self.cs,left=0,right=0)
return np.where(above_ionisation,interp_cs,0) #m2
class NaharRecombination():
def __init__(self,element):
self.element = element
filepath = os.path.join(Nahar_data_folderpath,
'{:s}I_recombination_total.txt'.format(self.element))
recomb_data = np.loadtxt(filepath)
self.logT = recomb_data[:,0]
self.recomb = recomb_data[:,-1]*constants.centi**3 #m3/s
def recombination_coeff(self,T):
logT = np.log10(T)
assert np.all(logT >= np.min(self.logT)) and np.all(logT < np.max(self.logT)),\
'requested T={:g} not within bounds ({:g}-{:g})'.format(
T,10**np.min(self.logT),10**np.max(self.logT))
        return np.interp(logT,self.logT,self.recomb,left=np.nan,right=np.nan)
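# Illustrative sketch (not part of the original module): the Nahar classes read
# tabulated data from disk, so they only work when the data files are present, e.g.
#   cs = NaharIonisationCrossSection('C')
#   sigma = cs.crosssection(wavelength=80*constants.nano)        # m2
#   alpha = NaharRecombination('C').recombination_coeff(T=1e4)   # m3/s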
"""This module defines classes which can read image data from WSI formats."""
import copy
import math
import os
import pathlib
import re
import warnings
from datetime import datetime
from numbers import Number
from typing import Iterable, Optional, Tuple, Union
import numpy as np
import openslide
import pandas as pd
import tifffile
import zarr
from tiatoolbox import utils
from tiatoolbox.tools import tissuemask
from tiatoolbox.utils.env_detection import pixman_warning
from tiatoolbox.utils.exceptions import FileNotSupported
from tiatoolbox.wsicore.wsimeta import WSIMeta
pixman_warning()
NumPair = Tuple[Number, Number]
IntPair = Tuple[int, int]
Bounds = Tuple[Number, Number, Number, Number]
IntBounds = Tuple[int, int, int, int]
Resolution = Union[Number, Tuple[Number, Number], np.ndarray]
def is_dicom(path: pathlib.Path) -> bool:
"""Check if the input is a DICOM file.
Args:
path (pathlib.Path): Path to the file to check.
Returns:
bool: True if the file is a DICOM file.
"""
path = pathlib.Path(path)
is_dcm = path.suffix.lower() == ".dcm"
is_dcm_dir = path.is_dir() and any(
p.suffix.lower() == ".dcm" for p in path.iterdir()
)
return is_dcm or is_dcm_dir
def is_tiled_tiff(path: pathlib.Path) -> bool:
"""Check if the input is a tiled TIFF file.
Args:
path (pathlib.Path):
Path to the file to check.
Returns:
bool:
True if the file is a tiled TIFF file.
"""
path = pathlib.Path(path)
try:
tif = tifffile.TiffFile(path)
except tifffile.TiffFileError:
return False
return tif.pages[0].is_tiled
class WSIReader:
"""Base whole slide image (WSI) reader class.
This class defines functions for reading pixel data and metadata
from whole slide image (WSI) files.
Attributes:
input_img (pathlib.Path):
Input path to WSI file.
Args:
input_img (:obj:`str` or :obj:`pathlib.Path` or :class:`numpy.ndarray`):
Input path to WSI.
mpp (:obj:`tuple` or :obj:`list` or :obj:`None`, optional):
The MPP of the WSI. If not provided, the MPP is approximated
from the objective power.
power (:obj:`float` or :obj:`None`, optional):
The objective power of the WSI. If not provided, the power
is approximated from the MPP.
"""
@staticmethod # noqa: A003
def open(
input_img: Union[str, pathlib.Path, np.ndarray],
mpp: Optional[Tuple[Number, Number]] = None,
power: Optional[Number] = None,
) -> "WSIReader":
"""Return an appropriate :class:`.WSIReader` object.
Args:
input_img (str, pathlib.Path, :class:`numpy.ndarray`, or :obj:WSIReader):
Input to create a WSI object from. Supported types of
input are: `str` and `pathlib.Path` which point to the
location on the disk where image is stored,
:class:`numpy.ndarray` in which the input image in the
form of numpy array (HxWxC) is stored, or :obj:WSIReader
which is an already created tiatoolbox WSI handler. In
the latter case, the function directly passes the
input_imge to the output.
mpp (tuple):
(x, y) tuple of the MPP in the units of the input image.
power (float):
Objective power of the input image.
Returns:
WSIReader:
An object with base :class:`.WSIReader` as base class.
Examples:
>>> from tiatoolbox.wsicore.wsireader import WSIReader
>>> wsi = WSIReader.open(input_img="./sample.svs")
"""
if not isinstance(input_img, (WSIReader, np.ndarray, str, pathlib.Path)):
raise TypeError(
"Invalid input: Must be a WSIRead, numpy array, string or pathlib.Path"
)
if isinstance(input_img, np.ndarray):
return VirtualWSIReader(input_img, mpp=mpp, power=power)
if isinstance(input_img, WSIReader):
return input_img
if is_dicom(input_img):
return DICOMWSIReader(input_img, mpp=mpp, power=power)
_, _, suffixes = utils.misc.split_path_name_ext(input_img)
if suffixes[-1] not in [
".svs",
".npy",
".ndpi",
".mrxs",
".tif",
".tiff",
".jp2",
".png",
".jpg",
".jpeg",
]:
raise FileNotSupported(f"File {input_img} is not a supported file format.")
if suffixes[-1] in (".npy",):
input_img = np.load(input_img)
return VirtualWSIReader(input_img, mpp=mpp, power=power)
if suffixes[-2:] in ([".ome", ".tiff"],):
return TIFFWSIReader(input_img, mpp=mpp, power=power)
if suffixes[-1] in (".tif", ".tiff") and is_tiled_tiff(input_img):
try:
return OpenSlideWSIReader(input_img, mpp=mpp, power=power)
except openslide.OpenSlideError:
return TIFFWSIReader(input_img, mpp=mpp, power=power)
if suffixes[-1] in (".jpg", ".jpeg", ".png", ".tif", ".tiff"):
return VirtualWSIReader(input_img, mpp=mpp, power=power)
if suffixes[-1] in (".jp2",):
return OmnyxJP2WSIReader(input_img, mpp=mpp, power=power)
return OpenSlideWSIReader(input_img, mpp=mpp, power=power)
def __init__(
self,
input_img: Union[str, pathlib.Path, np.ndarray],
mpp: Optional[Tuple[Number, Number]] = None,
power: Optional[Number] = None,
) -> None:
if isinstance(input_img, np.ndarray):
self.input_path = None
else:
self.input_path = pathlib.Path(input_img)
self._m_info = None
# Set a manual mpp value
if mpp and isinstance(mpp, Number):
mpp = (mpp, mpp)
if mpp and (not hasattr(mpp, "__len__") or len(mpp) != 2):
raise TypeError("`mpp` must be a number or iterable of length 2.")
self._manual_mpp = tuple(mpp) if mpp else None
# Set a manual power value
if power and not isinstance(power, Number):
raise TypeError("`power` must be a number.")
self._manual_power = power
@property
def info(self) -> WSIMeta:
"""WSI metadata property.
This property is cached and only generated on the first call.
Returns:
WSIMeta:
An object containing normalized slide metadata
"""
# In Python>=3.8 this could be replaced with functools.cached_property
if self._m_info is not None:
return copy.deepcopy(self._m_info)
self._m_info = self._info()
if self._manual_mpp:
self._m_info.mpp = np.array(self._manual_mpp)
if self._manual_power:
self._m_info.objective_power = self._manual_power
return self._m_info
@info.setter
def info(self, meta: WSIMeta) -> None:
"""WSI metadata setter.
Args:
meta (WSIMeta): Metadata object.
"""
self._m_info = meta
def _info(self) -> WSIMeta:
"""WSI metadata internal getter used to update info property.
Missing values for MPP and objective power are approximated and
a warning raised. Objective power is calculated as the mean of
the :func:utils.transforms.mpp2common_objective_power in x and
y. MPP (x and y) is approximated using objective power via
:func:utils.transforms.objective_power2mpp.
Returns:
WSIMeta:
An object containing normalized slide metadata.
"""
raise NotImplementedError
def _find_optimal_level_and_downsample(
self, resolution: Resolution, units: str, precision: int = 3
) -> Tuple[int, np.ndarray]:
"""Find the optimal level to read at for a desired resolution and units.
The optimal level is the most downscaled level of the image
pyramid (or multi-resolution layer) which is larger than the
desired target resolution. The returned scale is the downsample
factor required, post read, to achieve the desired resolution.
Args:
resolution (float or tuple(float)):
Resolution to find optimal read parameters for
units (str):
Units of the scale. Allowed values are the same as for
`WSIReader._relative_level_scales`
precision (int or optional):
Decimal places to use when finding optimal scale. This
can be adjusted to avoid errors when an unnecessary
precision is used. E.g. 1.1e-10 > 1 is insignificant in
most cases. Defaults to 3.
Returns:
tuple:
Optimal read level and scale factor between the optimal
level and the target scale (usually <= 1):
- :py:obj:`int` - Optimal read level.
- :class:`numpy.ndarray` - Scale factor in X and Y.
"""
level_scales = self.info.relative_level_scales(resolution, units)
level_resolution_sufficient = [
all(np.round(x, decimals=precision) <= 1) for x in level_scales
]
# Check if level 0 is lower resolution than required (scale > 1)
if not any(level_resolution_sufficient):
level = 0
else:
# Find the first level with relative scale >= 1.
# Note: np.argmax finds the index of the first True element.
# Here it is used on a reversed list to find the first
# element <=1, which is the same element as the last <=1
# element when counting forward in the regular list.
reverse_index = np.argmax(level_resolution_sufficient[::-1])
# Convert the index from the reversed list to the regular index (level)
level = (len(level_scales) - 1) - reverse_index
scale = level_scales[level]
# Check for requested resolution > than baseline resolution
if any(np.array(scale) > 1):
warnings.warn(
"Read: Scale > 1."
"This means that the desired resolution is higher"
" than the WSI baseline (maximum encoded resolution)."
" Interpolation of read regions may occur."
)
return level, scale
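    # Illustrative worked example (comments only; not part of tiatoolbox): for a
    # slide with baseline mpp 0.25 and level downsamples [1, 4], the per-level mpp
    # is [0.25, 1.0]. Requesting 0.5 mpp gives relative scales [0.5, 2.0]; only
    # level 0 is <= 1, so level 0 is read and downscaled by a factor of 0.5
    # afterwards. Requesting 1.0 mpp gives scales [0.25, 1.0]; the most downscaled
    # level still <= 1 is level 1, which is read with no post-read rescaling.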
def find_read_rect_params(
self,
location: IntPair,
size: IntPair,
resolution: Resolution,
units: str,
precision: int = 3,
) -> Tuple[int, IntPair, IntPair, NumPair, IntPair]:
"""Find optimal parameters for reading a rect at a given resolution.
Reading the image at full baseline resolution and re-sampling to
the desired resolution would require a large amount of memory
and be very slow. This function checks the other resolutions
stored in the WSI's pyramid of resolutions to find the lowest
resolution (smallest level) which is higher resolution (a larger
level) than the requested output resolution.
In addition to finding this 'optimal level', the scale factor to
apply after reading in order to obtain the desired resolution is
found along with conversions of the location and size into level
and baseline coordinates.
Args:
location (tuple(int)):
Location in terms of the baseline image (level 0)
resolution.
size (tuple(int)):
Desired output size in pixels (width, height) tuple.
resolution (float):
Desired output resolution.
units (str):
Units of scale, default = "level". Supported units are:
- microns per pixel ('mpp')
- objective power ('power')
- pyramid / resolution level ('level')
- pixels per baseline pixel ("baseline")
precision (int, optional):
Decimal places to use when finding optimal scale. See
:func:`find_optimal_level_and_downsample` for more.
Returns:
tuple:
Parameters for reading the requested region
- :py:obj:`int` - Optimal read level.
- :py:obj:`tuple` - Read location in level coordinates.
- :py:obj:`int` - X location.
- :py:obj:`int` - Y location.
- :py:obj:`tuple` - Region size in level coordinates.
- :py:obj:`int` - Width.
- :py:obj:`int` - Height.
- :py:obj:`tuple` - Scaling to apply after level read to
achieve desired output resolution.
- :py:obj:`float` - X scale factor.
- :py:obj:`float` - Y scale factor.
- :py:obj:`tuple` - Region size in baseline coordinates.
- :py:obj:`int` - Width.
- :py:obj:`int` - Height.
"""
read_level, post_read_scale_factor = self._find_optimal_level_and_downsample(
resolution, units, precision
)
info = self.info
level_downsample = info.level_downsamples[read_level]
baseline_read_size = np.round(
np.array(size) * level_downsample / post_read_scale_factor
).astype(int)
level_read_size = np.round(np.array(size) / post_read_scale_factor).astype(int)
        level_location = np.round(np.array(location) / level_downsample).astype(int)
        return read_level, level_location, level_read_size, post_read_scale_factor, baseline_read_size
import matplotlib.pyplot as plt
import matplotlib.dates as mdates
#import stationary_block_bootstrap as sbb
import pandas as pd
import numpy as np
import scipy.stats
import numpy
import time
import random
#import state_variables
import os
import scipy.stats
import sklearn.feature_selection
import matplotlib.gridspec as gridspec
import copy
from argotools.config import *
from argotools.forecastlib.handlers import *
from argotools.forecastlib.functions import *
import argotools.forecastlib.stationary_block_bootstrap as sbb
from argotools.dataFormatter import *
import seaborn as sns
import matplotlib.ticker as mticker
import math
from matplotlib.ticker import MaxNLocator,IndexFormatter, FormatStrFormatter
class OutputVis:
# Variables : top_n = 3, ranking_metric = 'rmse', ranking_season ='ALL_PERIOD', preds (vector/PD containing all predictions), metrics (matrix/PD containing all metrics),
# Load predictions and csvs from file,
# get name of models, number of models, name of metrics, table variable names (season1, season2... allPeriod).
# Get RANKING METRIC or all models in the file. Check if theres more than one first.
# FUNC STATISTICS BETWEEN THE MODELS : MEAN, VARIANCE, BEST MODEL, WORST MODEL
# figure 1 : Time-series, error and percent error
# figure 2: metric / plot
def __init__(self, folder_dir=None, ids=None, overview_folder='_overview'):
# Loading tables and files
if folder_dir is None:
            print('WARNING! No main folder directory specified. Add it as an attribute '
                  'or specify it on every function call that requires it.')
self.folder_main = folder_dir
self.ids = ids
self.overview_folder = overview_folder
print('Visualizer initialized')
# imported VARS
def plot_SEC(self, series_filepath=None, coeff_filepath=None, target_name='ILI', models=None, color_dict=None, start_period=None, end_period=None, alpha_dict=None, output_filename=None, ext='png', mode='save', n_coeff=20, cmap_color='RdBu_r', error_type='Error', vmin=-1, vmax=1, font_path=None):
if font_path:
from matplotlib import font_manager
prop = font_manager.FontProperties(fname=font_path)
if color_dict is None:
color_dict = dict(zip(models, [tuple(np.random.random(3)) for mod in models]))
if alpha_dict is None:
alpha_dict = dict(zip(models, [1 for mod in models]))
series_df = pd.read_csv(series_filepath, index_col=0)
coeff_df = pd.read_csv(coeff_filepath, index_col=0)
if start_period is None:
start_period = series_df.index[0]
if end_period is None:
end_period = series_df.index[-1]
series_df = series_df[start_period:end_period]
coeff_df = coeff_df[start_period:end_period]
target = series_df[target_name].values
series = {}
errors = {}
for mod in models:
series[mod] = series_df[mod].values
errors[mod] = np.abs(target - series[mod])
indices = list(series_df[target_name].index.values)
#plotting target
f, axarr = plt.subplots(3,2, gridspec_kw = {'height_ratios':[2,1,3], 'width_ratios':[16,1]})
axarr[0,0].fill_between(x=list(range(len(indices))),y1=target, facecolor='gray', alpha=0.5, label=target_name)
#plotting series
for mod in models:
axarr[0,0].plot(series[mod], label=mod, color=color_dict[mod], alpha=alpha_dict[mod])
axarr[1,0].plot(errors[mod], color=color_dict[mod], alpha=alpha_dict[mod])
if n_coeff is None:
n_coeff = coeff_df.shape[1]
means = coeff_df.mean(axis=0)
coeff_names = list(coeff_df)
ordered_names = [ name for v, name in sorted(zip(means, coeff_names), key=lambda x: x[0], reverse=True)]
coeff_df = coeff_df[ordered_names[:n_coeff]]
sns.heatmap(coeff_df.T, vmin=vmin, vmax=vmax, cmap=cmap_color, center=None, \
robust=False, annot=None, fmt='.2g', annot_kws=None, linewidths=0,\
linecolor='white', cbar=True, cbar_kws=None, cbar_ax=axarr[2,1], square=False,\
xticklabels='auto', yticklabels=True, mask=None, ax=axarr[2,0])
plt.gcf().set_size_inches([10, int(n_coeff/2)])
plt.sca(axarr[0,0])
plt.legend(frameon=False, ncol=len(models))
plt.xlim([0, len(indices)])
plt.ylim(bottom=0)
plt.xticks(range(len(indices)),indices, rotation=0)
plt.gca().xaxis.set_major_formatter(IndexFormatter(indices))
plt.gca().xaxis.set_major_locator(mticker.MaxNLocator(6))
plt.gca().set_xticklabels([])
plt.grid(linestyle = 'dotted', linewidth = .6)
plt.sca(axarr[1,0])
plt.xlim([0, len(indices)])
plt.xticks(range(len(indices)),indices, rotation=0)
plt.gca().xaxis.set_major_formatter(IndexFormatter(indices))
plt.gca().xaxis.set_major_locator(mticker.MaxNLocator(6))
plt.gca().set_xticklabels([])
plt.grid(linestyle = 'dotted', linewidth = .6)
plt.sca(axarr[0,1])
plt.axis('off')
plt.sca(axarr[1,1])
plt.axis('off')
plt.sca(axarr[2,0])
plt.xticks(range(len(indices)),indices, rotation=0)
plt.gca().xaxis.set_major_formatter(IndexFormatter(indices))
plt.gca().xaxis.set_major_locator(mticker.MaxNLocator(6))
        if font_path:
            plt.gca().set_yticklabels(ordered_names[:n_coeff], fontproperties=prop)
        else:
            plt.gca().set_yticklabels(ordered_names[:n_coeff])
# STYLE
axarr[0,0].spines['right'].set_visible(False)
axarr[0,0].spines['top'].set_visible(False)
axarr[1,0].spines['right'].set_visible(False)
axarr[1,0].spines['top'].set_visible(False)
axarr[0,0].set_ylabel(target_name)
axarr[1,0].set_ylabel(error_type)
plt.subplots_adjust(left=.2, bottom=.1, right=.95, top=.9, wspace=.05, hspace=.20)
if mode == 'show':
plt.show()
elif mode == 'save':
if output_filename is None:
                # `model` and `id_` are not defined in this scope; fall back to a
                # generic filename saved under the main folder.
                output_filename = 'SEC_plot'
                plt.savefig('{0}/{1}.{2}'.format(self.folder_main, output_filename, ext), format=ext)
else:
plt.savefig(output_filename+'.{0}'.format(ext), format=ext)
plt.close()
def plot_coefficients(self, id_=None, model=None, coefficients_filepath=None, cmap_color='RdBu_r',\
n_coeff=None, filename='_coefficients.csv', output_filename=None, ext='png', mode='show'):
if coefficients_filepath:
coefficients = pd.read_csv(coefficients_filepath, index_col=0)
else:
coefficients = pd.read_csv('{0}/{1}/{2}'.format(self.folder_main, id_, model), index_col=0)
coefficients.fillna(0)
if n_coeff is None:
n_coeff = coefficients.shape[1]
means = coefficients.mean(axis=0)
coeff_names = list(coefficients)
ordered_names = [ name for v, name in sorted(zip(means, coeff_names), key=lambda x: x[0], reverse=True)]
coefficients = coefficients[ordered_names[:n_coeff]]
sns.heatmap(coefficients.T, vmin=None, vmax=None, cmap=cmap_color, center=None, \
robust=False, annot=None, fmt='.2g', annot_kws=None, linewidths=0,\
linecolor='white', cbar=True, cbar_kws=None, cbar_ax=None, square=False,\
xticklabels='auto', yticklabels=True, mask=None, ax=None)
plt.gcf().set_size_inches([10, int(n_coeff/3)])
if mode == 'show':
plt.show()
elif mode == 'save':
if output_filename is None:
output_filename = '{0}_coefficients'.format(model)
                plt.savefig('{0}/{1}/{2}.{3}'.format(self.folder_main, id_, output_filename, ext), format=ext)
else:
plt.savefig(output_filename+'.{0}'.format(ext), format=ext)
plt.close()
    def inter_group_lollipop_comparison(self, ids_dict, path_dict, metric, period, models, benchmark, color_dict=None, alpha_dict=None, metric_filename='metrics.csv', bar_separation_multiplier=1.5, mode='show', output_filename='LollipopTest', plot_domain=None, ext='png'):
"""
Plots the ratio of the metric score for each of the models against a benchmark in a lollipop plot to compare between experiments.
Parameters
__________
ids_dict: dict
Dictionary containing the list of ids for each experiment
path_dict: dict
Dictionary containing the path to the experiment folders (must coincide with the keys of ids_dict)
metric: str
String containing the name of the metric to look for in the predictions file
period: str
Column name containing the values to plot
models: List, optional (default None)
String list containing the names of the models to plot
benchmark: str
The name within "models" which will serve as the benchmark
color_dict : dict
Dictionary containing specific colors for the models to plot
metric_filename : str, optional (default metrics.csv)
    mode : str, optional (default is 'show')
        If 'save', the function saves the plot in the experiment's overview folder.
        If 'show', the function displays the plot with plt.show().
alpha_dict : dict, optional (default is None)
dictionary specifying the opacity of the bars in the plot (alpha argument in matplotlib).
If set to None, then all opacities are set to 1
output_filename : str, optional (default is None)
If set to None, output_filename is set metricname_barplot
ext : str, optional (default is png)
Extension formal to save the barplot.
plot_domain : list, optional (default is [0,1])
list of two integers that sets the limits in the plot (plt.xlim)
bar_separation_multiplier : float, optional (default is 1)
Parameter that functions as multiplier for the separion between bars in the plot.
if set to 1, then bars are plotted in locations 1,2,3... if set to 2, then 2,4,6, etc
"""
fig, axarr = plt.subplots(len(ids_dict.keys()),1)
axes = axarr.ravel()
if color_dict is None:
color_dict = dict(zip(models, ['b']*len(models)))
if alpha_dict is None:
alpha_dict = dict(zip(models, [1]*len(models)))
        for exp_idx, (experiment, folder_main) in enumerate(path_dict.items()):
            plt.sca(axes[exp_idx])
ids = ids_dict[experiment]
values_dict = dict(zip(models, [[] for mod in models]))
min_val = float('inf')
max_val = float('-inf')
indices = []
overview_path = '{0}/{1}'.format(folder_main, '_overview')
for i, id_ in enumerate(ids):
indices.append(i*bar_separation_multiplier)
id_path = '{0}/{1}'.format(folder_main, id_)
df = pd.read_csv('{0}/{1}'.format(id_path, metric_filename))
df = df[df['METRIC']==metric]
for j, mod in enumerate(models):
ratio = copy.copy(df[df['MODEL']==mod][period].values[0]/df[df['MODEL']==benchmark][period].values[0])
if metric in ['ERROR', 'RMSE', 'NRMSE', 'MAPE']:
ratio=(1/ratio)
values_dict[mod].append(ratio)
if ratio < min_val:
min_val = ratio
if ratio > max_val:
max_val = ratio
bar_width = 1/len(models)
indices = np.array(indices)
for i, mod in enumerate(models):
heights = values_dict[mod]
bar_positions = indices + bar_width*i
(markers, stemlines, baseline) = plt.stem(bar_positions, heights, linefmt='--')
plt.setp(markers, marker='o', markersize=7, color=color_dict[mod], alpha=alpha_dict[mod], label=mod)
plt.setp(stemlines, color=color_dict[mod], linewidth=1)
plt.setp(baseline, visible=False)
# Black line
plt.plot([0,bar_positions[-1]], [1,1],'--',color='.6', alpha=.6)
plt.gca().spines['top'].set_visible(False)
plt.gca().spines['right'].set_visible(False)
if experiment == 'State':
ids = [id_[-3:] for id_ in ids]
plt.xticks(indices+bar_width*((len(models)-1)/2), ids)
plt.ylim([min_val*.95, max_val*1.05])
plt.xlim([-.3, bar_positions[-1]+.3])
            if exp_idx == 0:
                axes[exp_idx].legend(frameon=False, ncol=len(models))
plt.title('{0} barplot'.format(metric))
if mode == 'show':
plt.show()
elif mode == 'save':
if output_filename is None:
output_filename = '{0}_barplot'.format(metric)
plt.gcf().set_size_inches([6,15])
plt.savefig('{0}/{1}.{2}'.format(overview_path, output_filename, ext), format=ext)
plt.close()
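    # Illustrative call sketch (comments only; experiment names, ids and paths are
    # hypothetical):
    #   vis = OutputVis(folder_dir='./experiments')
    #   vis.inter_group_lollipop_comparison(
    #       ids_dict={'State': ['US_MA', 'US_NY'], 'City': ['Boston', 'NYC']},
    #       path_dict={'State': './experiments/state', 'City': './experiments/city'},
    #       metric='RMSE', period='ALL_PERIOD',
    #       models=['ARGO', 'AR52'], benchmark='AR52')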
    def group_lollipop_ratio(self, ids, metric, period, models, benchmark, folder_main = None, color_dict=None, alpha_dict=None, metric_filename='metrics.csv', bar_separation_multiplier=1.5, mode='show', output_filename='LollipopTest', plot_domain=None, ext='png'):
"""
Plots the ratio of the metric score for each of the models against a benchmark in a lollipop plot.
Parameters
__________
id_: str
Identifier for the region to look for
metric: str
String containing the name of the metric to look for in the predictions file
period: str
Column name containing the values to plot
models: List, optional (default None)
String list containing the names of the models to plot
benchmark: str
The name within "models" which will serve as the benchmark
color_dict : dict
Dictionary containing specific colors for the models to plot
metric_filename : str, optional (default metrics.csv)
    mode : str, optional (default is 'show')
        If 'save', the function saves the plot in the experiment's overview folder.
        If 'show', the function displays the plot with plt.show().
alpha_dict : dict, optional (default is None)
dictionary specifying the opacity of the bars in the plot (alpha argument in matplotlib).
If set to None, then all opacities are set to 1
output_filename : str, optional (default is None)
If set to None, output_filename is set metricname_barplot
ext : str, optional (default is png)
Extension formal to save the barplot.
plot_domain : list, optional (default is [0,1])
list of two integers that sets the limits in the plot (plt.xlim)
bar_separation_multiplier : float, optional (default is 1)
Multiplier for the separation between bars in the plot.
If set to 1, bars are plotted at locations 1, 2, 3...; if set to 2, at 2, 4, 6, etc.
"""
if color_dict is None:
color_dict = dict(zip(models, ['b']*len(models)))
if alpha_dict is None:
alpha_dict = dict(zip(models, [1]*len(models)))
if folder_main is None:
folder_main = self.folder_main
values_dict = dict(zip(models, [[] for mod in models]))
min_val = float('inf')
max_val = float('-inf')
indices = []
overview_path = '{0}/{1}'.format(folder_main, '_overview')
for i, id_ in enumerate(ids):
indices.append(i*bar_separation_multiplier)
id_path = '{0}/{1}'.format(folder_main, id_)
df = pd.read_csv('{0}/{1}'.format(id_path, metric_filename))
df = df[df['METRIC']==metric]
for j, mod in enumerate(models):
ratio = copy.copy(df[df['MODEL']==mod][period].values[0]/df[df['MODEL']==benchmark][period].values[0])
if metric in ['ERROR', 'RMSE', 'NRMSE', 'MAPE']:
ratio=(1/ratio)
values_dict[mod].append(ratio)
if ratio < min_val:
min_val = ratio
if ratio > max_val:
max_val = ratio
bar_width = 1/len(models)
indices = np.array(indices)
for i, mod in enumerate(models):
heights = values_dict[mod]
bar_positions = indices + bar_width*i
(markers, stemlines, baseline) = plt.stem(bar_positions, heights, linefmt='--')
plt.setp(markers, marker='o', markersize=7, color=color_dict[mod], alpha=alpha_dict[mod], label=mod)
plt.setp(stemlines, color=color_dict[mod], linewidth=1)
plt.setp(baseline, visible=False)
# Black line
plt.plot([0,bar_positions[-1]], [1,1],'--',color='.6', alpha=.6)
plt.gca().spines['top'].set_visible(False)
plt.gca().spines['right'].set_visible(False)
plt.title('{0} barplot'.format(metric))
plt.xticks(indices+bar_width*((len(models)-1)/2), ids)
plt.ylim([min_val*.95, max_val*1.05])
plt.xlim([-.3, bar_positions[-1]+.3])
plt.legend(frameon=False, ncol=len(models))
if plot_domain:
plt.xlim(plot_domain)
if mode == 'show':
plt.show()
elif mode == 'save':
if output_filename is None:
output_filename = '{0}_barplot'.format(metric)
plt.gcf().set_size_inches([6,15])
plt.savefig('{0}/{1}.{2}'.format(overview_path, output_filename, ext), format=ext)
plt.close()
def inter_season_analysis(self,ids, main_folders, periods, series_names, metric = 'RMSE', filename='metrics_condensed.csv', output_filename='season_analysis', color_dict=None, alpha_dict=None, mode='save', ext='png'):
'''
Performs seasonal analysis of data based on periods decided from the user.
The top part of the plot shows violin plots (https://seaborn.pydata.org/generated/seaborn.violinplot.html)
that display the models' metric scores as a boxplot/distribution scheme.
The bottom part shows a heatmap representing the distribution of rankings across all periods. E.g. if each timeseries
case contains 4 periods and there are 4 cases, the total number of periods is 4*4 = 16. Each period has a metric for each model.
inter_season_analysis compares this metric within each period, ranks the models from first to nth place, and each place generates a +1
count in the heatmap cell at the column of the model and the row of the rank.
__________
ids : dict
The dict of lists containing the identifiers for the regions.
main_folders : dict
The path to the experiments. Dictionary keys have to be consistent with the ids keys
periods : list
list containing the periods (should be available within the metrics table)
filename : str
String containing the filename to read the series from (using pandas).
start_period : str,
timeseries Pandas dataframe starting index.
end_period : str
timeseries ending index in the pandas dataframe.
n_col : int, optional (default is one)
series_names : list, optional (default is None)
Names of the timeseries to plot. If set to None, then model plots all of them.
output_filename : str, optional (default is series)
Name of the graphics file containing the plots.
color_dict : dict
Dictionary containing specific colors for the models to plot.
mode : str, optional (default is 'save')
If 'save', the function saves the plot in the experiment's _overview folder.
If 'show', the function displays the plot in real time using plt.show().
alpha_dict : dict, optional (default is None)
dictionary specifying the opacity of the bars in the plot (alpha argument in matplotlib).
If set to None, then all opacities are set to 1.
ext : str, optional (default is png)
Extension format used to save the graphics file.
'''
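# Illustrative count for the ranking heatmap described above (hypothetical numbers): with
# 4 cases x 4 periods = 16 ranked periods, a model that finishes 1st in 10 of them and 2nd
# in the remaining 6 contributes 10 to the '1st' row and 6 to the '2nd' row of its column.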
default_colors = ['royalblue', 'darkorange', 'forestgreen', 'firebrick']
if color_dict is None:
color_dict = dict(zip(series_names, default_colors[0:len(series_names)]))
score_periods = {}
ranks = {}
for title, ids_ in ids.items():
metrics_df = pd.read_csv(main_folders[title] + '/_overview/'+ filename)
score_periods[title] = []
ranks[title] = getRanks(metrics_df, metric, ids_, series_names, periods)
for mod in series_names:
score_periods[title].append(get_all_periods(metrics_df, mod, metric, periods))
score_periods[title] = pd.DataFrame(np.hstack(score_periods[title]), columns=series_names)
f, axarr = plt.subplots(2, len(ids.keys()))
axes = axarr.ravel()
places_dict = get_places(ranks, series_names)
places = ['4th', '3rd', '2nd', '1st']
for i, title in enumerate(ids.keys()):
places_list = places_dict[title]
sns.violinplot(data=score_periods[title], ax=axes[i], cut=0, inner='box')
'''
sns.heatmap(data=ranks[metric], ax=axes[i+len(ids.keys())], cmap='Reds', cbar=False, annot=True)
axes[i+len(ids.keys())].set_yticklabels(['1th', '2th', '3th', '4th', '5th'], rotation='horizontal')
axes[i+len(ids.keys())].set_xticklabels(series_names, rotation='horizontal')
'''
print(title, i)
for j, ord_list in enumerate(reversed(places_list)):
for (mod, height) in ord_list:
axes[i+len(ids.keys())].barh(j, height, color=color_dict[mod])
plt.sca(axes[i+len(ids.keys())])
plt.yticks(range(len(places_list)), places)
axes[i].set_title(title)
axes[i].set_xticklabels(series_names)
if i == 0:
axes[i+len(ids.keys())].set_xlabel('No. of States')
elif i == 1:
axes[i+len(ids.keys())].set_xlabel('No. of Regions')
elif i == 2:
axes[i+len(ids.keys())].set_xlabel('No. of Countries')
if i == 0:
axes[i].set_ylabel('{0}'.format(metric))
axes[+len(ids.keys())].set_ylabel('Ranking Proportions')
if mode == 'show':
plt.show()
if mode == 'save':
plt.gcf().set_size_inches([9, 5])
plt.subplots_adjust(left=.1, bottom=.12, right=.97, top=.91, wspace=.25, hspace=.20)
plt.savefig('{0}/{1}/{2}.{3}'.format(self.folder_main, OVERVIEW_FOLDER, output_filename,ext),fmt=ext)
plt.close()
return
def group_seriesbars(self, ids=None, start_period=None, end_period=None, series_names=None, folder_dir=None, metric_filename='metrics.csv', preds_filename='preds.csv', output_filename='series', color_dict=None, alpha_dict=None, mode='show', ext='png', n_col=1, width_ratios=[6,1], metric=None, metric_period=None, target_name=None):
default_colors = ['g', 'b', 'r', 'indigo']
default_linewidths = [1.5,1.4,1.6,1]
'''
Gathers information from all region and does a group plot using matplotlib, along with a barplot, showing a metric.
regions are ordered based on the original ordering from the ids list from left to right, top to bottom
Parameters
__________
ids : list
The list containing the identifiers for the regions.
preds_filename : str
String containing the preds_filename to read the series from (using pandas).
start_period : str,
timeseries Pandas dataframe starting indices.
end_period : str
timeseries ending indices in the pandas dataframe.
n_col : int, optional (default is one)
series_names : list, optional (default is None)
Names of the timeseries to plot. If set to None, then model plots all of them.
output_preds_filename : str, optional (default is series)
Name of the graphics file containing the plots.
color_dict : dict
Dictionary containing specific colors for the models to plot.
mode : str, optional (default is 'save')
If 'save', the function saves the plot in the experiment's _overview folder.
If 'show', the function displays the plot in real time using plt.show().
alpha_dict : dict, optional (default is None)
dictionary specifying the opacity of the bars in the plot (alpha argument in matplotlib).
If set to None, then all opacities are set to 1.
ext : str, optional (default is png)
Extension format used to save the graphics file.
n_col defines the number of columns of the plotting array; the function organizes
the per-id plots in n_col columns and as many rows as needed.
'''
if not ids:
ids = self.ids
if folder_dir is None:
folder_dir = self.folder_main
n_ids = len(ids)
n_rows = math.ceil(n_ids/n_col)
fig, axarr = plt.subplots(n_rows,n_col*2, gridspec_kw = {'width_ratios':width_ratios*n_col})
axes = axarr.ravel()
if color_dict is None:
color_dict = {}
for i, mod in enumerate(series_names):
color_dict[mod] = default_colors[i]
if alpha_dict is None:
alpha_dict = {}
for i, mod in enumerate(series_names):
alpha_dict[mod] = .8
for i, id_ in enumerate(ids):
df = pd.read_csv('{0}/{1}/{2}'.format(folder_dir, id_, preds_filename), index_col=[0])
metric_df = pd.read_csv('{0}/{1}/{2}'.format(folder_dir, id_, metric_filename))
series = []
indices = copy.copy(df[start_period:end_period].index.values)
for kk in range(np.size(indices)):
v = indices[kk][2:7]
indices[kk] = v
col_names = list(df)
if target_name:
zeros=np.zeros(np.size(df[start_period:end_period][target_name].values))
curve_max = np.amax(np.size(df[start_period:end_period][target_name].values))
#axes[i*2].plot(df[start_period:end_period][target_name].values, label=target_name, linewidth=.1)
axes[i*2].fill_between(x=list(range(len(indices))),y1=df[start_period:end_period][target_name].values, facecolor='gray', alpha=0.5, label=target_name)
for k, col in enumerate(series_names):
if col in col_names:
# create top panel
axes[i*2].plot(df[start_period:end_period][col].values, label=col, linewidth=default_linewidths[k])
else:
print('WARNING! {0} not in {1} timeseries list'.format(col, id_))
if color_dict:
for j, l in enumerate(axes[i*2].get_lines()):
l.set_color(color_dict[series_names[j]])
if alpha_dict:
for j, l in enumerate(axes[i*2].get_lines()):
l.set_alpha(alpha_dict[series_names[j]])
######
metric_df = metric_df[metric_df['METRIC']==metric][['MODEL', metric_period]]
bar_width = .5
hs = []
for k, mod in enumerate(series_names):
heights = metric_df[metric_df['MODEL'] == mod][metric_period].values
bar_positions = k
rects = axes[i*2+1].bar(bar_positions, heights, bar_width, label=mod, color=color_dict[mod], alpha=alpha_dict[mod])
hs.append(copy.copy(heights))
max_height = np.amax(hs)
min_height = np.amin(hs)
axes[i*2+1].set_ylim([min_height*.90, max_height*1.1])
axes[i*2+1].set_yticks([min_height, max_height])
axes[i*2+1].yaxis.set_major_formatter(FormatStrFormatter('%.3f'))
#####
if i == 0:
if target_name:
n_cols = len(series_names)+1
else:
n_cols = len(series_names)
axes[i*2].legend(ncol=n_cols, frameon=False, loc='upper left', \
bbox_to_anchor=(.0,1.20))
axes[i*2].text(.10,.9, id_, weight = 'bold', horizontalalignment='left', transform=axes[i*2].transAxes)
#axes[i*2+1].yaxis.set_major_locator(mticker.MaxNLocator(2))
axes[i*2].yaxis.set_major_locator(mticker.MaxNLocator(2))
axes[i*2+1].set_xticks([])
# SPINES
axes[i*2].spines['top'].set_visible(False)
axes[i*2].spines['right'].set_visible(False)
#axes[i*2].spines['left'].set_visible(False)
yticks=axes[i*2].get_yticks()
ylim = axes[i*2].get_ylim()
axes[i*2].spines['left'].set_bounds(0,yticks[2])
axes[i*2+1].spines['left'].set_bounds(min_height,max_height)
axes[i*2].set_ylim(0,ylim[1])
axes[i*2+1].spines['top'].set_visible(False)
axes[i*2+1].spines['right'].set_visible(False)
#axes[i*2+1].spines['left'].set_visible(False)
if i == 0:
plt.ylabel('Estimates')
if i > n_col*(n_rows - 1)-1:
axes[i*2].set_xlabel('Date')
plt.sca(axes[i*2])
plt.xticks(range(len(indices)),indices, rotation=0)
plt.gca().xaxis.set_major_formatter(IndexFormatter(indices))
plt.gca().xaxis.set_major_locator(mticker.MaxNLocator(4))
xticks = axes[i*2].get_xticks()
axes[i*2].spines['bottom'].set_bounds(xticks[1], xticks[-2])
else:
plt.sca(axes[i*2])
plt.xticks(range(len(indices)),indices, rotation=0)
plt.gca().xaxis.set_major_formatter(IndexFormatter(indices))
plt.gca().xaxis.set_major_locator(mticker.MaxNLocator(4))
xticks = axes[i*2].get_xticks()
axes[i*2].spines['bottom'].set_bounds(xticks[1], xticks[-2])
#axes[i*2].set_xticklabels([])
if i < np.size(axes)/2-1:
for j in range(i+1,int(np.size(axes)/2)):
axes[j*2+1].spines['top'].set_visible(False)
axes[j*2+1].spines['right'].set_visible(False)
axes[j*2+1].spines['left'].set_visible(False)
axes[j*2+1].spines['bottom'].set_visible(False)
axes[j*2].spines['top'].set_visible(False)
axes[j*2].spines['right'].set_visible(False)
axes[j*2].spines['left'].set_visible(False)
axes[j*2].spines['bottom'].set_visible(False)
axes[j*2].set_yticks([])
axes[j*2].set_xticks([])
axes[j*2+1].set_yticks([])
axes[j*2+1].set_xticks([])
axes[j*2].set_title('')
axes[j*2+1].set_title('')
plt.subplots_adjust(left=.03, bottom=.05, right=.99, top=.95, wspace=.25, hspace=.15)
if mode == 'show':
plt.show()
plt.close()
if mode == 'save':
fig.set_size_inches([7*n_col,2.5*n_rows])
plt.savefig('{0}/{1}/{2}.{3}'.format(folder_dir, OVERVIEW_FOLDER, output_filename,ext),fmt=ext)
plt.close()
def rank_ids_by_metric(self, ids, models, period, metric='RMSE', reverse=False, metric_filename='metrics.csv'):
'''
rank_ids_by_metric compares the performance of two models specified in the models list under
the selected metric. The function computes the ratio model[0]/model[1] (inverting it for error
metrics such as RMSE, so that larger always means model[0] is better) and orders the ids by the result.
Parameters
__________
ids : list
List of strings containing the region identifiers to rank.
models : list
A list of two models to compare
metric : str, optional (default is RMSE)
The metric to use as comparison
reverse : bool, optional (default is False)
If False, orders in increasing order. If set to True, orders in decreasing order.
metric_filename : str, optional (default is 'metrics.csv')
period : str
Specify the period of the metric
Returns
_______
ids = An ordered list of IDs based on the results of the comparison
'''
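# Worked example with hypothetical values: for metric='RMSE' and models=['AR12', 'ARGO'],
# an id where AR12 scores 2.0 and ARGO scores 1.0 gives ratio = 2.0/1.0 = 2.0, inverted to
# 0.5 because RMSE is an error metric; with reverse=False that id is sorted before ids
# where AR12 is comparatively stronger.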
metric_values = []
for id_ in ids:
metric_df = pd.read_csv('{0}/{1}/{2}'.format(self.folder_main, id_, metric_filename))
mod0_val = metric_df[ (metric_df['METRIC'] == metric) & (metric_df['MODEL'] == models[0])][period].values
mod1_val = metric_df[(metric_df['METRIC'] == metric) & (metric_df['MODEL'] == models[1])][period].values
ratio = mod0_val/mod1_val
if metric in ['RMSE', 'NRMSE', 'ERROR', 'MAPE']:
ratio = 1/ratio
metric_values.append(copy.copy(ratio))
ord_values = []
ord_ids = []
for id_, val in sorted(zip(ids, metric_values), key = lambda x : x[1], reverse=reverse):
ord_values.append(val)
ord_ids.append(id_)
return ord_ids
def group_weekly_winner(self, ids=None, cmap='BuPu', models=None, start_period=None, end_period=None, output_filename='weekly_winners', folder_main=None, filename='preds.csv', mode='show', ext='png'):
"""
For each ID, chooses the weekly winner out of the models list in a prediction file and plots all of them
together in a heatmap.
Parameters
__________
ids : list
The list containing the identifiers for the regions.
filename : str
String containing the filename to read the series from (using pandas).
start_period : str,
timeseries Pandas dataframe starting index.
end_period : str
timeseries ending index in the pandas dataframe.
output_filename : str, optional (default is series)
Name of the graphics file containing the plots.
mode : str, optional (default is 'save')
If 'save', the function saves the plot in the experiment's _overview folder.
If 'show', the function displays the plot in real time using plt.show().
ext : str, optional (default is png)
Extension format used to save the graphics file.
cmap : str, optional (default is 'BuPu')
colormap style to display in the plot. List of colormaps is provided by Matplotlib.
folder_main : str, optiona (default is None)
Path to folder with data. If None, uses default class attribute.
"""
if folder_main is None:
folder_main = self.folder_main
#Getting winners in each id
winners_dict = {}
ind = list(range(len(models)))
map_dict =dict(zip(models, ind))
for i, id_ in enumerate(ids):
df = pd.read_csv('{0}/{1}/{2}'.format(folder_main, id_, filename), index_col=[0])
if i == 0:
if start_period is None:
start_period = df.index[0]
if end_period is None:
end_period = df.index[-1]
df = df[start_period:end_period]
winners = get_winners_from_df(df, models=models)
winners=winners.replace({"winners" : map_dict })
winners_dict[id_] = winners['winners'].values
index = df[start_period:end_period].index.values
winners_df = pd.DataFrame(winners_dict, index=index)
ax= sns.heatmap(data=winners_df.transpose(), linewidths=.6, yticklabels=True, cbar_kws={"ticks":ind})
ax.collections[0].colorbar.ax.set_yticklabels(models)
#plt.matshow(winners_df.transpose(), origin='lower', aspect='auto', cmap='BuPu')
#cb = plt.colorbar(orientation='vertical', ticks=ind, shrink=.5)
#cb.ax.set_yticklabels(models)
#plt.xticks(range(len(index)),index, rotation=45)
#plt.gca().xaxis.set_major_formatter(IndexFormatter(index))
#plt.gca().xaxis.set_major_locator(mticker.MaxNLocator(6))
if mode == 'show':
plt.show()
plt.close()
if mode == 'save':
plt.gcf().set_size_inches([10,6])
plt.subplots_adjust(left=.10, bottom = .15, right = 1, top=.95, wspace=.20, hspace=.20)
plt.savefig('{0}/{1}/{2}.{3}'.format(self.folder_main, self.overview_folder, output_filename, ext),fmt=ext)
plt.close()
def plot_series(self,folder_dir=None, id_=None, filename=None, output_filename='series', series_names=None, color_dict=None, alpha_dict=None, start_period=None, end_period=None, mode='save', ext='png', add_weekly_winner=False, winner_models=None):
if folder_dir is None:
folder_dir = self.folder_main
if filename is None:
filename = ID_PREDS
df = pd.read_csv('{0}/{1}/{2}'.format(self.folder_main, id_, filename), index_col=[0])
if start_period is None:
start_period = df.index[0]
if end_period is None:
end_period = df.index[-2]
series = []
index = df.index.values
if add_weekly_winner:
n_rows = 2
gridspec_kw = {'height_ratios':[6,1]}
else:
n_rows = 1
gridspec_kw = None
fig, axes = plt.subplots(n_rows, 1, gridspec_kw = gridspec_kw)
col_names = list(df)
if series_names is None:
series_names = col_names
for col in series_names:
# create top panel
axes[0].plot(df[start_period:end_period][col].values, label=col)
#a = ax.plot_date(x=dates, y=ILI) # fmt="-",color='.20', linewidth=3.2, label='ILI', alpha = 1)
if color_dict:
for i, l in enumerate(axes[0].get_lines()):
l.set_color(color_dict[series_names[i]])
if alpha_dict:
for i, l in enumerate(axes[0].get_lines()):
l.set_alpha(alpha_dict[series_names[i]])
if add_weekly_winner:
winners = get_winners_from_df(df, models=winner_models)
ind = list(range(len(winner_models)))
map_dict =dict(zip(winner_models, ind))
winners=winners.replace({"winners" : map_dict })
im = axes[1].matshow(winners['winners'].values.reshape([1,-1]), origin='lower', aspect='auto', cmap='BuPu')
cb = plt.colorbar(im, ax=axes[1], orientation='horizontal', ticks=ind)
cb.ax.set_xticklabels(winner_models)
axes[0].legend(ncol=len(series_names), frameon=False)
axes[0].set_title('{0}'.format(id_))
axes[0].set_ylabel('Estimates')
axes[0].set_xlabel('Index')
axes[0].spines['top'].set_visible(False)
axes[0].spines['right'].set_visible(False)
plt.xticks(range(len(index)),index, rotation=45)
axes[0].xaxis.set_major_formatter(IndexFormatter(index))
axes[0].xaxis.set_major_locator(mticker.MaxNLocator(6))
axes[1].set_xticks([])
axes[1].set_yticks([])
axes[0].autoscale(enable=True, axis='x', tight=True)
#plt.locator_params(nbins=8)
if mode == 'show':
plt.show()
plt.close()
if mode == 'save':
fig.set_size_inches([10,5])
plt.savefig('{0}/{1}/{2}.{3}'.format(self.folder_main, id_, output_filename,ext),fmt=ext)
plt.close()
def season_analysis(self, ids, periods, series_names, folder_main=None, metrics = ['PEARSON', 'NRMSE'], filename='metrics_condensed.csv', output_filename='season_analysis', color_dict=None, alpha_dict=None, mode='save', ext='png'):
'''
Gathers information from all region and does a group plot using matplotlib.
regions are ordered in based on the original ordering from the ids list from left to right, top to bottom
Parameters
__________
ids : list
The list containing the identifiers for the regions.
periods : list
list containing the periods (should be available within the metrics table)
filename : str
String containing the filename to read the series from (using pandas).
start_period : str,
timeseries Pandas dataframe starting index.
end_period : str
timeseries ending index in the pandas dataframe.
n_col : int, optional (default is one)
series_names : list, optional (default is None)
Names of the timeseries to plot. If set to None, then model plots all of them.
output_filename : str, optional (default is series)
Name of the graphics file containing the plots.
color_dict : dict
Dictionary containing specific colors for the models to plot.
mode : str, optional (default is 'save')
If 'save', the function saves the plot in the experiment's _overview folder.
If 'show', the function displays the plot in real time using plt.show().
alpha_dict : dict, optional (default is None)
dictionary specifying the opacity of the bars in the plot (alpha argument in matplotlib).
If set to None, then all opacities are set to 1.
ext : str, optional (default is png)
Extension format used to save the graphics file.
'''
if not folder_main:
folder_main = self.folder_main
metrics_df = pd.read_csv(folder_main + '/_overview/'+ filename)
score_periods = {}
ranks = {}
for metric in metrics:
score_periods[metric] = []
ranks[metric] = getRanks(metrics_df, metric, ids, series_names, periods)
for mod in series_names:
score_periods[metric].append(get_all_periods(metrics_df, mod, metric, periods))
score_periods[metric] = pd.DataFrame(np.hstack(score_periods[metric]), columns=series_names)
f, axarr = plt.subplots(2, len(metrics))
axes = axarr.ravel()
for i, metric in enumerate(metrics):
sns.violinplot(data=score_periods[metric], ax=axes[i], cut=0)
sns.heatmap(data=ranks[metric], ax=axes[i+2], cmap='Reds', cbar=False, annot=True)
axes[i].set_title(metric)
axes[i+2].set_yticklabels(['1th', '2th', '3th', '4th', '5th'], rotation='horizontal')
axes[i+2].set_xticklabels(series_names, rotation='horizontal')
if mode == 'show':
plt.show()
if mode == 'save':
plt.gcf().set_size_inches([7, 4])
plt.subplots_adjust(left=.08, bottom=.09, right=.97, top=.91, wspace=.25, hspace=.20)
plt.savefig('{0}/{1}/{2}.{3}'.format(self.folder_main, OVERVIEW_FOLDER, output_filename,ext),fmt=ext)
plt.close()
def group_plot_series(self, ids=None, start_period=None, end_period=None, series_names=None, folder_dir=None, filename='preds.csv', output_filename='series', color_dict=None, alpha_dict=None, mode='save', ext='png', n_col=1):
'''
Gathers information from all region and does a group plot using matplotlib.
regions are ordered in based on the original ordering from the ids list from left to right, top to bottom
Parameters
__________
ids : list
The list containing the identifiers for the regions.
filename : str
String containing the filename to read the series from (using pandas).
start_period : str,
timeseries Pandas dataframe starting index.
end_period : str
timeseries ending index in the pandas dataframe.
n_col : int, optional (default is one)
series_names : list, optional (default is None)
Names of the timeseries to plot. If set to None, then model plots all of them.
output_filename : str, optional (default is series)
Name of the graphics file containing the plots.
color_dict : dict
Dictionary containing specific colors for the models to plot.
mode : str, optional (default is 'save')
If 'save', the function saves the plot in the experiment's _overview folder.
If 'show', the function displays the plot in real time using plt.show().
alpha_dict : dict, optional (default is None)
dictionary specifying the opacity of the bars in the plot (alpha argument in matplotlib).
If set to None, then all opacities are set to 1.
ext : str, optional (default is png)
Extension format used to save the graphics file.
n_col defines the number of columns of the plotting array; the function organizes
the per-id plots in n_col columns and as many rows as needed.
'''
if folder_dir is None:
folder_dir = self.folder_main
n_ids = len(ids)
n_rows = math.ceil(n_ids/n_col)
fig, axarr = plt.subplots(n_rows,n_col)
axes = axarr.ravel()
for i, id_ in enumerate(ids):
df = pd.read_csv('{0}/{1}/{2}'.format(self.folder_main, id_, filename), index_col=[0])
series = []
index = df[start_period:end_period].index.values
col_names = list(df)
for col in series_names:
if col in col_names:
# create top panel
axes[i].plot(df[start_period:end_period][col].values, label=col)
else:
print('WARNING! {0} not in {1} timeseries list'.format(col, id_))
if color_dict:
for j, l in enumerate(axes[i].get_lines()):
l.set_color(color_dict[series_names[j]])
if alpha_dict:
for j, l in enumerate(axes[i].get_lines()):
l.set_alpha(alpha_dict[series_names[j]])
if i == 0:
axes[i].legend(ncol=len(series_names), frameon=False, loc='upper left', \
bbox_to_anchor=(.0,1.20))
axes[i].text(.80,.9, id_, weight = 'bold', horizontalalignment='left', transform=axes[i].transAxes)
axes[i].spines['top'].set_visible(False)
axes[i].spines['right'].set_visible(False)
if i%n_col == 0:
plt.ylabel('Estimates')
if i > n_col*(n_rows - 1)-1:
time.sleep(3)
axes[i].set_xlabel('Index')
plt.sca(axes[i])
plt.xticks(range(len(index)),index, rotation=45)
plt.gca().xaxis.set_major_formatter(IndexFormatter(index))
plt.gca().xaxis.set_major_locator(mticker.MaxNLocator(6))
#plt.locator_params(nbins=8)
else:
axes[i].set_xticks([])
if mode == 'show':
plt.show()
plt.close()
if mode == 'save':
fig.set_size_inches([5*n_col,3*n_rows])
plt.savefig('{0}/{1}/{2}.{3}'.format(self.folder_main, OVERVIEW_FOLDER, output_filename,ext),fmt=ext)
plt.close()
def merge_models(filename, filename2, output_filename, models=None, start_period=None, end_period=None, erase_duplicates=True):
"""
merges two dataframes over a specified period of time, keeps only the selected models, and stores the result in output_filename.
PARAMETERS:
___________
filename : str,
Path to first dataframe (CSV)
filename2 : str
Path to second dataframe (CSV)
output_filename : str,
New absolute location of the merged dataframe.
models : list, optional (Default is None)
Name of models to let into the new Dataframe. If set to None, then lets all models in
start_period : str, optional (default is None)
First index in dataframe to merge, if set to None, then grabs the first index of filename's dataframe
end_period : str, optional (default is None)
"""
df1 = pd.read_csv(filename, index_col = [0])
df2 = pd.read_csv(filename2, index_col = [0])
df3 = pd.concat([df1,df2], axis=1)
if start_period and (start_period in df3.index):
pass
elif start_period is None:
start_period = df3.index[0]
else:
print('Unable to identify start_period {0} as valid start reference.\
please review'.format(start_period))
return
if end_period and end_period in df3.index:
pass
elif end_period is None:
end_period = df3.index[-1]
else:
print('Unable to identify end_period {0} as valid end reference.\
please review'.format(end_period))
return
if models is None:
models = df3.columns
df3 = df3[start_period:end_period][models]
if erase_duplicates:
df3=df3.T.drop_duplicates().T
df3.to_csv(output_filename)
def group_barplot_metric(self, ids, metric, period, models, color_dict=None, alpha_dict=None, metric_filename='metrics.csv', bar_separation_multiplier=1.5, mode='save', output_filename=None, plot_domain=None, ext='png', show_values=False, ordering=None):
"""
Produces a bar plot of the desired metric and models for a given set of ids.
For a single-id plot, please check barplot_metric().
Parameters
__________
ids : list
List of identifiers for the regions to plot
metric: str
String containing the name of the metric to look for in the predictions file
period: str
Column name containing the values to plot
models : list, optional (default None)
String list containing the names of the models to plot
color_dict : dict
Dictionary containing specific colors for the models to plot
metric_filename : str, optional (default metrics.csv)
mode : str, optional (default is 'save')
If 'save', the function saves the plot in the experiment's _overview folder.
If 'show', the function displays the plot in real time using plt.show().
alpha_dict : dict, optional (default is None)
dictionary specifying the opacity of the bars in the plot (alpha argument in matplotlib).
If set to None, then all opacities are set to 1
output_filename : str, optional (default is None)
If set to None, output_filename is set metricname_barplot
ext : str, optional (default is png)
Extension format used to save the barplot.
plot_domain : list, optional (default is [0,1])
list of two integers that sets the limits in the plot (plt.xlim)
show_values : boolean, optional (default is False)
plots the values of the metric within the barplot.
bar_separation_multiplier : float, optional (default is 1)
Multiplier for the separation between bars in the plot.
If set to 1, bars are plotted at locations 1, 2, 3...; if set to 2, at 2, 4, 6, etc.
"""
if color_dict is None:
color_dict = dict(zip(models, ['b']*len(models)))
if alpha_dict is None:
alpha_dict = dict(zip(models, [1]*len(models)))
values_dict = dict(zip(models, [[] for mod in models]))
indices = []
overview_path = '{0}/{1}'.format(self.folder_main, OVERVIEW_FOLDER)
for i, id_ in enumerate(ids):
indices.append(i*bar_separation_multiplier)
id_path = '{0}/{1}'.format(self.folder_main, id_)
df = pd.read_csv('{0}/{1}'.format(id_path, metric_filename))
df = df[df['METRIC']==metric]
for j, mod in enumerate(models):
try:
values_dict[mod].append(df[df['MODEL']==mod][period].values[0])
except Exception as t:
print(t)
print('\n Missing data in model:{0} for id:{1}'.format(mod, id_))
return
bar_width = 1/len(models)
indices = np.array(indices)
for i, mod in enumerate(models):
heights = values_dict[mod]
bar_positions = indices + bar_width*i
rects = plt.barh(bar_positions, heights, bar_width, label=mod, color=color_dict[mod], alpha=alpha_dict[mod])
if show_values:
for j, rect in enumerate(rects):
yloc = bar_positions[j]
clr = 'black'
p = heights[j]
xloc = heights[j]
plt.gca().text(xloc, yloc, p, horizontalalignment='center', verticalalignment='center', color=clr, weight='bold')
plt.gca().spines['top'].set_visible(False)
plt.gca().spines['right'].set_visible(False)
plt.title('{0} barplot'.format(metric))
plt.yticks(indices+bar_width*(len(models)/2), ids)
if len(models) > 5:
plt.legend(frameon=False, ncol=1)
else:
plt.legend(frameon=False, ncol=len(models))
if plot_domain:
plt.xlim(plot_domain)
if mode == 'show':
plt.show()
elif mode == 'save':
if output_filename is None:
output_filename = '{0}_barplot'.format(metric)
plt.gcf().set_size_inches([6,15])
plt.savefig('{0}/{1}.{2}'.format(overview_path, output_filename, ext), format=ext)
plt.close()
def barplot_metric(self, id_, metric, period, models=None, color_dict=None, alpha_dict=None, metric_filename='metrics.csv', bar_width=1, bar_separation_multiplier=1, mode='save', output_filename=None, plot_domain=None, ext='png', show_values=True):
"""
Produces a bar plot of the desired metric and models for a specific id.
If looking to make a group plot over several ids, please check group_barplot_metric().
Parameters
__________
id_: str
Identifier for the region to look for
metric: str
String containing the name of the metric to look for in the predictions file
period: str
Column name containing the values to plot
models : list, optional (default None)
String list containing the names of the models to plot
color_dict : dict
Dictionary containing specific colors for the models to plot
metric_filename : str, optional (default metrics.csv)
bar_width : float, optional (default is 1)
Bar width in the plots (0 to 1, any more will make bars be on top of each other).
bar_separation_multiplier : float, optional (default is 1)
Multiplier for the separation between bars in the plot.
If set to 1, bars are plotted at locations 1, 2, 3...; if set to 2, at 2, 4, 6, etc.
mode : str, optional (default is 'save')
If 'save', the function saves the plot in the id_-specific folder.
If 'show', the function displays the plot in real time using plt.show().
alpha_dict : dict, optional (default is None)
dictionary specifying the opacity of the bars in the plot (alpha argument in matplotlib).
If set to None, then all opacities are set to 1
output_filename : str, optional (default is None)
If set to None, output_filename is set metricname_barplot
ext : str, optional (default is png)
Extension format used to save the barplot.
plot_domain : list, optional (default is [0,1])
list of two integers that sets the limits in the plot (plt.xlim)
show_values : boolean, optional (default is False)
plots the values of the metric within the barplot.
"""
if id_ in self.ids:
id_path = '{0}/{1}'.format(self.folder_main, id_)
df = pd.read_csv('{0}/{1}'.format(id_path, metric_filename))
df = df[df['METRIC']==metric]
if models is None:
models = df['MODEL'].values
if color_dict is None:
color_dict = dict(zip(models, ['b']*len(models)))
if alpha_dict is None:
alpha_dict = dict(zip(models, [1]*len(models)))
indices = []
for i, mod in enumerate(models):
height = df[df['MODEL']==mod][period].values[0]
indices.append(i*bar_separation_multiplier)
rects = plt.barh(indices[i], height, bar_width, color=color_dict[mod], alpha=alpha_dict[mod])
if show_values:
for rect in rects:
yloc = indices[i]
clr = 'black'
p = height
xloc = height
plt.gca().text(xloc, yloc, p, horizontalalignment='center', verticalalignment='center', color=clr, weight='bold')
else:
print(' {1} ID is not in data. current ids : {0}'.format(self.ids, id_))
plt.gca().spines['top'].set_visible(False)
plt.gca().spines['right'].set_visible(False)
plt.title('{0} barplot'.format(metric))
plt.yticks(indices, models)
plt.legend(frameon=False, ncol=len(models))
if plot_domain:
plt.xlim(plot_domain)
if mode == 'show':
plt.show()
elif mode == 'save':
if output_filename is None:
output_filename = '{0}_barplot'.format(metric)
plt.gcf().set_size_inches([10,7])
plt.savefig('{0}/{1}.{2}'.format(id_path, output_filename, ext), format=ext)
plt.close()
def extract_metrics(self, ids = None, filename = None, folder_main = None, \
metrics=['RMSE', 'PEARSON'], models= ['AR12GO'], seasons=None):
if ids is None:
ids = self.ids
if folder_main is None:
folder_main = self.folder_main+'/'+OVERVIEW_FOLDER
if filename is None:
filename = CSV_METRICS_CONDENSED
df = pd.read_csv(folder_main+'/'+filename, index_col=0)
if models is not None:
df = df[df['MODEL'].isin(models)]
if metrics is not None:
df = df[df['METRIC'].isin(metrics)]
df.to_csv(folder_main + '/metrics_extracted.csv')
def group_compute_metrics(self, intervals, interval_labels, which_ids = 'all_ids', \
which_metrics = ['PEARSON', 'RMSE'], remove_missing_values=[0.5, 0], input_file_name=None, output_file_name=None, \
verbose=False, target = None, write_to_overview=False):
'''
For each of the ids analyzed, computes a set of metrics (metris available in the metric_handler variable).
Input:
intervals = a binary tuple list with start and end labels.
interval_labels
which_ids = list that specifies which of the ids to compute the metrics for, if not, computes for all.
which_metrics = (dictionary with per-id lists or list) Specifies which metrics to compute for the ids.
remove_missing_values = list of float numbers to assig which values to ignore in the metric computation
'''
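# Hypothetical shapes of the interval arguments (ids, dates and labels are illustrative):
#   intervals       = {'US_CA': [('2015-01-04', '2015-12-27'), ('2016-01-03', '2016-12-25')]}
#   interval_labels = {'US_CA': ['season_2015', 'season_2016']}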
if input_file_name is None:
input_file_name = ID_PREDS
if output_file_name is None:
output_file_name = ID_METRICS
if target is None:
target = TARGET
if which_ids == 'all_ids':
which_ids = self.ids
else:
for id_ in list(which_ids):
if id_ not in self.ids:
which_ids.remove(id_)
print('{0} not found in experiment object ids. Removing it, please check.'.format(id_))
if isinstance(intervals, list) and isinstance(intervals[0], tuple):
print('Non-specified id intervals received. Using intervals in all ids')
intervals = dict(zip(which_ids, [intervals]*len(which_ids)))
elif isinstance(intervals, dict) and isinstance(interval_labels, dict):
for i, id_ in enumerate(which_ids):
try:
if len(intervals[id_]) != len(interval_labels[id_]):
print(' WARNING! Mismatch between interval and interval_labels in id: {0}.'.format(id_))
interval_labels[id_] = []
for i in range(0, len(intervals[id_])):
interval_labels[id_].append('s{0}'.format(i))
except KeyError:
print('ID not found within interval data. Please review.')
else:
print('Mismatch between intervals and interval labels types (Non-dictionaries). Please check input parameters')
return
if write_to_overview: id_dfs = []
if verbose: print('Reading on {0}'.format(self.folder_main))
for id_folder in os.listdir(self.folder_main):
if id_folder in which_ids:
file_preds = self.folder_main + '/' + id_folder + '/' + input_file_name
if verbose: print('Checking for data in {0}'.format(file_preds))
if os.path.exists(file_preds):
preds_pd = pd.read_csv(file_preds, index_col = 0)
if verbose:
print('Successfully loaded preds \n \n {0}'.format(preds_pd))
time.sleep(10)
id_interval = intervals[id_folder]
id_interval_labels = interval_labels[id_folder]
model_list = list(preds_pd)
model_list.remove(target)
metric_dict = {}
if verbose:
print('id: {0} \nid_intervals: {1}\n id_interval_labels{2}\n Models_available{3}'.format(id_folder, id_interval, id_interval_labels, model_list))
time.sleep(10)
# generating multi index for pandas dataframe
levels = [which_metrics, model_list]
labels = [[], []]
names = ['METRIC', 'MODEL']
for i, (start_interval, end_interval) in enumerate(id_interval):
metric_dict[id_interval_labels[i]] = []
sub_pd = copy.deepcopy(preds_pd[start_interval:end_interval])
for j, metric in enumerate(which_metrics):
for k, model in enumerate(model_list):
model_timeseries = sub_pd[model].values
target_s = sub_pd[target].values
if remove_missing_values:
model_timeseries, target_s = timeseries_rmv_values(model_timeseries, target_s, remove_missing_values)
#print(preds_pd, model_timeseries, target_s)
#time.sleep(100)
val = metric_handler[metric](model_timeseries, target_s)
metric_dict[id_interval_labels[i]].append(val)
if i == 0:
labels[0].append(j)
labels[1].append(k)
ind = pd.MultiIndex(levels=levels, labels=labels, names=names)
metric_pd = pd.DataFrame(metric_dict, index = ind)
#print(metric_pd)
metric_pd.to_csv(self.folder_main + '/' + id_folder + '/' + output_file_name )
metric_pd['ID'] = np.array([id_folder]*len(labels[0]))
if write_to_overview : id_dfs.append( metric_pd.set_index('ID', append=True, inplace=False))
else:
print('Not able to find results file for {0}. Please check your folder'.format(id_folder))
print('Finished iterating over all ids. Writing out condensed file in {0} folder'.format(OVERVIEW_FOLDER))
if write_to_overview:
id_dfs = pd.concat(id_dfs)
id_dfs.to_csv(self.folder_main+ '/' + OVERVIEW_FOLDER + '/' + CSV_METRICS_CONDENSED)
def efficiency_test(self, id_folder, periods, period_labels,\
model_to_compare, ignore_models=['GFT'],\
confidence_interval=.90, samples = 10000, p=1./52, filename = None,
output_file_name=None, remove_values=None, write=True, op='MSE'):
'''
Performs efficiency test based on politis and romano 1994
https://www.tandfonline.com/doi/abs/10.1080/01621459.1994.10476870
'''
# create path to csv file
if filename is None:
file_path = id_folder +'/'+ID_PREDS
else:
file_path = id_folder +'/'+ filename
if output_file_name is None:
output_file_name = ID_EFFTEST
# load columns
preds_pd = pd.read_csv(file_path, index_col=0)
if remove_values is not None:
rmv_values(preds_pd, ignore = remove_values, verbose = True)
bbts = {}
model_names = list(preds_pd)
model_names.remove(TARGET)
# Removing models listed in ignore_models
for i, model in enumerate(ignore_models):
if model in model_names:
del preds_pd[model]
model_names.remove(model)
n_models = len(model_names)
#multiindexing
levels = [['BBT', 'lower_bound', 'upper_bound'], model_names]
labels = [ [0]*n_models + [1]*n_models + [2]*n_models, list(range(n_models))*3 ]
names =['Efficiency_test', 'Model']
ind = pd.MultiIndex(levels = levels, labels = labels, names = names)
print('Computing the efficiency test on {0}'.format(id_folder))
#Main computation loop
for i, period in enumerate(periods):
print('Computing the efficiency test on {0}'.format(period))
bbts[period_labels[i]] = self.bbt(preds_pd,\
model_to_compare=model_to_compare,period=period, \
confidence_interval=confidence_interval, \
samples = samples, p = p, op=op)
eff_pd = pd.DataFrame(bbts, index =ind)
if write:
eff_pd.to_csv(id_folder + '/' + output_file_name )
return eff_pd
def bbt(self, preds_pd, model_to_compare='AR12',\
seed='random', period=('2012-01-01', '2016-12-25'), verbose = True,\
samples=10000, p=1./52, confidence_interval=.90, op='MSE'):
'''
Performs timeseries bootstrap to calculate the ratio of MSEs between model_to_compare and all other available models
Inputs:
preds_pd = pandas dataframe containing the data to analyze
model_to_compare = Str naming the model (must be inside the csv file)
seed = specify a numpy and random seed, otherwise use 'random' to not use any
period = a 2 tuple containing the first index and last index comprising the period to analyze
'''
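# Sketch of the output layout with the default settings: confidence_interval=.90 trims the
# 5% lowest and 5% highest bootstrap deviations, so report_array stacks, per model, the
# observed relative efficiency followed by its lower and upper bounds; p=1./52 makes the
# stationary block bootstrap draw blocks of ~52 weeks on average.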
if isinstance(seed,int):
random.seed(seed)
np.random.seed(seed)
model_names = list(preds_pd)
if verbose == True:
print('Successfully loaded dataframe. \n \n Target name in Config : {0} \n \n model_names found: {1}'.format(TARGET, model_names))
model_preds = []
model_names.remove(TARGET)
# Always place target preds at start
for i, model in enumerate(model_names):
if i == 0:
model_preds.append(preds_pd[TARGET][period[0]:period[1]])
model_preds.append(preds_pd[model][period[0]:period[1]])
methods = np.column_stack(model_preds)
n_models = len(model_names)
eff_obs = np.zeros(n_models)
# calculate observed relative efficiency
for i in range(n_models):
eff_obs[i] = metric_handler[op](methods[:, 0], methods[:, i + 1])
eff_obs = eff_obs / eff_obs[model_names.index(model_to_compare)]
# perform bootstrap
scores = np.zeros((samples, n_models))
for iteration in range(samples):
# construct bootstrap resample
new, n1, n2 = sbb.resample(methods, p)
# calculate sample relative efficiencies
for model in range(n_models):
scores[iteration, model] = metric_handler[op](new[:, 0], new[:, model + 1])
scores[iteration] = scores[iteration] / scores[iteration,model_names.index(model_to_compare)]
if op == 'PEARSON':
eff_obs = 1/eff_obs
scores = 1/scores
# define the variable containing the deviations from the observed rel eff
scores_residual = scores - eff_obs
# construct output array
report_array = np.zeros(3*n_models)
for comp in range(n_models):
tmp = scores_residual[:, comp]
# 95% confidence interval by sorting the deviations and choosing the endpoints of the 95% region
tmp = np.sort(tmp)
ignored_tail_size = (1-confidence_interval)/2
report_array[comp] = eff_obs[comp]
report_array[n_models*1+comp] = eff_obs[comp] + tmp[int(round(samples * (0.0+ignored_tail_size)))]
report_array[n_models*2+comp] = eff_obs[comp] + tmp[int(round(samples * (1.0-ignored_tail_size)))]
return report_array
def mat_load(self, state_dir = None, filename = None, dates_bol = True, verbose = True):
if state_dir is not None:
self.state_dir = state_dir
if filename is not None:
self.filename = filename
self.preds_pd = pd.read_csv(self.state_dir + self.filename + '_preds.csv')
self.metrics_pd = pd.read_csv(self.state_dir + self.filename + '_table.csv')
if dates_bol == True:
self.dates = self.preds_pd['Week'].values
del self.preds_pd['Week']
self.model_names = list(self.preds_pd)
self.season_names = list(self.metrics_pd)
# Removing Gold standard from model list and 'metrics' from metrics list
self.target_name = self.model_names[0]
self.model_names.remove(self.target_name)
del self.season_names[0]
self.ranking = self.model_names
self.target_pred = self.preds_pd[self.target_name].values
print('Loaded data for : {0} \n Models found : {1} \n Seasons found : {2} \n \n '.format(self.state_name, self.model_names, self.season_names))
def mat_metricRank(self, metric = None, season = None, verbose = False):
if metric is not None:
self.ranking_metric = metric
if season is not None:
self.ranking_season = season
if verbose == True:
print('Ranking models based on {0} metric for {1} season. \n \n'.format(self.ranking_metric, self.ranking_season))
metrics_pd = self.metrics_pd
model_names = self.model_names
n_models = np.size(model_names)
if verbose == True:
print('Number of models found = {0}'.format(n_models))
season_names = self.season_names
# Check if metric is in
metrics_and_models = list(metrics_pd['Metric'].values)
season_vals = metrics_pd[self.ranking_season].values
if verbose == True:
print('Table metric and models list : \n', metrics_and_models)
print('Season Values : \n ', season_vals)
if self.ranking_metric in metrics_and_models:
i = metrics_and_models.index(self.ranking_metric)
metric_column = season_vals[i+1:i+n_models+1]
self.ranking_values = metric_column
#metric_column = mat_metricColumn(metrics_pd, self.ranking_metric, self.ranking_season, n_models, verbose)
# Sorted default ordering is minimum to maximum. For correlations we look for highest positive (??).
if self.ranking_metric == 'PEARSON':
metric_column *= -1
# To compare RMSEs values have to be normalized based on gold standard's MS value.
if self.ranking_metric == 'RMSE':
metric_column /= np.sqrt(np.mean(np.power(self.target_pred,2)))
ranking = [model for (metric, model) in sorted(zip(metric_column, model_names), key=lambda pair: pair[0])]
if verbose == True:
print('Old Ranking: {0} \n Values for metric: {2} \n New ranking: {1} \n \n'.format(self.ranking, ranking, self.ranking_values ))
self.ranking = ranking
else:
print('Ranking metric not available. Please use another metric')
def mat_predsAndErrors(self, which_rank = None, verbose = False, start_season = 3, n_top_models = None ,dates_bol = True, gft_bol = True):
'''
Makes time series, error, and % error plot for the specified number of models. Models are chosen based on the ranking.
-If there is no ranking, it plots the models in the order they were written on the prediction csv.
-If the specified number is bigger than the number of models available, it plots all. '''
# Get predictions for the top n
if n_top_models is None:
n_top_models = self.n_top_models
ranking = self.ranking
model_names = self.model_names
preds_pd = self.preds_pd
gold_std = self.target_pred
n_lags = self.n_lags
gstd_rmse = np.sqrt(np.mean(np.power(gold_std,2)))
season_indices = self.season_indices - n_lags
season_indices[season_indices<0] = 0
season_names = self.season_names
if season_indices[1] < 1:
print('Warning! Season indices values may conflict with code', season_indices)
ax2_limit = 2
season_names.remove('ALL_PERIOD')
if gft_bol == True:
season_names.remove('GFT_PERIOD')
if which_rank is None:
which_rank = range(0, n_top_models)
if np.amax(which_rank) > len(ranking)-1:
print('Not accessible rank detected {0}. Please modify value'.format(np.amax(which_rank)))
import numpy as np
import math
def makeSpiral(N,nRounds,PowCoeff,phiBase,m):
#Simple VD Spiral
if(nRounds<4):
phiBase = 0
t = np.arange(0, 1, 1/N)
import numpy as np
from tensorflow.contrib.graph_editor import Transformer
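# NOTE: the functions below rely on Transformer.scale() and Transformer.swap_joints();
# the graph_editor Transformer imported above does not provide those helpers, so this
# import most likely should point to a local pose-transformation Transformer class.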
def crop(image, bbox, x, y, length):
x, y, bbox = x.astype(np.int), y.astype(np.int), bbox.astype(np.int)
x_min, y_min, x_max, y_max = bbox
w, h = x_max - x_min, y_max - y_min
# Crop image to bbox
image = image[y_min:y_min + h, x_min:x_min + w, :]
# Crop joints and bbox
x -= x_min
y -= y_min
bbox = np.array([0, 0, x_max - x_min, y_max - y_min])
# Scale to desired size
side_length = max(w, h)
f_xy = float(length) / float(side_length)
image, bbox, x, y = Transformer.scale(image, bbox, x, y, f_xy)
# Pad
new_w, new_h = image.shape[1], image.shape[0]
cropped = np.zeros((length, length, image.shape[2]))
dx = length - new_w
dy = length - new_h
x_min, y_min = int(dx / 2.), int(dy / 2.)
x_max, y_max = x_min + new_w, y_min + new_h
cropped[y_min:y_max, x_min:x_max, :] = image
x += x_min
y += y_min
x = np.clip(x, x_min, x_max)
y = np.clip(y, y_min, y_max)
bbox += np.array([x_min, y_min, x_min, y_min])
return cropped, bbox, x.astype(np.int), y.astype(np.int)
def scale(image, bbox, x, y, f_xy):
(h, w, _) = image.shape
h, w = int(h * f_xy), int(w * f_xy)
from skimage.transform import resize
image = resize(image, (h, w), preserve_range=True, anti_aliasing=True, mode='constant').astype(np.uint8)
x = x * f_xy
y = y * f_xy
bbox = bbox * f_xy
x = np.clip(x, 0, w)
y = np.clip(y, 0, h)
return image, bbox, x, y
def flip(image, bbox, x, y):
image = np.fliplr(image).copy()
w = image.shape[1]
x_min, y_min, x_max, y_max = bbox
bbox = np.array([w - x_max, y_min, w - x_min, y_max])
x = w - x
x, y = Transformer.swap_joints(x, y)
return image, bbox, x, y
def rotate(image, bbox, x, y, angle):
# image - -(256, 256, 3)
# bbox - -(4,)
# x - -[126 129 124 117 107 99 128 107 108 105 137 155 122 99]
# y - -[209 176 136 123 178 225 65 47 46 24 44 64 49 54]
# angle - --8.165648811999333
# center of image [128,128]
o_x, o_y = (np.array(image.shape[:2][::-1]) - 1) / 2.
width, height = image.shape[0], image.shape[1]
x1 = x
y1 = height - y
o_x = o_x
o_y = height - o_y
from skimage.transform import rotate as sk_rotate
image = sk_rotate(image, angle, preserve_range=True).astype(np.uint8)
r_x, r_y = o_x, o_y
angle_rad = (np.pi * angle) / 180.0
x = r_x + np.cos(angle_rad) * (x1 - o_x) - np.sin(angle_rad) * (y1 - o_y)
y = r_y + np.sin(angle_rad) * (x1 - o_x) + np.cos(angle_rad) * (y1 - o_y)
x = x
y = height - y
bbox[0] = r_x + np.cos(angle_rad) * (bbox[0] - o_x) + np.sin(angle_rad) * (bbox[1] - o_y)
bbox[1] = r_y + -np.sin(angle_rad) * (bbox[0] - o_x) + np.cos(angle_rad) * (bbox[1] - o_y)
bbox[2] = r_x + np.cos(angle_rad) * (bbox[2] - o_x) + np.sin(angle_rad) * (bbox[3] - o_y)
bbox[3] = r_y + -np.sin(angle_rad) * (bbox[2] - o_x) + np.cos(angle_rad) * (bbox[3] - o_y)
return image, bbox, x, y
#Author: <NAME>
#Contact: <EMAIL>
#Date: September 11, 2018
# coding: utf-8
# <h1>Table of Contents<span class="tocSkip"></span></h1>
# <div class="toc"><ul class="toc-item"></ul></div>
# In[1]:
# grdecl_read_plot defines the function to read & plot multiple realizations of GRDECL exports (2D layer).
# real_num: the number of realizations.
# file_name: the petrel exporeted grdecl file name, e.g.: Facies_1.GRDECL
# i_dim, j_dim: i and j dimensions of the exported grdecl.
# plot_num: the number of realizations to plot, currently the plot_num must < 16.
# GRDECL export setting in Petrel:
# Local coord system, without Mapaxes, User define cell origin, cell origin at(I=0, J=max J,K), Traverse first along I, then along J
# this returns grid_data in grid_data[k_dim, j_dim, i_dim] formate
import numpy as np
import matplotlib.pyplot as plt
from tqdm import tqdm
def grdecl_read(file_name, i_dim, j_dim, k_dim):
####### MAIN FUNCTION ######
## import the GRDECl files by ignore the first 15 header lines, and then unwrap the unified grid values, then plot them
new_data=[]
f = open(file_name, 'r')
data= f.readlines()[15:]
f.close()
## convert the original data into to a 3D array cube with i rows, j columns, and k layers.
for row in data:
for a in row.split():
index = a.find('*')
if index >=0:
cell_num = int(a[:index])
for i in range(cell_num):
val=float(a[index+1:])
new_data.append(val)
elif a !='/':
val=float(a)
new_data.append(val)
new_data = np.asarray(new_data)
grid_data =new_data.reshape(k_dim,j_dim,i_dim)
# The grid_data will be grid_data[k_dim][j_dim][i_dim]
return grid_data
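# Minimal usage sketch (file name and dimensions are hypothetical, matching the Petrel
# export convention described above):
#   grid = grdecl_read('Facies_1.GRDECL', i_dim=200, j_dim=100, k_dim=75)
#   layer = grid[0]                      # one k-layer with shape (j_dim, i_dim)
#   plt.imshow(layer, origin='lower'); plt.colorbar(); plt.show()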
def grdecl_plot(file_pre, i_dim, j_dim, k_dim, layernum, pro_type):
fig=plt.figure(figsize=(15,14))
count = 1
if pro_type == 'facies':
for realnum in tqdm(range(12)):
grid_data = grdecl_read(file_pre + str(realnum+1) + '.GRDECL',200,100,75)
plot=fig.add_subplot(3, 4, count)
count = count+1
prop_mean = format(np.mean(grid_data))
#! /usr/bin/env python
# -*- coding: utf-8 -*-
""" """
import numpy as np
# ====================== #
# #
# High Level Function #
# #
# ====================== #
def cube_to_spectrum(cube, ):
""" wrapper around extract_star """
pass
# ====================== #
# #
# CORE FUNCTION #
# #
# ====================== #
def extract_star(cube,
lbda_step1=None,
psfmodel="NormalMoffatTilted",fwhm_guess=None,
centroids=None, centroids_err=[5,5],
only_step1=False, spaxel_unit=1, step1_fit_prop={},
final_slice_width=None,
force_ellipse=True, force_centroid=True, force_sigma=True, force_alpha=True,
normalized=False, ncore=None, notebook=False,
verbose=True):
"""
Returns
-------
spectrum, model (cube), psfmodel (cube), bkgdmodel (cube), psffit, slfits
"""
from pyifu import get_spectrum, get_cube
if lbda_step1 is None:
# 6 Optical bins
lbda_step1 = lbda_and_bin_to_lbda_step1([4000,8000], 6)
# Step 1
psffit = fit_metaslices(cube, lbda_step1,
psfmodel=psfmodel,
centroids=centroids, centroids_err=centroids_err,
spaxel_unit=spaxel_unit,fwhm_guess=fwhm_guess,
verbose=verbose,
**step1_fit_prop)
if only_step1:
return psffit
# Step 2
# ellipse_parameters
ab, aberr, theta, thetaerr = psffit.get_ellipse_parameters()
cmodel = psffit.get_chromatic_profile_model()
slfits = cmodel.force_fit(cube, ab=ab, theta=theta,
aberr=aberr*2, thetaerr=thetaerr*2,
psfmodel=psfmodel,
force_ellipse=force_ellipse,
force_centroid=force_centroid,
force_sigma=force_sigma, force_alpha=force_alpha,
slice_width=final_slice_width,
verbose=verbose
)
lbdas = np.asarray([slfits[i].lbda for i in range(len(slfits))])
# Returns all structures
cube_prop = dict(header=cube.header, lbda=lbdas,
spaxel_mapping = cube.spaxel_mapping, spaxel_vertices=cube.spaxel_vertices)
# Background
databkgd = np.asarray([slfits[i].model.get_background(slfits[i]._xfitted, slfits[i]._yfitted)
for i in range(len(lbdas))])
if len(np.shape(databkgd)) ==1: # means "Flat"
databkgd = np.asarray([databkgd for i in range(len(cube.indexes))]).T
bkgdmodel = get_cube( databkgd, **cube_prop)
# PSF
psfmodel_ = get_cube( np.asarray([slfits[i].model.get_profile(slfits[i]._xfitted, slfits[i]._yfitted)
for i in range(len(lbdas))]),
**cube_prop)
# Complit Model
model = get_cube( np.asarray([slfits[i].model.get_model(slfits[i]._xfitted, slfits[i]._yfitted)
for i in range(len(lbdas))]),
**cube_prop)
# = The spectrum
flux,err = np.asarray([[slfits[i].fitvalues["amplitude"]*slfits[i].fitvalues["used_amplscale"],
slfits[i].fitvalues["amplitude.err"]*slfits[i].fitvalues["used_amplscale"]]
for i in range(len(lbdas))]).T
# Normalization
if normalized:
print(" ******************* ")
print("WARNING: normalized tools is not accurate yet.")
print(" ******************* ")
normalization = _get_spectrum_normalization_(slfits, psfmodel=psfmodel,
ncore=ncore, notebook=notebook, verbose=verbose)
norm = np.asarray([normalization[i][0] for i in range( len(normalization) ) ])
flux /= norm
err /= norm
spectrum = get_spectrum(lbdas, flux, variance=err**2, header=cube.header)
return spectrum, model, psfmodel_, bkgdmodel, psffit, slfits
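# Minimal usage sketch (the cube file name is hypothetical; loading assumes a pysedm-style
# cube as used elsewhere in this module):
#   cube = pysedm.get_sedmcube('e3d_crr_b_ifu20180711_target.fits')
#   spec, model, psf, bkgd, psffit, slfits = extract_star(cube)
#   spec.show()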
def lbda_and_bin_to_lbda_step1(lbdaranges, bins):
""" """
STEP_LBDA_RANGE = np.linspace(lbdaranges[0],lbdaranges[1], bins+1)
return np.asarray([STEP_LBDA_RANGE[:-1], STEP_LBDA_RANGE[1:]]).T
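# Worked example: lbda_and_bin_to_lbda_step1([4000, 8000], 2) returns
#   [[4000., 6000.], [6000., 8000.]], i.e. consecutive meta-slice wavelength edges paired up.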
def _get_spectrum_normalization_(slfits, ncore=None, notebook=False, verbose=True,
psfmodel="NormalMoffatTilted"):
""" """
if "NormalMoffat" in psfmodel:
from .model import get_normalmoffat_normalisation
normalisation_func = get_normalmoffat_normalisation
else:
raise NotImplementedError("Only NormalMoffat profile normalization implemented")
import multiprocessing
from astropy.utils.console import ProgressBar
if ncore is None:
if multiprocessing.cpu_count()>20:
ncore = multiprocessing.cpu_count() - 8
elif multiprocessing.cpu_count()>8:
ncore = multiprocessing.cpu_count() - 5
else:
ncore = multiprocessing.cpu_count() - 2
if ncore==0:
ncore = 1
if verbose: print("Measuring Spectrum Normalization, using %d cores"%ncore)
with ProgressBar( len(slfits), ipython_widget=notebook) as bar:
p = multiprocessing.Pool(ncore)
res = {}
for j, result in enumerate( p.imap( normalisation_func, [slfits[i].model.param_profile for i in range(len( slfits))] )):
res[j] = result
bar.update(j)
return res
def build_parameter_prior(filenames, centroids=None, psfmodel="NormalMoffatTilted", lbdaranges=[4000,8500], bins=10):
""" """
prop_fit = {}
STEP_LBDA_RANGE = np.linspace(lbdaranges[0],lbdaranges[1], bins+1)
lbdas = np.asarray([STEP_LBDA_RANGE[:-1], STEP_LBDA_RANGE[1:]]).T
def _fit_cube_(cube):
psffit = SlicePSFCollection()
psffit.set_cube(cube)
for i,lbdar in enumerate(lbdas):
psffit.extract_slice(i, *lbdar)
slpsf = psffit.fit_slice(i, psfmodel=psfmodel,
**prop_fit)
return slpsf
return {filename_: _fit_cube_(pysedm.get_sedmcube(filename_)) for filename_ in filenames}
def fit_metaslices(cube, lbdas, psfmodel="NormalMoffatTilted",
centroids=None, centroids_err=[5,5],
spaxel_unit=1, fwhm_guess=None, verbose=True,
**kwargs):
""" """
from .fitter import SlicePSFCollection, guess_fwhm
psffit = SlicePSFCollection()
psffit.set_cube(cube)
# Provide Initial guess on FWHM
if fwhm_guess is None:
        ref_slice = cube.get_slice(lbda_min=6000, lbda_max=7000, slice_object=True)
        fwhm_guess = guess_fwhm(ref_slice, verbose=verbose)
for i,lbdar in enumerate(lbdas):
psffit.extract_slice(i, *lbdar)
slpsf = psffit.fit_slice(i, psfmodel=psfmodel,
centroids=centroids,
centroids_err=centroids_err,
fwhm_guess=fwhm_guess,
**kwargs)
psffit.load_adrfitter(spaxel_unit=spaxel_unit)
psffit.fit_adr()
return psffit
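# Usage sketch (assumes `cube` is a pysedm-like cube object; values are illustrative):
#   lbdas = lbda_and_bin_to_lbda_step1([4000, 8500], bins=10)
#   psffit = fit_metaslices(cube, lbdas, psfmodel="NormalMoffatTilted", centroids=[0, 0])
#   # psffit holds the per-slice PSF fits and the fitted ADR model.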
def automatic_fit_psf(cube, centroids=[0,0],
centroids_err=[3,3],
psfmodel="NormalMoffatTilted", step_bins=[3,10]):
""" """
prop_fit = dict(ab_boundaries=[0.01,0.5])
# ================== #
# Step 1: 5 slices #
# All Free #
# ================== #
STEP1_LBDA_RANGE = np.linspace(4500,8000, step_bins[0]+1)
step1_lbdas = np.asarray([STEP1_LBDA_RANGE[:-1], STEP1_LBDA_RANGE[1:]]).T
psffit_step1 = SlicePSFCollection()
psffit_step1.set_cube(cube)
for i,lbdar in enumerate(step1_lbdas):
psffit_step1.extract_slice(i, *lbdar)
slpsf = psffit_step1.fit_slice(i, psfmodel=psfmodel,
centroids=centroids,
centroids_err=centroids_err, **prop_fit)
psffit_step1.load_adrfitter()
psffit_step1.fit_adr()
if len(step_bins)==1 or step_bins[1] is None:
return psffit_step1, None
# ================== #
# Step 2: 15 slices #
# Strong centroid #
# ================== #
# - Helping on the ellipticity
[mean_ab, mean_aberr, mean_theta, mean_thetaerr], mask_removed = psffit_step1.get_ellipse_parameters()
stddev_ratio,stddev_ratioerr = psffit_step1.get_stddev_ratio()
STEP2_LBDA_RANGE = np.linspace(4500,8000, step_bins[1]+1)
step2_lbdas = np.asarray([STEP2_LBDA_RANGE[:-1], STEP2_LBDA_RANGE[1:]]).T
STEP2_CENTROID_ERROR = [0.2, 0.2]
psffit_step2 = SlicePSFCollection()
psffit_step2.set_cube(cube)
prop_fit["ab_guess"] = mean_ab
prop_fit["ab_boundaries"] = [mean_ab-mean_aberr, mean_ab+mean_aberr]
prop_fit["theta_guess"] = mean_theta
prop_fit["theta_boundaries"] = [mean_theta-mean_thetaerr, mean_theta+mean_thetaerr]
prop_fit["stddev_ratio_guess"] = stddev_ratio
prop_fit["stddev_ratio_boundaries"] = [stddev_ratio-stddev_ratioerr, stddev_ratio+stddev_ratioerr]
for i,lbdar in enumerate(step2_lbdas):
        centroids = psffit_step1.get_adr_centroid( np.mean(lbdar) )
        psffit_step2.extract_slice(i, *lbdar)
        slpsf = psffit_step2.fit_slice(i, psfmodel=psfmodel,
                                       centroids=centroids,
                                       centroids_err=STEP2_CENTROID_ERROR, **prop_fit)
    psffit_step2.load_adrfitter()
    psffit_step2.fit_adr()
    return psffit_step1, psffit_step2
# ldpc.py
# Copyright 2020 <NAME>
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import timeit
from os import path
import warnings
import numpy as np
from scipy.sparse import csr_matrix, save_npz, load_npz
import numba as nb
from numba import njit, prange
from numba.typed import List, Dict
def generate_code(n, q, r, load):
"""
Creates or loads a random regular low-density parity-check (LDPC) code.
:param n: The number of columns of the regular LDPC code.
:param q: The Galois field exponent.
:param r: The code rate.
:param load: Determines whether the LDPC code is loaded from disk or a new code is created.
:return: The regular LDPC code in its dense and sparse form and the dictionary of its values.
"""
wc = 2 # Column weight of the low-density parity-check matrix (usually 2 <= wc <= 4)
wr = int(np.round(wc / (1 - r))) # Row weight of the low-density parity-check matrix
m = int(np.round(n * wc / wr)) # Number of rows of the low-density parity-check matrix
k = n - m # Information bits of the low-density parity-check matrix
r_design = 1 - wc / wr # The true rate of the code, which should be very close to the code rate from the data
print("Code Rate R_code:", r, "Design Rate R_des:", r_design)
np.testing.assert_almost_equal(r_design, r, decimal=1, err_msg="The error between the LDPC code rate and the "
"code rate from the data is too high.")
if load:
filename = 'codes/' + str(n) + "-" + str(r_design)[0:5] + "-" + str(wc) + "-" + str(wr) + '.npz'
# Check if the file name used matches the characteristics for the low-density parity-check code
vals = filename.split("-")
if int(vals[0].replace("codes/", "")) != n:
raise RuntimeWarning("The column number specified is not the same as the column number of the loaded code.")
elif vals[1] != str(r_design)[0:5]:
raise RuntimeWarning("The code rate of the data is not the same as the rate of the loaded code.")
elif int(vals[2]) != wc:
raise RuntimeWarning("The column weight specified is not the same as the column weight of the loaded code.")
elif int(vals[3].replace(".npz", "")) != wr:
raise RuntimeWarning("The row weight of the data is not the same as the row weight of the loaded code.")
else:
try:
code_sparse = load_npz(filename)
print("The following LDPC parity check matrix was successfully loaded from disk:", filename)
except (FileNotFoundError, IOError):
raise FileNotFoundError("The file", filename, "does not exist. A simulation with the given parameters "
"must be first run in order to create the code array.")
except ValueError:
raise ValueError("Pickled=false error, need to fix")
code = code_sparse.toarray()
m = code.shape[0]
vals = get_values(m, code_sparse)
else:
print("Creating a new LDPC code of size", m, "x", n, "with column weight", wc, "and row weight", wr, "...")
code, vals = create_random_regular_code(n, m, wc, wr, q)
code_sparse = csr_matrix(code, dtype=np.uint8)
if path.exists('codes/' + str(n) + "-" + str(r_design)[0:5] + "-" + str(wc) + "-" + str(wr) + '.npz'):
warnings.warn("An LDPC code with the specified specs already exists. A new one was still created.")
save_npz('codes/' + str(n) + "-" + str(r_design)[0:5] + "-" + str(wc) + "-" + str(wr) + '-new.npz',
code_sparse, compressed=True)
else:
save_npz('codes/' + str(n) + "-" + str(r_design)[0:5] + "-" + str(wc) + "-" + str(wr) + '.npz', code_sparse,
compressed=True)
return k, m, code, code_sparse, vals, r_design, wc, wr
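# Usage sketch (illustrative parameters): create (or reload with load=True) a rate-1/2,
# 1000-column regular LDPC code over GF(2^4); the sparse form is also saved under codes/.
#   k, m, code, code_sparse, vals, r_design, wc, wr = generate_code(n=1000, q=4, r=0.5, load=False)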
@njit(fastmath=True, parallel=False, cache=True)
def set_ldpc_values(h, m, n, q):
"""
Replaces the nonzero units of an array with random values from a chosen Galois field.
:param h: The LDPC matrix.
:param m: The number of rows of the LDPC matrix.
:param n: The number of columns of the LDPC matrix.
:param q: The Galois Field exponent.
:return: The LDPC code array whose nonzero values belong to a Galois Field and the dictionary of these values.
"""
v = Dict()
for i in range(0, m):
for j in range(0, n):
if h[i][j] != 0:
h[i][j] = np.random.randint(low=1, high=2 ** q)
v[(i, j)] = h[i][j]
return h, v
@njit(fastmath=True, cache=True)
def check_matrix_rank(h, m, n):
"""
Ensures that the LDPC code array has full rank. If the array does not have full rank, its true code rate is shown.
:param h: The LDPC matrix.
:param m: The number of rows of the LDPC matrix.
:param n: The number of columns of the LDPC matrix.
"""
rank_h = np.linalg.matrix_rank(h.astype(np.float32)) # Input required to be in float format
if m < n:
if rank_h == h.shape[0]:
print("The matrix has full rank.")
else:
print("Warning: The matrix does not have full rank. The code rate is R_code =", (n - rank_h) / n)
else:
if rank_h == h.shape[1]:
print("The matrix has full rank.")
else:
print("Warning: The matrix does not have full rank.")
@njit(fastmath=True, parallel=False, cache=True)
def check_column_overlap(h, n):
"""
Checks if the overlap (inner product) of two consecutive columns of an LDPC code is larger than one and reports the
columns that have this trait.
:param h: The LDPC matrix.
:param n: The number of columns of the LDPC matrix.
"""
hT_float = np.ascontiguousarray(h.T.astype(np.float32))
for i in prange(n - 1):
h1 = hT_float[i]
h2 = hT_float[i + 1]
dot = np.dot(h1, h2)
if dot > 1.0:
print("Warning: Inner product larger than one found between columns", i, "and", i + 1, "(", dot, ")")
@njit(fastmath=True, parallel=False, cache=True)
def check_row_weights(h, m, r):
"""
    Checks if the rows of an LDPC code have the specified row weight and reports the rows that deviate from it.
:param h: The LDPC matrix.
:param m: The number of rows of the LDPC matrix.
:param r: The specified row weight.
"""
row_error = 0
for i in prange(m):
if np.count_nonzero(h[i]) != r:
row_error = row_error + 1
print("Row weight error in row", i, "- has", np.count_nonzero(h[i]), "bits")
if row_error == 0:
print("No row weight error found.")
else:
print("Row count with weight error:", row_error)
@njit(fastmath=True, parallel=False, cache=True)
def check_column_weights(h, n, c):
"""
Checks if the columns of an LDPC code have the specified column weight and reports the columns that deviate from it.
:param h: The LDPC matrix.
:param n: The number of columns of the LDPC matrix.
:param c: The specified column weight.
"""
col_error = 0
for i in prange(n):
if np.count_nonzero(h.T[i]) != c:
col_error = col_error + 1
print("Column weight error in row", i, "- has", np.count_nonzero(h.T[i]), "bits")
if col_error == 0:
print("No column weight error found.")
else:
print("Column count with weight error:", col_error)
def get_values(m, h: csr_matrix):
"""
Returns the nonzero values of an array, along with their indices. The indices are stored in Numba typed dictionaries
so that they are able to be interpreted by Numba.
:param m: The number of rows of the LDPC matrix.
:param h: The sparse LDPC matrix.
:return: The dictionary of indices and nonzero values of an array.
"""
# If the row number is too large, extra memory will be required
if m < 2 ** 16:
data_type = np.uint16
key_type = nb.uint16
else:
data_type = np.uint32
key_type = nb.uint32
r = h.tocoo().row.astype(data_type)
c = h.tocoo().col.astype(np.uint32)
v = Dict.empty(key_type=nb.types.Tuple((key_type, nb.uint32)), value_type=nb.types.uint8)
for i in range(len(r)):
v[(r[i], c[i])] = h[r[i], c[i]]
return v
# @njit()
def get_dict_nodes(vals, rows_exc, cols_exc, c_lil, c_lil_t):
for key in vals:
rows_exc[key] = np.array([n for n in c_lil[key[0]] if n != key[1]], dtype=np.int32)
cols_exc[key] = np.array([n for n in c_lil_t[key[1]] if n != key[0]], dtype=np.int32)
return rows_exc, cols_exc
def get_nodes(n, m, h: csr_matrix, ext):
"""
Gets the nonzero row and column indices of a sparse array. The indices are stored in Numba typed lists so that they
are able to be interpreted by Numba.
:param n: The number of columns of the LDPC matrix.
:param m: The number of rows of the LDPC matrix.
:param h: The sparse LDPC matrix.
    :param ext: If True, additionally return, for every check node, the lists of connected variable nodes excluding each current one.
:return: The variable and check nodes of the LDPC matrix.
"""
rows = List()
cols = List()
cols_exc = List()
# Convert the sparse matrix from a csr form to others to quickly obtain the necessary values
c_lil = h.tolil().astype(dtype=np.uint8).rows
c_lil_t = h.transpose().tolil().astype(dtype=np.uint8).rows
# Get the indices of CN-to-VN messages
if not ext:
for r in range(m): # For every row of the VN-to-CN messages array
rows.append(List(c_lil[r])) # Get the VNs connected to a certain CN
else:
rows_exc = List()
for r in range(m): # For every row of the VN-to-CN messages array
rows.append(List(c_lil[r])) # Get the VNs connected to a certain CN
lst = List()
for j in range(len(rows[r])):
y = rows[r][:]
y.remove(rows[r][j])
lst.append(y)
rows_exc.append(lst)
# Get the indices of VN-to-CN messages and the indices of VN-to-CN messages, excluding the current VN
for c in range(n):
cols.append(List(c_lil_t[c]))
lst = List()
for j in range(len(cols[c])):
y = cols[c][:]
y.remove(cols[c][j])
lst.append(y)
cols_exc.append(lst)
if not ext:
return rows, cols, cols_exc
else:
return rows, rows_exc, cols, cols_exc
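# Usage sketch (illustrative): for belief-propagation decoding the node lists are typically
# extracted once from the sparse parity-check matrix:
#   rows, cols, cols_exc = get_nodes(n, m, code_sparse, ext=False)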
@njit(fastmath=True, parallel=False, cache=True)
def create_random_regular_code(n, m, c, r, q):
"""
Low-density parity-check (LDPC) codes can be specified by a non-systematic sparse parity-check matrix H, having a
uniform column weight and a uniform row weight. H is constructed at random to these constraints. A (n,c,r) LDPC code
is specified by a parity-check matrix H having m rows and n columns, with r 1's per row and c 1's per column.
    The code formed from such a parity-check matrix is known as a regular Gallager code.
:param n: The code block length (number of columns of H).
:param m: The number of rows of the LDPC code.
:param c: The column weight of H (number of non zeros per column).
:param r: The row weight of H (number of non zeros per row).
:param q: The Galois field exponent.
:return: The LDPC matrix along with its values dictionary.
"""
# Step 0: Validity checks
if n <= r: # n must be larger than r
raise ValueError("The number of rows of an LDPC code must always be smaller than its number of columns.")
if r < 2: # r must be at least 2
raise ValueError("The row weight of an LDPC code must be at least 2.")
if c < 2:
raise ValueError("The column weight of an LDPC code must be at least 2.")
# Step 1: An all-zero matrix H of dimension (m x n) is created.
h = np.zeros((m, n), dtype=np.uint8)
# Step 2: For each column in H, c 1s are placed in rows chosen at random.
for i in prange(n):
cols = np.random.choice(m, c, replace=False)
h.T[i][cols] = 1
# Step 3: The software then runs through the matrix searching for a row with zero 1's or just one 1.
for i in prange(m):
# If a row has no 1's in it, then it is a redundant row.
# So the software chooses 2 columns in the same row at random and places 1's in those columns.
if np.count_nonzero(h[i]) == 0:
a = np.random.choice(n, 2, replace=False)
h[i][a[0]] = 1
h[i][a[1]] = 1
        # If a row has just one 1 in it, it means that the codeword bit in that column is always zero.
# So whenever the software finds a row with just one 1 in it, it randomly picks another column in the same row
# and places a 1 there.
elif np.count_nonzero(h[i]) == 1:
h[i][np.random.randint(0, n)] = 1
# Step 4: The software then calculates the number of 1's per row.
# If this is not an integer, the software rounds the value to the next higher integer.
threshold = int(np.round(r))
# Check if the code can be regular with the given parameters (only for n <= 10 ** 3 to save time)
if n <= 10 ** 3:
if np.count_nonzero(h[:]) % n == 0 and np.count_nonzero(h[:]) % m == 0:
print("The code can be regular - Total count of bits:", np.count_nonzero(h[:]))
else:
print("The code will be irregular - Total count of bits:", np.count_nonzero(h[:]))
# Note down the rows, whose nonzero elements are below the threshold, to achieve faster computation in Step 5
rows_below_threshold_list = []
for row in range(0, m):
if np.count_nonzero(h[row]) < threshold:
rows_below_threshold_list.append(row)
rows_below_threshold = np.array(rows_below_threshold_list, dtype=np.uint32)
# Step 5: The software then runs through the matrix trying to make the number of 1's per row as uniform as possible.
for i in range(m):
# For any row i containing more number of ones than the value calculated in Step 4
while np.count_nonzero(h[i]) > threshold:
# print(i, np.count_nonzero(h[i]), rows_below_threshold.size, m)
# The software picks a column containing a 1 at random and tries to move that 1 to a different row
# (randomly chosen such that has it a lower number of 1's than the value in step 4) in the same
# column. The software makes sure that the row chosen does not have a 1 in that particular column.
non_zeros = np.nonzero(h[i]) # Available columns to choose from
chosen_column = np.random.choice(non_zeros[0]) # Randomly choose one of the available columns
if rows_below_threshold.size == 0:
break
random_row = np.random.choice(rows_below_threshold) # Randomly choose one of the saved rows below threshold
if np.count_nonzero(h[random_row]) <= threshold and h[random_row][chosen_column] == 0:
h[random_row][chosen_column] = 1
h[i][chosen_column] = 0
# If the nonzero elements of the row are now equal to the threshold, remove the row from the list
if np.count_nonzero(h[random_row]) == threshold:
index = np.where(rows_below_threshold == random_row)
rows_below_threshold = | np.delete(rows_below_threshold, index[0][0]) | numpy.delete |
# coding: utf-8
# DO NOT EDIT
# Autogenerated from the notebook glm_weights.ipynb.
# Edit the notebook and then sync the output with this file.
#
# flake8: noqa
# DO NOT EDIT
# # Weighted Generalized Linear Models
import numpy as np
import pandas as pd
import statsmodels.formula.api as smf
import statsmodels.api as sm
# ## Weighted GLM: Poisson response data
#
# ### Load data
#
# In this example we will use the affairs dataset, with a handful of
# exogenous variables, to predict the extra-marital affair rate.
#
#
# Weights will be generated to show that `freq_weights` are equivalent to
# repeating records of data. On the other hand, `var_weights` are equivalent
# to aggregating data.
print(sm.datasets.fair.NOTE)
# Load the data into a pandas dataframe.
data = sm.datasets.fair.load_pandas().data
# The dependent (endogenous) variable is "affairs".
data.describe()
data[:3]
# In the following we will work mostly with Poisson. While using decimal affairs
# works, we convert them to integers to have a count distribution.
#
data["affairs"] = np.ceil(data["affairs"])
data[:3]
(data["affairs"] == 0).mean()
np.bincount(data["affairs"].astype(int))
# ## Condensing and Aggregating observations
#
# We have 6366 observations in our original dataset. When we consider only some
# selected variables, then we have fewer unique observations. In the following
# we combine observations in two ways: first, we combine observations that have
# the same values for all variables, and second, we combine observations that
# have the same explanatory variables.
#
# ### Dataset with unique observations
#
# We use pandas's groupby to combine identical observations and to create a new
# variable `freq` that counts how many observations correspond to the row in the
# combined dataset.
data2 = data.copy()
data2['const'] = 1
dc = data2['affairs rate_marriage age yrs_married const'.split()].groupby(
'affairs rate_marriage age yrs_married'.split()).count()
dc.reset_index(inplace=True)
dc.rename(columns={'const': 'freq'}, inplace=True)
print(dc.shape)
dc.head()
# ### Dataset with unique explanatory variables (exog)
#
# For the next dataset we combine observations that have the same values of the
# explanatory variables. However, because the response variable can differ among
# the combined observations, we compute both the mean and the sum of the response
# variable for all combined observations.
# We use pandas ``groupby`` again to combine the observations and to create the new
# variables. We also flatten the ``MultiIndex`` into a simple index.
gr = data['affairs rate_marriage age yrs_married'.split()].groupby(
'rate_marriage age yrs_married'.split())
df_a = gr.agg(['mean', 'sum', 'count'])
def merge_tuple(tpl):
if isinstance(tpl, tuple) and len(tpl) > 1:
return "_".join(map(str, tpl))
else:
return tpl
df_a.columns = df_a.columns.map(merge_tuple)
df_a.reset_index(inplace=True)
print(df_a.shape)
df_a.head()
# After combining the observations we have a dataframe `dc` with 467 unique
# observations, and a dataframe `df_a` with 130 observations with unique values
# of the explanatory variables.
#
print('number of rows: \noriginal, with unique observations, with unique exog')
data.shape[0], dc.shape[0], df_a.shape[0]
# ## Analysis
#
# In the following, we compare the GLM-Poisson results of the original data with
# models of the combined observations where the multiplicity or aggregation is
# given by weights or exposure.
#
#
# ### Original data
glm = smf.glm(
'affairs ~ rate_marriage + age + yrs_married',
data=data,
family=sm.families.Poisson())
res_o = glm.fit()
print(res_o.summary())
res_o.pearson_chi2 / res_o.df_resid
# ### Condensed data (unique observations with frequencies)
#
# Combining identical observations and using frequency weights to take into
# account the multiplicity of observations produces exactly the same results.
# Some results attributes will differ when we want to have information about the
# observations and not about the aggregate of all identical observations. For
# example, residuals do not take ``freq_weights`` into account.
glm = smf.glm(
'affairs ~ rate_marriage + age + yrs_married',
data=dc,
family=sm.families.Poisson(),
freq_weights=np.asarray(dc['freq']))
res_f = glm.fit()
print(res_f.summary())
res_f.pearson_chi2 / res_f.df_resid
# ### Condensing using ``var_weights`` instead of ``freq_weights``
#
# Next, we compare ``var_weights`` to ``freq_weights``. It is a common practice
# to include ``var_weights`` when the endogenous variable reflects averages and
# not identical observations.
# I do not see a theoretical reason why it produces the same results (in general).
# It produces the same results, but ``df_resid`` differs from the ``freq_weights``
# example because ``var_weights`` do not change the number of effective observations.
#
glm = smf.glm(
'affairs ~ rate_marriage + age + yrs_married',
data=dc,
family=sm.families.Poisson(),
var_weights=np.asarray(dc['freq']))
res_fv = glm.fit()
print(res_fv.summary())
# The dispersion computed from the results is incorrect because of the wrong ``df_resid``.
# It is correct if we use the original ``df_resid``.
res_fv.pearson_chi2 / res_fv.df_resid, res_f.pearson_chi2 / res_f.df_resid
# ### Aggregated or averaged data (unique values of explanatory variables)
#
# For these cases we combine observations that have the same values of the
# explanatory variables. The corresponding response variable is either a sum or
# an average.
#
# #### Using ``exposure``
#
# If our dependent variable is the sum of the responses of all combined
# observations, then, under the Poisson assumption, the distribution remains the
# same, but we have a varying `exposure` given by the number of individuals that
# are represented by one aggregated observation.
#
# The parameter estimates and the covariance of the parameters are the same as
# with the original data, but the log-likelihood, deviance and Pearson
# chi-squared differ.
#
glm = smf.glm(
'affairs_sum ~ rate_marriage + age + yrs_married',
data=df_a,
family=sm.families.Poisson(),
exposure=np.asarray(df_a['affairs_count']))
res_e = glm.fit()
print(res_e.summary())
res_e.pearson_chi2 / res_e.df_resid
# #### Using var_weights
#
# We can also use the mean of all combined values of the dependent variable. In
# this case the variance is related to the inverse of the total exposure
# reflected by one combined observation.
glm = smf.glm(
'affairs_mean ~ rate_marriage + age + yrs_married',
data=df_a,
family=sm.families.Poisson(),
var_weights=np.asarray(df_a['affairs_count']))
res_a = glm.fit()
print(res_a.summary())
# ### Comparison
#
# We saw in the summary prints above that ``params`` and ``cov_params``, with the
# associated Wald inference, agree across versions. We summarize this in the
# following by comparing individual results attributes across versions.
#
# Parameter estimates `params`, standard errors of the parameters `bse` and
# `pvalues` of the parameters (for the tests that the parameters are zero) all
# agree. However, the likelihood and goodness-of-fit statistics, `llf`,
# `deviance` and `pearson_chi2`, only partially agree. Specifically, the
# aggregated versions do not agree with the results using the original data.
#
# **Warning**: `llf`, `deviance` and `pearson_chi2` may still change in future versions.
#
# Both the sum and the average of the response variable for unique values of the
# explanatory variables have a proper likelihood interpretation. However, this
# interpretation is not reflected in these three statistics. Computationally this
# might be due to missing adjustments when aggregated data is used. However,
# theoretically we can think in these cases, especially for ``var_weights``, of
# the misspecified case where likelihood analysis is inappropriate and the
# results should be interpreted as quasi-likelihood estimates. There is an
# ambiguity in the definition of ``var_weights`` because they can be used for
# averages with a correctly specified likelihood as well as for variance
# adjustments in the quasi-likelihood case. We currently do not try to match the
# likelihood specification. However, in the next section we show that likelihood
# ratio type tests still produce the same result for all aggregation versions
# when we assume that the underlying model is correctly specified.
results_all = [res_o, res_f, res_e, res_a]
names = 'res_o res_f res_e res_a'.split()
pd.concat([r.params for r in results_all], axis=1, keys=names)
pd.concat([r.bse for r in results_all], axis=1, keys=names)
pd.concat([r.pvalues for r in results_all], axis=1, keys=names)
pd.DataFrame(
np.column_stack(
[[r.llf, r.deviance, r.pearson_chi2] for r in results_all]),
columns=names,
index=['llf', 'deviance', 'pearson chi2'])
# ### Likelihood Ratio type tests
#
# We saw above that the likelihood and related statistics do not agree between
# the aggregated data and the original, individual data. We illustrate in the
# following that the likelihood ratio test and the difference in deviance agree
# across versions, however Pearson chi-squared does not.
#
# As before: this is not sufficiently clear yet and could change.
#
# As a test case we drop the `age` variable and compute the likelihood ratio type
# statistics as the difference between the reduced or restricted and the full or
# unrestricted model.
# #### Original observations and frequency weights
glm = smf.glm(
'affairs ~ rate_marriage + yrs_married',
data=data,
family=sm.families.Poisson())
res_o2 = glm.fit()
#print(res_f2.summary())
res_o2.pearson_chi2 - res_o.pearson_chi2, res_o2.deviance - res_o.deviance, res_o2.llf - res_o.llf
glm = smf.glm(
'affairs ~ rate_marriage + yrs_married',
data=dc,
family=sm.families.Poisson(),
freq_weights=np.asarray(dc['freq']))
res_f2 = glm.fit()
#print(res_f2.summary())
res_f2.pearson_chi2 - res_f.pearson_chi2, res_f2.deviance - res_f.deviance, res_f2.llf - res_f.llf
# #### Aggregated data: ``exposure`` and ``var_weights``
#
# Warning: the LR test agrees with the case of the original observations, but
# ``pearson_chi2`` differs and has the wrong sign.
glm = smf.glm(
'affairs_sum ~ rate_marriage + yrs_married',
data=df_a,
family=sm.families.Poisson(),
exposure=np.asarray(df_a['affairs_count']))
res_e2 = glm.fit()
res_e2.pearson_chi2 - res_e.pearson_chi2, res_e2.deviance - res_e.deviance, res_e2.llf - res_e.llf
glm = smf.glm(
'affairs_mean ~ rate_marriage + yrs_married',
data=df_a,
family=sm.families.Poisson(),
var_weights=np.asarray(df_a['affairs_count']))
res_a2 = glm.fit()
res_a2.pearson_chi2 - res_a.pearson_chi2, res_a2.deviance - res_a.deviance, res_a2.llf - res_a.llf
# ### Investigating the Pearson chi-square statistic
#
# First, we do some sanity checks that there are no basic bugs in the computation
# of `pearson_chi2` and `resid_pearson`.
res_e2.pearson_chi2, res_e.pearson_chi2, (res_e2.resid_pearson
**2).sum(), (res_e.resid_pearson
**2).sum()
res_e._results.resid_response.mean(), res_e.model.family.variance(
res_e.mu)[:5], res_e.mu[:5]
(res_e._results.resid_response**2 / res_e.model.family.variance(
res_e.mu)).sum()
res_e2._results.resid_response.mean(), res_e2.model.family.variance(
res_e2.mu)[:5], res_e2.mu[:5]
(res_e2._results.resid_response**2 / res_e2.model.family.variance(
res_e2.mu)).sum()
(res_e2._results.resid_response**2).sum(), (res_e._results.resid_response
**2).sum()
# One possible reason for the incorrect sign is that we are subtracting quadratic
# terms that are divided by different denominators. In some related cases, the
# recommendation in the literature is to use a common denominator. We can compare
# the Pearson chi-squared statistic using the same variance assumption in the full
# and the reduced model.
#
#
# In this case we obtain the same Pearson chi2 scaled difference between the
# reduced and full model across all versions. (Issue
# [#3616](https://github.com/statsmodels/statsmodels/issues/3616) is intended to
# track this further.)
((res_e2._results.resid_response**2 - res_e._results.resid_response**2) /
res_e2.model.family.variance(res_e2.mu)).sum()
((res_a2._results.resid_response**2 - res_a._results.resid_response**2) /
res_a2.model.family.variance(res_a2.mu) * res_a2.model.var_weights).sum()
((res_f2._results.resid_response**2 - res_f._results.resid_response**2) /
res_f2.model.family.variance(res_f2.mu) * res_f2.model.freq_weights).sum()
((res_o2._results.resid_response**2 - res_o._results.resid_response**2) /
res_o2.model.family.variance(res_o2.mu)).sum()
# ## Remainder
#
# The remainder of the notebook just contains some additional checks and can be ignored.
| np.exp(res_e2.model.exposure) | numpy.exp |
import warnings
import numpy as np
import vtk
import vedo
import vedo.addons as addons
import vedo.colors as colors
import vedo.shapes as shapes
import vedo.utils as utils
from vedo.assembly import Assembly
from vedo.mesh import merge
from vedo.mesh import Mesh
__doc__ = """
.. image:: https://vedo.embl.es/images/pyplot/fitPolynomial2.png
Advanced plotting utility functions
"""
__all__ = [
"Figure",
#"Histogram1D", # uncomment to generate docs
#"Histogram2D",
#"PlotXY",
#"PlotBars",
"plot",
"histogram",
"fit",
"donut",
"violin",
"whisker",
"streamplot",
"matrix",
"DirectedGraph",
]
##########################################################################
class LabelData(object):
def __init__(self):
self.text = 'dataset'
self.tcolor = 'black'
self.marker = 's'
self.mcolor = 'black'
##########################################################################
class Figure(Assembly):
"""
Parameters
----------
xlim : list
range of the x-axis as [x0, x1]
ylim : list
range of the y-axis as [y0, y1]
aspect : float, str
the desired aspect ratio of the histogram. Default is 4/3.
Use `aspect="equal"` to force the same units in x and y.
padding : float, list
keep a padding space from the axes (as a fraction of the axis size).
This can be a list of four numbers.
xtitle : str
title for the x-axis, can also be set using `axes=dict(xtitle="my x axis")`
ytitle : str
title for the y-axis, can also be set using `axes=dict(ytitle="my y axis")`
grid : bool
        show the background grid for the axes, can also be set using `axes=dict(xyGrid=True)`
axes : dict
an extra dictionary of options for the axes
"""
def __init__(
self,
xlim,
ylim,
aspect=4/3,
padding=[0.05, 0.05, 0.05, 0.05],
**kwargs,
):
self.xlim = np.array(xlim)
self.ylim = np.array(ylim)
self.aspect = aspect
self.padding = padding
if not utils.isSequence(self.padding):
self.padding = [self.padding, self.padding,self.padding, self.padding]
self.force_scaling_types = (
shapes.Glyph,
shapes.Line,
shapes.Rectangle,
shapes.DashedLine,
shapes.Tube,
shapes.Ribbon,
shapes.GeoCircle,
shapes.Arc,
shapes.Grid,
# shapes.Arrows, # todo
# shapes.Arrows2D, # todo
shapes.Brace, # todo
)
options = dict(kwargs)
self.title = options.pop("title", "")
self.xtitle = options.pop("xtitle", " ")
self.ytitle = options.pop("ytitle", " ")
numberOfDivisions = 6
self.legend = None
self.labels = []
self.label = options.pop("label", None)
if self.label:
self.labels = [self.label]
self.axopts = options.pop("axes", {})
if isinstance(self.axopts, (bool,int,float)):
if self.axopts:
self.axopts = {}
if self.axopts or isinstance(self.axopts, dict):
numberOfDivisions = self.axopts.pop('numberOfDivisions', numberOfDivisions)
self.axopts['xtitle'] = self.xtitle
self.axopts['ytitle'] = self.ytitle
if 'xyGrid' not in self.axopts: ## modify the default
self.axopts['xyGrid'] = options.pop("grid", False)
if 'xyGridTransparent' not in self.axopts: ## modify the default
self.axopts['xyGridTransparent'] = True
if 'xTitlePosition' not in self.axopts: ## modify the default
self.axopts['xTitlePosition'] = 0.5
self.axopts['xTitleJustify'] = "top-center"
if 'yTitlePosition' not in self.axopts: ## modify the default
self.axopts['yTitlePosition'] = 0.5
self.axopts['yTitleJustify'] = "bottom-center"
if self.label:
if 'c' in self.axopts:
self.label.tcolor = self.axopts['c']
x0, x1 = self.xlim
y0, y1 = self.ylim
dx = x1 - x0
dy = y1 - y0
x0lim, x1lim = (x0-self.padding[0]*dx, x1+self.padding[1]*dx)
y0lim, y1lim = (y0-self.padding[2]*dy, y1+self.padding[3]*dy)
dy = y1lim - y0lim
self.axes = None
if xlim[0] >= xlim[1] or ylim[0] >= ylim[1]:
vedo.logger.warning(f"Null range for Figure {self.title}... returning an empty Assembly.")
Assembly.__init__(self)
self.yscale = 0
return
if aspect == "equal":
self.aspect = dx / dy # so that yscale becomes 1
self.yscale = dx / dy / self.aspect
y0lim *= self.yscale
y1lim *= self.yscale
self.x0lim = x0lim
self.x1lim = x1lim
self.y0lim = y0lim
self.y1lim = y1lim
self.ztolerance = options.pop("ztolerance", None)
if self.ztolerance is None:
self.ztolerance = dx / 5000
############## create axes
if self.axopts:
axesopts = self.axopts
if self.axopts is True or self.axopts == 1:
axesopts = {}
tp, ts = utils.makeTicks(
y0lim / self.yscale,
y1lim / self.yscale,
numberOfDivisions,
)
labs = []
for i in range(1, len(tp) - 1):
ynew = utils.linInterpolate(tp[i], [0, 1], [y0lim, y1lim])
labs.append([ynew, ts[i]])
if self.title:
axesopts["htitle"] = self.title
axesopts["yValuesAndLabels"] = labs
axesopts["xrange"] = (x0lim, x1lim)
axesopts["yrange"] = (y0lim, y1lim)
axesopts["zrange"] = (0, 0)
axesopts["yUseBounds"] = True
if 'c' not in axesopts and 'ac' in options:
axesopts["c"] = options['ac']
self.axes = addons.Axes(**axesopts)
Assembly.__init__(self, [self.axes])
self.name = "Figure"
return
def __add__(self, *obj):
        # just to avoid confusion, supersede Assembly.__add__
return self.__iadd__(*obj)
def __iadd__(self, *obj):
if len(obj) == 1 and isinstance(obj[0], Figure):
return self._check_unpack_and_insert(obj[0])
else:
obj = utils.flatten(obj)
return self.insert(*obj)
def _check_unpack_and_insert(self, fig):
if fig.label:
self.labels.append(fig.label)
if abs(self.yscale - fig.yscale) > 0.0001:
colors.printc("ERROR: adding incompatible Figure. Y-scales are different:",
c='r', invert=True)
colors.printc(" first figure:", self.yscale, c='r')
colors.printc(" second figure:", fig.yscale, c='r')
colors.printc("One or more of these parameters can be the cause:", c='r')
if list(self.xlim) != list(fig.xlim):
colors.printc("xlim --------------------------------------------\n",
" first figure:", self.xlim, "\n",
" second figure:", fig.xlim, c='r')
if list(self.ylim) != list(fig.ylim):
colors.printc("ylim --------------------------------------------\n",
" first figure:", self.ylim, "\n",
" second figure:", fig.ylim, c='r')
if list(self.padding) != list(fig.padding):
colors.printc("padding -----------------------------------------\n",
" first figure:", self.padding,
" second figure:", fig.padding, c='r')
if self.aspect != fig.aspect:
colors.printc("aspect ------------------------------------------\n",
" first figure:", self.aspect,
" second figure:", fig.aspect, c='r')
colors.printc("\n*Consider using fig2 = histogram(..., like=fig1)", c='r')
colors.printc(" Or fig += histogram(..., like=fig)\n", c='r')
return self
offset = self.zbounds()[1] + self.ztolerance
for ele in fig.unpack():
if "Axes" in ele.name:
continue
ele.z(offset)
self.insert(ele, rescale=False)
return self
def insert(self, *objs, rescale=True, as3d=True, adjusted=True, cut=True):
"""
Insert objects into a Figure.
        The recommended syntax is to use "+=", which calls `insert()` under the hood.
If a whole Figure is added with "+=", it is unpacked and its objects are added
one by one.
Parameters
----------
rescale : bool
rescale the y axis position while inserting the object.
as3d : bool
            if True keep the aspect ratio of the 3d object, otherwise stretch it in y.
adjusted : bool
adjust the scaling according to the shortest axis
cut : bool
cut off the parts of the object which go beyond the axes frame.
"""
for a in objs:
if a in self.actors:
# should not add twice the same object in plot
continue
if isinstance(a, vedo.Points): # hacky way to identify Points
if a.NCells()==a.NPoints():
poly = a.polydata(False)
if poly.GetNumberOfPolys()==0 and poly.GetNumberOfLines()==0:
as3d = False
rescale = True
if isinstance(a, (shapes.Arrow, shapes.Arrow2D)):
# discard input Arrow and substitute it with a brand new one
# (because scaling would fatally distort the shape)
prop = a.GetProperty()
prop.LightingOff()
py = a.base[1]
a.top[1] = (a.top[1]-py) * self.yscale + py
b = shapes.Arrow2D(a.base, a.top, s=a.s, fill=a.fill).z(a.z())
b.SetProperty(prop)
b.y(py * self.yscale)
a = b
# elif isinstance(a, shapes.Rectangle) and a.radius is not None:
# # discard input Rectangle and substitute it with a brand new one
# # (because scaling would fatally distort the shape of the corners)
# py = a.corner1[1]
# rx1,ry1,rz1 = a.corner1
# rx2,ry2,rz2 = a.corner2
# ry2 = (ry2-py) * self.yscale + py
# b = shapes.Rectangle([rx1,0,rz1], [rx2,ry2,rz2], radius=a.radius).z(a.z())
# b.SetProperty(a.GetProperty())
# b.y(py / self.yscale)
# a = b
else:
if rescale:
if not isinstance(a, Figure):
if as3d and not isinstance(a, self.force_scaling_types):
if adjusted:
scl = np.min([1, self.yscale])
else:
scl = self.yscale
a.scale(scl)
else:
a.scale([1, self.yscale, 1])
# shift it in y
a.y(a.y() * self.yscale)
if cut:
try:
bx0, bx1, by0, by1, _, _ = a.GetBounds()
if self.y0lim > by0:
a.cutWithPlane([0, self.y0lim, 0], [ 0, 1, 0])
if self.y1lim < by1:
a.cutWithPlane([0, self.y1lim, 0], [ 0,-1, 0])
if self.x0lim > bx0:
a.cutWithPlane([self.x0lim, 0, 0], [ 1, 0, 0])
if self.x1lim < bx1:
a.cutWithPlane([self.x1lim, 0, 0], [-1, 0, 0])
except:
# print("insert(): cannot cut", [a])
pass
self.AddPart(a)
self.actors.append(a)
return self
def addLabel(self, text, c=None, marker="", mc='black'):
"""
Manually add en entry label to the legend.
Parameters
----------
text : str
text string for the label.
c : str
color of the text
marker : str, Mesh
a marker char or a Mesh object to be used as marker
mc : str, optional
color for the marker
"""
newlabel = LabelData()
newlabel.text = text.replace("\n"," ")
newlabel.tcolor = c
newlabel.marker = marker
newlabel.mcolor = mc
self.labels.append(newlabel)
return self
def addLegend(self,
pos="top-right",
relative=True,
font=None,
s=1,
c=None,
vspace=1.75,
padding=0.1,
radius=0,
alpha=1,
bc='k7',
lw=1,
lc='k4',
z=0,
):
"""
Add existing labels to form a legend box.
Labels have been previously filled with eg: `plot(..., label="text")`
Parameters
----------
pos : str, list
A string or 2D coordinates. The default is "top-right".
relative : bool
control whether `pos` is absolute or relative, e.i. normalized
to the x and y ranges so that x and y in `pos=[x,y]` should be
both in the range [0,1].
            This flag is ignored if a string descriptor is passed.
Default is True.
font : str, int
font name or number
s : float
global size of the legend
c : str
color of the text
vspace : float
vertical spacing of lines
padding : float
padding of the box as a fraction of the text size
radius : float
border radius of the box
alpha : float
opacity of the box. Values below 1 may cause poor rendering
            because of antialiasing.
Use alpha = 0 to remove the box.
bc : str
box color
lw : int
border line width of the box in pixel units
lc : int
border line color of the box
z : float
set the zorder as z position (useful to avoid overlap)
"""
sx = self.x1lim - self.x0lim
s = s * sx / 55 # so that input can be about 1
ds = 0
texts = []
mks = []
for i, t in enumerate(self.labels):
label = self.labels[i]
t = label.text
if label.tcolor is not None:
c = label.tcolor
tx = vedo.shapes.Text3D(t, s=s, c=c, justify="center-left", font=font)
y0, y1 = tx.ybounds()
ds = max( y1 - y0, ds)
texts.append(tx)
mk = label.marker
if isinstance(mk, vedo.Points):
mk = mk.clone(deep=False).lighting('off')
cm = mk.centerOfMass()
ty0, ty1 = tx.ybounds()
oby0, oby1 = mk.ybounds()
mk.shift(-cm)
mk.origin(cm)
mk.scale((ty1-ty0)/(oby1-oby0))
mk.scale([1.1,1.1,0.01])
elif mk == '-':
mk = vedo.shapes.Marker(mk, s=s*2)
mk.color(label.mcolor)
else:
mk = vedo.shapes.Marker(mk, s=s)
mk.color(label.mcolor)
mks.append(mk)
for i, tx in enumerate(texts):
tx.shift(0,-(i+0)*ds* vspace)
for i, mk in enumerate(mks):
mk.shift(-ds*1.75,-(i+0)*ds* vspace,0)
acts = texts + mks
aleg = Assembly(acts)#.show(axes=1).close()
x0,x1, y0,y1, _,_ = aleg.GetBounds()
if alpha:
dx = x1-x0
dy = y1-y0
if not utils.isSequence(padding):
padding = [padding] * 4
padding = min(padding)
padding = min(padding*dx, padding*dy)
if len(self.labels) == 1:
padding *= 4
x0 -= padding
x1 += padding
y0 -= padding
y1 += padding
box = shapes.Rectangle(
[x0,y0], [x1,y1], radius=radius, c=bc, alpha=alpha,
)
box.shift(0, 0, -dy/100).pickable(False)
if lc:
box.lc(lc).lw(lw)
aleg.AddPart(box)
xlim = self.xlim
ylim = self.ylim
if isinstance(pos, str):
px, py = 0, 0
rx, ry = (xlim[1]+xlim[0])/2, (ylim[1]+ylim[0])/2
shx, shy = 0, 0
if "top" in pos:
if "cent" in pos:
px, py = rx, ylim[1]
shx, shy = (x0+x1)/2, y1
elif "left" in pos:
px, py = xlim[0], ylim[1]
shx, shy = x0, y1
else: # "right"
px, py = xlim[1], ylim[1]
shx, shy = x1, y1
elif "bot" in pos:
if "left" in pos:
px, py = xlim[0], ylim[0]
shx, shy = x0, y0
elif "right" in pos:
px, py = xlim[1], ylim[0]
shx, shy = x1, y0
else: # "cent"
px, py = rx, ylim[0]
shx, shy = (x0+x1)/2, y0
elif "cent" in pos:
if "left" in pos:
px, py = xlim[0], ry
shx, shy = x0, (y0+y1)/2
elif "right" in pos:
px, py = xlim[1], ry
shx, shy = x1, (y0+y1)/2
else:
vedo.logger.error(f"in addLegend(), cannot understand {pos}")
raise RuntimeError
else:
if relative:
rx, ry = pos[0], pos[1]
px = (xlim[1] - xlim[0]) * rx + xlim[0]
py = (ylim[1] - ylim[0]) * ry + ylim[0]
z *= xlim[1] - xlim[0]
else:
px, py = pos[0], pos[1]
shx, shy = x0, y1
aleg.pos(px - shx, py * self.yscale - shy, self.z() + sx/50 + z)
self.insert(aleg, rescale=False, cut=False)
self.legend = aleg
aleg.name = "Legend"
return self
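    # Usage sketch (hypothetical): after inserting labelled plots into a Figure,
    #   fig = plot(x, y, label="data")      # the label fills fig.labels
    #   fig.addLegend(pos="top-right", s=1.2)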
#########################################################################################
class Histogram1D(Figure):
"""
Creates a `Histogram1D(Figure)` object.
Parameters
----------
weights : list
An array of weights, of the same shape as `data`. Each value in `data`
only contributes its associated weight towards the bin count (instead of 1).
bins : int
number of bins
density : bool
normalize the area to 1 by dividing by the nr of entries and bin size
logscale : bool
use logscale on y-axis
fill : bool
        fill bars with solid color `c`
    gap : float
        leave a small space between bars
radius : float
border radius of the top of the histogram bar. Default value is 0.1.
texture : str
url or path to an image to be used as texture for the bin
outline : bool
show outline of the bins
errors : bool
show error bars
xtitle : str
title for the x-axis, can also be set using `axes=dict(xtitle="my x axis")`
ytitle : str
title for the y-axis, can also be set using `axes=dict(ytitle="my y axis")`
padding : float, list
keep a padding space from the axes (as a fraction of the axis size).
This can be a list of four numbers.
aspect : float
the desired aspect ratio of the histogram. Default is 4/3.
grid : bool
        show the background grid for the axes, can also be set using `axes=dict(xyGrid=True)`
ztolerance : float
a tolerance factor to superimpose objects (along the z-axis).
.. hint:: examples/pyplot/histo_1d_a.py histo_1d_b.py histo_1d_c.py histo_1d_d.py
.. image:: https://vedo.embl.es/images/pyplot/histo_1D.png
"""
def __init__(
self,
data,
weights=None,
bins=None,
errors=False,
density=False,
logscale=False,
fill=True,
radius=0.1,
c="olivedrab",
gap=0.02,
alpha=1,
outline=False,
lw=2,
lc="k",
texture="",
marker="",
ms=None,
mc=None,
ma=None,
# Figure and axes options:
like=None,
xlim=None,
ylim=(0, None),
aspect=4/3,
padding=[0., 0., 0., 0.05],
title="",
xtitle=" ",
ytitle=" ",
ac="k",
grid=False,
ztolerance=None,
label="",
**fig_kwargs,
):
if like is not None:
xlim = like.xlim
ylim = like.ylim
aspect = like.aspect
padding = like.padding
if bins is None:
bins = like.bins
if bins is None:
bins = 20
if utils.isSequence(xlim):
# deal with user passing eg [x0, None]
_x0, _x1 = xlim
if _x0 is None:
_x0 = data.min()
if _x1 is None:
_x1 = data.max()
xlim = [_x0, _x1]
# purge NaN from data
        validIds = np.logical_not(np.isnan(data))
        data = np.asarray(data)[validIds].ravel()
fs, edges = np.histogram(data, bins=bins, weights=weights, range=xlim)
binsize = edges[1] - edges[0]
ntot = data.shape[0]
fig_kwargs['title'] = title
fig_kwargs['xtitle'] = xtitle
fig_kwargs['ytitle'] = ytitle
fig_kwargs['ac'] = ac
fig_kwargs['ztolerance'] = ztolerance
fig_kwargs['grid'] = grid
unscaled_errors = np.sqrt(fs)
if density:
scaled_errors = unscaled_errors / (ntot*binsize)
fs = fs / (ntot*binsize)
if ytitle == ' ':
ytitle = f"counts / ( {ntot}~x~{utils.precision(binsize,3)} )"
fig_kwargs['ytitle'] = ytitle
elif logscale:
se_up = np.log10(fs + unscaled_errors/2 + 1)
se_dw = np.log10(fs - unscaled_errors/2 + 1)
scaled_errors = np.c_[se_up, se_dw]
fs = np.log10(fs + 1)
if ytitle == ' ':
ytitle = 'log_10 (counts+1)'
fig_kwargs['ytitle'] = ytitle
x0, x1 = np.min(edges), np.max(edges)
y0, y1 = ylim[0], np.max(fs)
_errors = []
if errors:
if density:
y1 += max(scaled_errors) / 2
_errors = scaled_errors
elif logscale:
y1 = max(scaled_errors[:, 0])
_errors = scaled_errors
else:
y1 += max(unscaled_errors) / 2
_errors = unscaled_errors
if like is None:
ylim = list(ylim)
if xlim is None:
xlim = [x0, x1]
if ylim[1] is None:
ylim[1] = y1
if ylim[0] != 0:
ylim[0] = y0
self.entries = ntot
self.frequencies = fs
self.errors = _errors
self.edges = edges
self.centers = (edges[0:-1] + edges[1:]) / 2
self.mean = data.mean()
self.std = data.std()
self.bins = edges # internally used by "like"
############################### stats legend as htitle
addstats = False
if not title:
if 'axes' not in fig_kwargs:
addstats = True
axesopts = dict()
fig_kwargs['axes'] = axesopts
elif fig_kwargs['axes'] is False:
pass
else:
axesopts = fig_kwargs['axes']
if "htitle" not in axesopts:
addstats = True
if addstats:
htitle = f"Entries:~~{int(self.entries)} "
htitle += f"Mean:~~{utils.precision(self.mean, 4)} "
htitle += f"STD:~~{utils.precision(self.std, 4)} "
axesopts["htitle"] = htitle
axesopts["hTitleJustify"] = "bottom-left"
axesopts["hTitleSize"] = 0.016
axesopts["hTitleOffset"] = [-0.49, 0.01, 0]
if mc is None:
mc = lc
if ma is None:
ma = alpha
if label:
nlab = LabelData()
nlab.text = label
nlab.tcolor = ac
nlab.marker = marker
nlab.mcolor = mc
if not marker:
nlab.marker = 's'
nlab.mcolor = c
fig_kwargs['label'] = nlab
############################################### Figure init
Figure.__init__(self, xlim, ylim, aspect, padding, **fig_kwargs)
if not self.yscale:
return None
if utils.isSequence(bins):
myedges = np.array(bins)
bins = len(bins) - 1
else:
myedges = edges
bin_centers = []
for i in range(bins):
x = (myedges[i] + myedges[i + 1]) / 2
bin_centers.append([x, fs[i], 0])
rs = []
maxheigth = 0
if not fill and not outline and not errors and not marker:
outline = True # otherwise it's empty..
if fill: #####################
if outline:
gap = 0
for i in range(bins):
F = fs[i]
if not F:
continue
p0 = (myedges[i] + gap * binsize, 0, 0)
p1 = (myedges[i + 1] - gap * binsize, F, 0)
if radius:
if gap:
rds = np.array([0, 0, radius, radius])
else:
rd1 = 0 if i < bins-1 and fs[i+1] >= F else radius/2
rd2 = 0 if i > 0 and fs[i-1] >= F else radius/2
rds = np.array([0, 0, rd1, rd2])
p1_yscaled = [p1[0], p1[1]*self.yscale, 0]
r = shapes.Rectangle(p0, p1_yscaled, radius=rds*binsize, res=6)
r.scale([1, 1/self.yscale, 1])
                r.radius = None # so it doesn't get recreated and rescaled by insert()
else:
r = shapes.Rectangle(p0, p1)
if texture:
r.texture(texture)
c = 'w'
# if texture: # causes Segmentation fault vtk9.0.3
# if i>0 and rs[i-1].GetTexture(): # reuse the same texture obj
# r.texture(rs[i-1].GetTexture())
# else:
# r.texture(texture)
# c = 'w'
r.PickableOff()
maxheigth = max(maxheigth, p1[1])
if c in colors.cmaps_names:
col = colors.colorMap((p0[0]+p1[0])/2, c, myedges[0], myedges[-1])
else:
col = c
r.color(col).alpha(alpha).lighting('off')
r.z(self.ztolerance)
rs.append(r)
if outline: #####################
lns = [[myedges[0], 0, 0]]
for i in range(bins):
lns.append([myedges[i], fs[i], 0])
lns.append([myedges[i + 1], fs[i], 0])
maxheigth = max(maxheigth, fs[i])
lns.append([myedges[-1], 0, 0])
outl = shapes.Line(lns, c=lc, alpha=alpha, lw=lw)
outl.z(self.ztolerance*2)
rs.append(outl)
if errors: #####################
for i in range(bins):
x = self.centers[i]
f = fs[i]
if not f:
continue
err = _errors[i]
if utils.isSequence(err):
el = shapes.Line([x, err[0], 0], [x, err[1], 0], c=lc, alpha=alpha, lw=lw)
else:
el = shapes.Line([x, f-err/2, 0], [x, f+err/2, 0], c=lc, alpha=alpha, lw=lw)
el.z(self.ztolerance*3)
rs.append(el)
if marker: #####################
# remove empty bins (we dont want a marker there)
bin_centers = np.array(bin_centers)
bin_centers = bin_centers[bin_centers[:, 1] > 0]
if utils.isSequence(ms): ### variable point size
mk = shapes.Marker(marker, s=1)
mk.scale([1, 1/self.yscale, 1])
msv = np.zeros_like(bin_centers)
msv[:, 0] = ms
marked = shapes.Glyph(
bin_centers, glyphObj=mk, c=mc,
orientationArray=msv, scaleByVectorSize=True
)
else: ### fixed point size
if ms is None:
ms = (xlim[1]-xlim[0]) / 100.0
else:
ms = (xlim[1]-xlim[0]) / 100.0 * ms
if utils.isSequence(mc):
mk = shapes.Marker(marker, s=ms)
mk.scale([1, 1/self.yscale, 1])
msv = np.zeros_like(bin_centers)
msv[:, 0] = 1
marked = shapes.Glyph(
bin_centers, glyphObj=mk, c=mc,
orientationArray=msv, scaleByVectorSize=True
)
else:
mk = shapes.Marker(marker, s=ms)
mk.scale([1, 1/self.yscale, 1])
marked = shapes.Glyph(bin_centers, glyphObj=mk, c=mc)
marked.alpha(ma)
marked.z(self.ztolerance*4)
rs.append(marked)
self.insert(*rs, as3d=False)
self.name = "Histogram1D"
#########################################################################################
class Histogram2D(Figure):
"""
Input data formats `[(x1,x2,..), (y1,y2,..)] or [(x1,y1), (x2,y2),..]`
are both valid.
Use keyword `like=...` if you want to use the same format of a previously
created Figure (useful when superimposing Figures) to make sure
they are compatible and comparable. If they are not compatible
you will receive an error message.
Parameters
----------
bins : list
binning as (nx, ny)
weights : list
array of weights to assign to each entry
cmap : str, lookuptable
color map name or look up table
alpha : float
opacity of the histogram
gap : float
        separation between adjacent bins as a fraction of their size
scalarbar : bool
add a scalarbar to right of the histogram
like : Figure
grab and use the same format of the given Figure (for superimposing)
xlim : list
[x0, x1] range of interest. If left to None will automatically
choose the minimum or the maximum of the data range.
Data outside the range are completely ignored.
ylim : list
[y0, y1] range of interest. If left to None will automatically
choose the minimum or the maximum of the data range.
Data outside the range are completely ignored.
aspect : float
the desired aspect ratio of the figure.
title : str
title of the plot to appear on top.
If left blank some statistics will be shown.
xtitle : str
x axis title
ytitle : str
y axis title
ztitle : str
title for the scalar bar
ac : str
axes color, additional keyword for Axes can also be added
using e.g. `axes=dict(xyGrid=True)`
.. hint:: examples/pyplot/histo_2d.py
.. image:: https://vedo.embl.es/images/pyplot/histo_2D.png
"""
def __init__(
self,
xvalues,
yvalues=None,
bins=25,
weights=None,
cmap="cividis",
alpha=1,
gap=0,
scalarbar=True,
# Figure and axes options:
like=None,
xlim=None,
ylim=(None, None),
zlim=(None, None),
aspect=1,
title="",
xtitle=" ",
ytitle=" ",
ztitle="",
ac="k",
**fig_kwargs,
):
if yvalues is None:
# assume [(x1,y1), (x2,y2) ...] format
yvalues = xvalues[:, 1]
xvalues = xvalues[:, 0]
padding=[0,0,0,0]
if like is not None:
xlim = like.xlim
ylim = like.ylim
aspect = like.aspect
padding = like.padding
if bins is None:
bins = like.bins
if bins is None:
bins = 20
if isinstance(bins, int):
bins = (bins, bins)
if utils.isSequence(xlim):
# deal with user passing eg [x0, None]
_x0, _x1 = xlim
if _x0 is None:
_x0 = xvalues.min()
if _x1 is None:
_x1 = xvalues.max()
xlim = [_x0, _x1]
if utils.isSequence(ylim):
# deal with user passing eg [x0, None]
_y0, _y1 = ylim
if _y0 is None:
_y0 = yvalues.min()
if _y1 is None:
_y1 = yvalues.max()
ylim = [_y0, _y1]
H, xedges, yedges = np.histogram2d(
xvalues, yvalues, weights=weights,
bins=bins, range=(xlim, ylim),
)
xlim = np.min(xedges), np.max(xedges)
ylim = np.min(yedges), np.max(yedges)
dx, dy = xlim[1] - xlim[0], ylim[1] - ylim[0]
fig_kwargs['title'] = title
fig_kwargs['xtitle'] = xtitle
fig_kwargs['ytitle'] = ytitle
fig_kwargs['ac'] = ac
self.entries = len(xvalues)
self.frequencies = H
self.edges = (xedges, yedges)
self.mean = (xvalues.mean(), yvalues.mean())
self.std = (xvalues.std(), yvalues.std())
self.bins = bins # internally used by "like"
############################### stats legend as htitle
addstats = False
if not title:
if 'axes' not in fig_kwargs:
addstats = True
axesopts = dict()
fig_kwargs['axes'] = axesopts
elif fig_kwargs['axes'] is False:
pass
else:
axesopts = fig_kwargs['axes']
if "htitle" not in fig_kwargs['axes']:
addstats = True
if addstats:
htitle = f"Entries:~~{int(self.entries)} "
htitle += f"Mean:~~{utils.precision(self.mean, 3)} "
htitle += f"STD:~~{utils.precision(self.std, 3)} "
axesopts["htitle"] = htitle
axesopts["hTitleJustify"] = "bottom-left"
axesopts["hTitleSize"] = 0.0175
axesopts["hTitleOffset"] = [-0.49, 0.01, 0]
############################################### Figure init
Figure.__init__(self, xlim, ylim, aspect, padding, **fig_kwargs)
if not self.yscale:
return None
##################### the grid
acts = []
g = shapes.Grid(
pos=[(xlim[0] + xlim[1]) / 2, (ylim[0] + ylim[1]) / 2, 0],
s=(dx, dy),
res=bins[:2],
)
g.alpha(alpha).lw(0).wireframe(False).flat().lighting('off')
g.cmap(cmap, np.ravel(H.T), on='cells', vmin=zlim[0], vmax=zlim[1])
if gap:
g.shrink(abs(1-gap))
if scalarbar:
sc = g.addScalarBar3D(ztitle, c=ac).scalarbar
sc.scale([self.yscale,1,1]) ## prescale trick
sbnds = sc.xbounds()
sc.x(self.x1lim + (sbnds[1]-sbnds[0])*0.75)
acts.append(sc)
acts.append(g)
self.insert(*acts, as3d=False)
self.name = "Histogram2D"
#########################################################################################
class PlotBars(Figure):
"""
Creates a `PlotBars(Figure)` object.
Input must be in format `[counts, labels, colors, edges]`.
Either or both `edges` and `colors` are optional and can be omitted.
Use keyword `like=...` if you want to use the same format of a previously
created Figure (useful when superimposing Figures) to make sure
they are compatible and comparable. If they are not compatible
you will receive an error message.
Parameters
----------
errors : bool
show error bars
logscale : bool
use logscale on y-axis
fill : bool
        fill bars with solid color `c`
    gap : float
        leave a small space between bars
radius : float
border radius of the top of the histogram bar. Default value is 0.1.
texture : str
url or path to an image to be used as texture for the bin
outline : bool
show outline of the bins
xtitle : str
title for the x-axis, can also be set using `axes=dict(xtitle="my x axis")`
ytitle : str
title for the y-axis, can also be set using `axes=dict(ytitle="my y axis")`
ac : str
axes color
padding : float, list
keep a padding space from the axes (as a fraction of the axis size).
This can be a list of four numbers.
aspect : float
the desired aspect ratio of the figure. Default is 4/3.
grid : bool
        show the background grid for the axes, can also be set using `axes=dict(xyGrid=True)`
.. hint:: examples/pyplot/histo_1d_a.py histo_1d_b.py histo_1d_c.py histo_1d_d.py
.. image:: https://vedo.embl.es/images/pyplot/histo_1D.png
"""
def __init__(
self,
data,
errors=False,
logscale=False,
fill=True,
gap=0.02,
radius=0.05,
c="olivedrab",
alpha=1,
texture="",
outline=False,
lw=2,
lc="k",
# Figure and axes options:
like=None,
xlim=(None, None),
ylim=(0, None),
aspect=4/3,
padding=[0.025, 0.025, 0, 0.05],
#
title="",
xtitle=" ",
ytitle=" ",
ac="k",
grid=False,
ztolerance=None,
**fig_kwargs,
):
ndata = len(data)
if ndata == 4:
counts, xlabs, cols, edges = data
elif ndata == 3:
counts, xlabs, cols = data
edges = np.array(range(len(counts)+1))+0.5
elif ndata == 2:
counts, xlabs = data
edges = np.array(range(len(counts)+1))+0.5
cols = [c] * len(counts)
else:
m = "barplot error: data must be given as [counts, labels, colors, edges] not\n"
vedo.logger.error(m + f" {data}\n bin edges and colors are optional.")
raise RuntimeError()
# sanity checks
assert len(counts) == len(xlabs)
assert len(counts) == len(cols)
assert len(counts) == len(edges)-1
counts = np.asarray(counts)
edges = np.asarray(edges)
if logscale:
counts = | np.log10(counts + 1) | numpy.log10 |
import networkx as nx
import numpy as np
import pytest
from rpcq.messages import ParameterAref
from pyquil.parser import parse
from pyquil import Program, get_qc
from pyquil.api import QuantumComputer, QPU, QPUCompiler
from pyquil.api._compiler import _collect_classical_memory_write_locations
from pyquil.api._config import PyquilConfig
from pyquil.api._qpu import _extract_bitstrings
from pyquil.device import NxDevice
from pyquil.gates import I, X
from pyquil.quilatom import Expression
def test_qpu_run():
config = PyquilConfig()
if config.qpu_url and config.compiler_url:
g = nx.Graph()
g.add_node(0)
device = NxDevice(g)
qc = QuantumComputer(name="pyQuil test QC",
qam=QPU(endpoint=config.qpu_url,
user="pyQuil test suite"),
device=device,
compiler=QPUCompiler(endpoint=config.compiler_url,
device=device))
bitstrings = qc.run_and_measure(
program=Program(X(0)),
trials=1000,
)
assert bitstrings[0].shape == (1000,)
assert np.mean(bitstrings[0]) > 0.8
bitstrings = qc.run(qc.compile(Program(X(0))))
assert bitstrings.shape == (0, 0)
else:
pytest.skip("QPU or compiler-server not available; skipping QPU run test.")
def test_readout_demux():
p = Program("""DECLARE ro BIT[6]
RESET
RX(pi/2) 0
RX(pi/2) 1
RX(pi/2) 2
RX(pi/2) 3
MEASURE 0 ro[0]
MEASURE 1 ro[1]
MEASURE 2
RX(pi/2) 0
RX(pi/2) 1
RX(pi/2) 2
RX(pi/2) 3
MEASURE 0 ro[2]
MEASURE 1 ro[3]
MEASURE 2 ro[4]
MEASURE 3 ro[5]
""")
ro_sources = _collect_classical_memory_write_locations(p)
assert ro_sources == [
(0, 0),
(1, 0),
(0, 1),
(1, 1),
(2, 1),
(3, 0)
]
num_shots = 1000
buffers = {
# 0 measured, stored twice
"q0": np.random.randint(0, 2, size=(num_shots, 2)),
# 1 measured, stored twice
"q1": np.random.randint(0, 2, size=(num_shots, 2)),
# 2 measured twice, stored once
"q2": np.random.randint(0, 2, size=(num_shots, 2)),
# 3 measured once
"q3": np.random.randint(0, 2, size=num_shots),
}
bitstrings = _extract_bitstrings(ro_sources, buffers=buffers)
assert bitstrings.dtype == np.int64
assert np.allclose(bitstrings[:, 0], buffers["q0"][:, 0])
assert np.allclose(bitstrings[:, 1], buffers["q1"][:, 0])
assert np.allclose(bitstrings[:, 2], buffers["q0"][:, 1])
assert np.allclose(bitstrings[:, 3], buffers["q1"][:, 1])
assert np.allclose(bitstrings[:, 4], buffers["q2"][:, 1])
assert np.allclose(bitstrings[:, 5], buffers["q3"])
GATE_ARITHMETIC_PROGRAMS = [
Program("""
DECLARE theta REAL[1]
DECLARE beta REAL[1]
DECLARE ro BIT[3]
RX(pi/2) 0
RZ(3*theta) 0
RZ(beta+theta) 0
RX(-pi/2) 0
MEASURE 0 ro[0]
MEASURE 1 ro[1]
"""),
Program("""
RESET
DECLARE theta REAL[1]
DECLARE beta REAL[1]
DECLARE ro BIT[2]
RX(pi/2) 0
RZ(theta) 0
"""),
Program("""
DECLARE theta REAL[1]
DECLARE beta REAL[1]
DECLARE ro BIT[3]
RX(pi/2) 0
RZ(0.79*theta) 0
RZ(2*beta+theta*0.5+beta+beta) 0
RX(-pi/2) 0
MEASURE 0 ro[0]
MEASURE 1 ro[1]
"""),
Program("""
RX(pi) 0
"""),
]
@pytest.fixture
def mock_qpu():
return QPU(endpoint='tcp://not-needed:00000',
user="pyQuil test suite")
@pytest.fixture
def qpu_compiler(test_device):
try:
config = PyquilConfig()
compiler = QPUCompiler(endpoint=config.compiler_url, device=test_device, timeout=0.5)
compiler.quil_to_native_quil(Program(I(0)))
return compiler
except Exception as e:
return pytest.skip(f"This test requires compiler connection: {e}")
@pytest.fixture
def gate_arithmetic_binaries(qpu_compiler: QPUCompiler):
return [qpu_compiler.native_quil_to_executable(p) for p in GATE_ARITHMETIC_PROGRAMS]
def test_load(gate_arithmetic_binaries, mock_qpu):
def test_binary(binary):
assert hasattr(binary, "recalculation_table")
mock_qpu.load(binary)
assert mock_qpu.status == 'loaded'
for mref, rule in mock_qpu._executable.recalculation_table.items():
assert isinstance(mref, ParameterAref)
assert isinstance(rule, Expression)
assert len(mock_qpu._executable.recalculation_table) in [0, 2]
for bin in gate_arithmetic_binaries:
test_binary(bin)
def test_build_patch_tables(gate_arithmetic_binaries, mock_qpu):
for idx, bin in enumerate(gate_arithmetic_binaries[:-1]):
mock_qpu.load(bin)
theta = | np.random.randint(-100, 100) | numpy.random.randint |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 10 18:03:32 2018
This file is the new version for calculating the uncertainty value in each patch.
It's better because:
1. It's a dynamic way of choosing the most uncertain patch, since the candidate patches can overlap on adjacent pixels
2. It can be further developed to weight the uncertainty of each patch by 1/(h*w), where h and w are the height and
width of the patch.
The things I need to be careful about are:
1. The selected most uncertain patch needs to be able to be transformed back to the binary mask
2. The uncertainty values of the previously selected patches must not be considered during the selection. I think I can still set a
fixed number of patches, it's just that it will be much more than before.
@author: s161488
"""
import numpy as np
from scipy import signal
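# Example sketch (hypothetical shapes): `image` is the per-pixel uncertainty map of one image
# and the kernel defines the patch size to be scored.
#   uncert_map = np.random.rand(128, 128)
#   kernel = np.ones((32, 32))
#   calculate_score_for_patch(uncert_map, kernel, stride_size=16, Num_Most_Uncert_Patch=5)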
def calculate_score_for_patch(image, kernel, stride_size, Num_Most_Uncert_Patch, crit = None, higher = True):
"""This function is used to calculate the utility score for each patch.
Args:
uncertainty_est: [Im_h, Im_w]
kernel: [k_h, k_w]
Returns:
most_uncert_image_index: [Num_Most_Selec] this should be the real image index
%most_uncert_patch_index: [Num_Most_Selec] this should be the numeric index for the selected patches
binary_mask: [Num_Most_Selec, Im_h, Im_w,1]
%pseudo_label: [Num_Most_Selec, Im_h, Im_w,1]
Op:
Before I enter the uncert_est, I need to consider whether there are already selected patches from the last acquisition step.
If there are, they can be annotated by the binary mask. Therefore,
before I enter the uncert_est, the uncertainty value for the already selected patches should be set to zero.
Then the evaluation of the remaining patches proceeds as below.
Also, another thing to consider is that if there is overlap between the newly selected images and the previously
selected images, I need to aggregate the binary masks, the same as for the ground truth label. This step is the same as before.
"""
Im_h, Im_w = np.shape(image)
kh, kw = np.shape(kernel)
h_num_patch = Im_h-kh+1
w_num_patch = Im_w-kw+1
num_row_wise = h_num_patch//stride_size
num_col_wise = w_num_patch//stride_size
if stride_size == 1:
tot_num_patch_per_im = num_row_wise*num_col_wise
else:
tot_num_patch_per_im = (num_row_wise+1)*(num_col_wise+1)
patch_tot = select_patches_in_image_area(image, kernel, stride_size, num_row_wise, num_col_wise)
patch_tot = np.reshape(patch_tot, [-1])
#print('Based on the experiments, there are %d patches in total'%np.shape(patch_tot)[0])
#print('Based on the calculation, there supposed to be %d patches in tot'%(Num_Im*tot_num_patch_per_im))
sorted_index = np.argsort(patch_tot)
if higher is True:
select_most_uncert_patch = (sorted_index[-Num_Most_Uncert_Patch:]).astype('int64')
else:
select_most_uncert_patch = (sorted_index[:Num_Most_Uncert_Patch]).astype('int64')
if crit is not None:
select_most_uncert_patch = (sorted_index[np.array(sorted(patch_tot))>=crit]).astype('int64')
Num_Most_Uncert_Patch = np.shape(select_most_uncert_patch)[0]
if Num_Most_Uncert_Patch > 0:
# Num_Most_Uncert_Patch = np.shape(select_most_uncert_patch)[0]
select_most_uncert_patch_index_per_im = (select_most_uncert_patch%tot_num_patch_per_im).astype('int64')
if stride_size == 1:
select_most_uncert_patch_rownum_per_im = (select_most_uncert_patch_index_per_im//num_col_wise).astype('int64')
select_most_uncert_patch_colnum_per_im = (select_most_uncert_patch_index_per_im%num_col_wise).astype('int64')
else:
select_most_uncert_patch_rownum_per_im = (select_most_uncert_patch_index_per_im//(num_col_wise+1)).astype('int64')
select_most_uncert_patch_colnum_per_im = (select_most_uncert_patch_index_per_im%(num_col_wise+1)).astype('int64')
transfered_rownum, transfered_colnum = transfer_strid_rowcol_backto_nostride_rowcol(select_most_uncert_patch_rownum_per_im,
select_most_uncert_patch_colnum_per_im,
[h_num_patch, w_num_patch],
[num_row_wise+1, num_col_wise+1],
stride_size)
binary_mask_tot = []
box_coord = np.zeros([Num_Most_Uncert_Patch, 4])
for i in range(Num_Most_Uncert_Patch):
single_binary_mask = generate_binary_mask(Im_h, Im_w,
transfered_rownum[i],
transfered_colnum[i],
kh, kw)
row, col = np.where(single_binary_mask!=0)
row_sort = sorted(row)
col_sort = sorted(col)
box_coord[i,:] = [row_sort[0], col_sort[0], row_sort[-1], col_sort[-1]]
binary_mask_tot.append(single_binary_mask)
# binary_mask_tot = np.sum(binary_mask_tot, axis = 0)
binary_mask_tot = (np.sum(binary_mask_tot, axis = 0)!=0).astype('int32')
box_coord = np.array(box_coord, dtype = np.int32)
else:
binary_mask_tot = np.zeros([Im_h, Im_w], dtype = np.int32)
box_coord = np.zeros([1, 4], dtype = np.int32)
return binary_mask_tot, box_coord
def test_calc_patch():
import matplotlib.pyplot as plt
im = np.random.random([128,192])
kernel = | np.ones([10,10]) | numpy.ones |
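# Illustrative sketch (not part of the original file; argument values are assumptions
# chosen only for demonstration) of how the scoring function above might be exercised:
#     binary_mask, box_coord = calculate_score_for_patch(im, kernel, stride_size=5,
#                                                        Num_Most_Uncert_Patch=3)
#     plt.imshow(binary_mask)
#     plt.show()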
import os
from flask import Flask, render_template, request
from flask.helpers import url_for
from werkzeug.utils import redirect
import tensorflow as tf
from tensorflow import keras
import requests
import numpy as np
app = Flask(__name__)
model = keras.models.load_model('myModel')
@app.route('/', methods=['GET'])
def index():
return render_template('index.html')
@app.route('/predict', methods=['POST'])
def predict():
if request.method == 'POST':
genders = request.form['genders']
if genders == 'male':
genders = np.array([0, 1])
else:
genders = np.array([1, 0])
polyuria = request.form['polyuria']
if polyuria == 'yes':
polyuria = np.array([0, 1])
else:
polyuria = np.array([1, 0])
polydipsia = request.form['polydipsia']
if polydipsia == 'yes':
polydipsia = np.array([0, 1])
else:
polydipsia = | np.array([1, 0]) | numpy.array |
import numpy as np
import PloidPy.binom_model as bm
EPS = np.finfo(np.float64).tiny
# calculates the likelihood values for each heterozygous state for the x matrix
# for the n ploidy model. This model does NOT calculate the WEIGHTED
# likelihood, just the likelihood of each value for each model.
def ploidy_Likelihood(x, n, r, p_nb, p_err, uniform_com=True):
het_p = np.arange(1, np.floor(n/2) + 1) / n
return bm.get_Likelihood(x, het_p, r, p_nb, p_err, uniform_com=uniform_com)
def weighted_Ploidy_Log_Likelihood(lh, p_err, uniform_weights=False,
uniform_com=False):
w = bm.get_Weights(lh, p_err, uniform=uniform_weights,
uniform_com=uniform_com)
a = | np.multiply(lh, w[:, np.newaxis]) | numpy.multiply |
#!/usr/bin/env python
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from builtins import zip
from builtins import range
import os, sys, re
import numpy as np
from .molecule import Molecule, Elements
from .nifty import isint, isfloat
np.set_printoptions(precision=4)
def print_mode(M, mode):
print('\n'.join(['%-3s' % M.elem[ii] + ' '.join(['% 7.3f' % j for j in i]) for ii, i in enumerate(mode)]))
def read_frq_gau(gauout):
XMode = 0
xyz = []
elem = []
elemThis = []
VMode = 0
frqs = []
intens = []
modes = []
for line in open(gauout).readlines():
line = line.strip().expandtabs()
if XMode >= 1:
# Perfectionist here; matches integer, element, and three floating points
if re.match("^[0-9]+ +[0-9]+ +[0-9]+( +[-+]?([0-9]*\.)?[0-9]+){3}$", line):
XMode = 2
sline = line.split()
elemThis.append(Elements[int(sline[1])])
xyz.append([float(i) for i in sline[3:]])
elif XMode == 2: # Break out of the loop if we encounter anything other than atomic data
if elem == []:
elem = elemThis
elif elem != elemThis:
raise RuntimeError('Gaussian output parser will not work if successive calculations have different numbers of atoms!')
elemThis = []
xyz = np.array(xyz)
XMode = -1
elif XMode == 0 and "Coordinates (Angstroms)" in line:
XMode = 1
VModeNxt = None
if line.strip().startswith('Frequencies'):
VMode = 2
if VMode == 2:
s = line.split()
if 'Frequencies' in line:
nfrq = len(s) - 2
frqs += [float(i) for i in s[2:]]
if re.match('^[ \t]*Atom', line):
VModeNxt = 3
readmodes = [[] for i in range(nfrq)]
if 'IR Inten' in line:
intens += [float(i) for i in s[3:]]
if 'Imaginary Frequencies' in line:
VMode = 0
if VMode == 3:
s = line.split()
if len(s) != nfrq*3+2:
VMode = 0
modes += readmodes[:]
else:
for i in range(nfrq):
readmodes[i].append([float(s[j]) for j in range(2+3*i,5+3*i)])
if VModeNxt is not None: VMode = VModeNxt
unnorm = [np.array(i) for i in modes]
return np.array(frqs), [i/np.linalg.norm(i) for i in unnorm], np.array(intens), elem, xyz
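# Illustrative usage sketch (the file name below is a placeholder, not from the original source):
#     frqs, modes, intens, elem, xyz = read_frq_gau('gaussian_freq.log')
#     # frqs: frequencies as printed by Gaussian (cm^-1); modes: normalized displacement vectors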
def read_frq_tc(tcout, scrdir='scr'):
# Unfortunately, TeraChem's frequency data is scattered in the output file and scratch folder
lineCounter = -100
xyzpath = os.path.join(os.path.split(os.path.abspath(tcout))[0], scrdir, 'CentralGeometry.initcond.xyz')
tcdat = os.path.join(os.path.split(os.path.abspath(tcout))[0], scrdir, 'Frequencies.dat')
if not os.path.exists(xyzpath):
raise RuntimeError("%s doesn't exist; please provide a scratch folder to this function" % xyzpath)
if not os.path.exists(tcdat):
raise RuntimeError("%s doesn't exist; please provide a scratch folder to this function" % tcdat)
Mxyz = Molecule(xyzpath)
# This piece of Yudong's code reads the intensities
found_vib = False
freqs = []
intensities = []
for line in open(tcout):
if 'Vibrational Frequencies/Thermochemical Analysis After Removing Rotation and Translation' in line:
found_vib = True
if found_vib:
ls = line.split()
if len(ls) == 8 and ls[0].isdigit():
freqs.append(float(ls[2]))
intensities.append(float(ls[3]))
elif len(ls) == 3 and ls[2].endswith('i'):
freqs.append(-1*float(ls[2][:-1]))
intensities.append(0.0)
if line.strip() == '':
break
if found_vib is False:
raise RuntimeError("No frequency data was found in file %s" % filename)
for lineNumber, line in enumerate(open(tcdat).readlines()):
s = line.split()
if lineNumber == 0:
numAtoms = int(s[-1])
elif lineNumber == 1:
numModes = int(s[-1])
# Make list of unnormalized modes to be read in
frqs = np.zeros(numModes, dtype=float)
unnorm = [np.zeros(3*numAtoms, dtype=float) for i in range(numModes)]
elif all([isint(i) for i in s]):
lineCounter = 0
modeNumbers = [int(i) for i in s]
elif lineCounter == 1:
theseFrqs = [float(i) for i in s]
if len(theseFrqs) != len(modeNumbers):
raise RuntimeError('Parser error! Expected # frequencies to equal # modes')
for i in range(len(theseFrqs)):
frqs[modeNumbers[i]] = theseFrqs[i]
elif lineCounter >= 3:
if lineCounter%3 == 0:
if not isint(s[0]):
raise RuntimeError('Parser error! Expected integer at start of line')
disps = [float(i) for i in s[1:]]
else:
disps = [float(i) for i in s]
idx = lineCounter-3
if len(disps) != len(modeNumbers):
raise RuntimeError('Parser error! Expected # displacements to equal # modes')
for i in range(len(disps)):
unnorm[modeNumbers[i]][lineCounter-3] = disps[i]
if idx == 3*numAtoms-1:
lineCounter = -100
lineCounter += 1
if np.max(np.abs(np.array(frqs)-np.array(freqs))) > 1.0:
raise RuntimeError("Inconsistent frequencies from TeraChem output and scratch")
return np.array(frqs), [i/ | np.linalg.norm(i) | numpy.linalg.norm |
"""
=================================================
@path : PointNet-Series -> modelnet_prepare.py
@IDE : PyCharm
@Author : zYx.Tom, <EMAIL>
@Date : 2022-01-19 15:54
@Version: v0.1
@License: (C)Copyright 2020-2022, zYx.Tom
@Reference:
https://shapenet.cs.stanford.edu/media/modelnet40_normal_resampled.zip
@Desc   : Preprocess the data: merge data that is stored in many separate files into a single list.
==================================================
"""
import pickle
import numpy as np
import argparse
from datetime import datetime
from os.path import join
from tqdm import tqdm
def farthest_point_sample(xyz, npoint):
"""
Farthest point sampling (N: number of points, D: dimension).
Iteratively select the point farthest from the set of already sampled points, so that the samples trace the outline of the object.
The number of sampled points should be smaller than the number of original points; otherwise all points are simply returned.
Input:
xyz: point cloud data, [N, D]
npoint: number of samples
Return:
centroids: sampled point cloud index, [npoint, D]
"""
N, D = xyz.shape
xyz = xyz[:, :3]
centroids = np.zeros((npoint,)) # indices of the sampled (centroid) points
distance = | np.ones((N,)) | numpy.ones |
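# Illustrative sketch (an assumption, not the original continuation of the file above):
# a typical farthest point sampling loop matching the docstring, which repeatedly picks
# the point farthest from the already sampled set.
def _farthest_point_sample_sketch(xyz, npoint):
    N, D = xyz.shape
    xyz = xyz[:, :3]
    centroids = np.zeros((npoint,), dtype=np.int64)  # indices of the sampled points
    distance = np.ones((N,)) * 1e10                  # distance of each point to the sampled set
    farthest = np.random.randint(0, N)               # start from a random point
    for i in range(npoint):
        centroids[i] = farthest
        dist = np.sum((xyz - xyz[farthest]) ** 2, axis=-1)
        distance = np.minimum(distance, dist)
        farthest = int(np.argmax(distance))          # next sample: the farthest remaining point
    return centroids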
# coding=utf-8
# Copyright 2018 The DisentanglementLib Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for modularity_explicitness.py."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from absl.testing import absltest
from disentanglement_lib.data.ground_truth import dummy_data
from disentanglement_lib.evaluation.metrics import modularity_explicitness
import numpy as np
from six.moves import range
import gin.tf
def _identity_discretizer(target, num_bins):
del num_bins
return target
class ModularityTest(absltest.TestCase):
def test_diagonal(self):
importance_matrix = np.diag(5.*np.ones(5))
result = modularity_explicitness.modularity(importance_matrix)
np.testing.assert_allclose(result, 1.0)
def test_diagonal_empty_codes(self):
importance_matrix = np.array([[1., 0.,], [0., 1.], [0., 0.]])
result = modularity_explicitness.modularity(importance_matrix)
np.testing.assert_allclose(result, 2./3.)
def test_zero(self):
importance_matrix = np.zeros(shape=[10, 10], dtype=np.float64)
result = modularity_explicitness.modularity(importance_matrix)
np.testing.assert_allclose(result, .0)
def test_redundant_codes(self):
importance_matrix = np.diag(5.*np.ones(5))
importance_matrix = np.vstack([importance_matrix, importance_matrix])
result = modularity_explicitness.modularity(importance_matrix)
np.testing.assert_allclose(result, 1.)
def test_missed_factors(self):
importance_matrix = np.diag(5.*np.ones(5))
result = modularity_explicitness.modularity(importance_matrix[:2, :])
np.testing.assert_allclose(result, 1.0)
def test_one_code_two_factors(self):
importance_matrix = np.diag(5.*np.ones(5))
importance_matrix = np.hstack([importance_matrix, importance_matrix])
result = modularity_explicitness.modularity(importance_matrix)
np.testing.assert_allclose(result, 1. - 1./9)
class ModularityExplicitnessTest(absltest.TestCase):
def test_metric(self):
gin.bind_parameter("discretizer.discretizer_fn", _identity_discretizer)
gin.bind_parameter("discretizer.num_bins", 10)
ground_truth_data = dummy_data.IdentityObservationsData()
representation_function = lambda x: np.array(x, dtype=np.float64)
random_state = np.random.RandomState(0)
scores = modularity_explicitness.compute_modularity_explicitness(
ground_truth_data, representation_function, random_state, None, 3000,
3000)
self.assertBetween(scores["modularity_score"], 0.9, 1.0)
def test_bad_metric(self):
gin.bind_parameter("discretizer.discretizer_fn", _identity_discretizer)
gin.bind_parameter("discretizer.num_bins", 10)
ground_truth_data = dummy_data.IdentityObservationsData()
random_state_rep = np.random.RandomState(0)
# The representation which randomly permutes the factors, should have equal
# non-zero MI which should give a low modularity score.
def representation_function(x):
code = | np.array(x, dtype=np.float64) | numpy.array |
import os
import tempfile
import zipfile
from io import BytesIO
import cv2
import click
import numpy as np
from skimage import filters
from tqdm import tqdm
import torch
import torch.nn.functional as F
from torch.autograd import Variable
from dataset import rtranspose
from loader import get_loaders
from loss import fmicro_th, dice_th, fmicro_np, dice_np
imsize = 512
def predict(net, loader, verbose=0):
ypred = torch.zeros([len(loader.dataset), imsize, imsize])
ytrue = torch.zeros([len(loader.dataset), imsize, imsize])
ypath = [''] * len(loader.dataset)
ytidx = torch.zeros(len(loader.dataset))
gen = enumerate(loader, 0)
if verbose == 1:
gen = tqdm(list(gen))
for i, data in gen:
images, ytrues, paths, ts = data
images = Variable(images.cuda(), volatile=True)
ypreds = net(images).select(1, 0)
ypred[i * loader.batch_size:(i + 1) * loader.batch_size] = ypreds.data.cpu()
if ytrues is not None:
ytrue[i * loader.batch_size:(i + 1) * loader.batch_size] = ytrues.select(1, 0)
ypath[i * loader.batch_size:(i + 1) * loader.batch_size] = paths
ytidx[i * loader.batch_size:(i + 1) * loader.batch_size] = ts
return ypred, ytrue, ypath, ytidx
@click.command()
@click.option('-n', '--name', default='invalid9000', help='Model name')
@click.option('-m', '--mode', default='best', help='Checkpoint to use')
@click.option('-f', '--nfolds', type=int, prompt=True, help='Number of folds')
@click.option('-b', '--batch-size', default=16, help='Batch size')
def main(name, mode, nfolds, batch_size):
out_root = f'output/{name}/'
os.makedirs(out_root, exist_ok=True)
paths = []
tomix = []
trues = []
probs = []
tidxs = []
EXCLUDED = ['quading/i628806.tif_i282989.tif_i417677.tif_i659777.tif.tif',
'quading/i933123.tif_i154348.tif_i435969.tif_i385761.tif.tif']
enames = sum([os.path.splitext(os.path.basename(p))[0].split('_') for p in EXCLUDED], [])
tmpzip = BytesIO()
with zipfile.ZipFile(tmpzip, 'w', zipfile.ZIP_DEFLATED) as zipf:
# for fold in range(nfolds):
# print(f'fold{fold}:')
#
# mpath = f'weights/{name}/{name}_fold{fold}_{mode}.pth'
# model = torch.load(mpath)
# model.cuda()
# model.eval()
#
# splits = ['train', 'valid', 'test']
#
# for split, loader in zip(splits, get_loaders(batch_size, nfolds, fold, training=False)):
# ypred, ytrue, ypath, ts = predict(model, loader, verbose=1)
# ypred = ypred[:, 6:-6, 6:-6].contiguous()
# ytrue = ytrue[:, 6:-6, 6:-6].contiguous()
# yprob = torch.sigmoid(ypred)
#
# if split != 'test':
# vprob = Variable(yprob, volatile=True).cuda()
# vtrue = Variable(ytrue, volatile=True).cuda()
# ll = F.binary_cross_entropy(vprob, vtrue).data[0]
#
# f1 = fmicro_th(vprob > 0.5, vtrue)
# dc = dice_th(vprob > 0.5, vtrue)
# sc = int(round(1e8 * (f1 + dc) / 2)) / 100
# print(f'[{0.5:0.1f}] '
# f'loss {ll:0.3f} f1 {f1:0.4f} '
# f'dice {dc:0.4f} score {sc:0.2f}')
#
# if split != 'train':
# store = [True for _ in ypath]
# else:
# store = [split == 'valid' or
# fold == 1 and np.any([p.find(str(name)) != -1 for name in enames])
# for p in ypath]
#
# tomix.extend(np.array([split == 'valid' for _ in store])[store])
# paths.extend(np.array(ypath)[store])
# trues.extend(ytrue.numpy()[store])
# probs.extend(yprob.numpy()[store])
# tidxs.extend(ts.numpy()[store])
#
# # untranspose
# for i, (true, prob, t) in enumerate(zip(trues, probs, tidxs)):
# trues[i] = rtranspose(true, t)
# probs[i] = rtranspose(prob, t)
#
# tomix = np.stack(tomix)
# paths = np.stack(paths)
# trues = np.stack(trues)
# probs = np.stack(probs)
#
# np.save(out_root + f'{name}_{mode}_tomix.npy', tomix)
# np.save(out_root + f'{name}_{mode}_paths.npy', paths)
# np.save(out_root + f'{name}_{mode}_trues.npy', trues)
# np.save(out_root + f'{name}_{mode}_probs.npy', probs)
tomix = np.load(out_root + f'{name}_{mode}_tomix.npy')
paths = | np.load(out_root + f'{name}_{mode}_paths.npy') | numpy.load |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import networkx as networkx
import numpy as numpy
import scipy as scipy
import scipy.integrate
class SEIRSModel():
"""
A class to simulate the Deterministic SEIRS Model
===================================================
Params: beta Rate of transmission (exposure)
sigma Rate of infection (upon exposure)
gamma Rate of recovery (upon infection)
xi Rate of re-susceptibility (upon recovery)
mu_I Rate of infection-related death
mu_0 Rate of baseline death
nu Rate of baseline birth
beta_D Rate of transmission (exposure) for individuals with detected infections
sigma_D Rate of infection (upon exposure) for individuals with detected infections
gamma_D Rate of recovery (upon infection) for individuals with detected infections
mu_D Rate of infection-related death for individuals with detected infections
theta_E Rate of baseline testing for exposed individuals
theta_I Rate of baseline testing for infectious individuals
psi_E Probability of positive test results for exposed individuals
psi_I   Probability of positive test results for infectious individuals
q Probability of quarantined individuals interacting with others
initE Init number of exposed individuals
initI Init number of infectious individuals
initD_E     Init number of detected exposed individuals
initD_I Init number of detected infectious individuals
initR Init number of recovered individuals
initF Init number of infection-related fatalities
(all remaining nodes initialized susceptible)
"""
def __init__(self, initN, beta, sigma, gamma, xi=0, mu_I=0, mu_0=0, nu=0, p=0,
beta_D=None, sigma_D=None, gamma_D=None, mu_D=None,
theta_E=0, theta_I=0, psi_E=0, psi_I=0, q=0,
initE=0, initI=10, initD_E=0, initD_I=0, initR=0, initF=0):
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Model Parameters:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.beta = beta
self.sigma = sigma
self.gamma = gamma
self.xi = xi
self.mu_I = mu_I
self.mu_0 = mu_0
self.nu = nu
self.p = p
# Testing-related parameters:
self.beta_D = beta_D if beta_D is not None else self.beta
self.sigma_D = sigma_D if sigma_D is not None else self.sigma
self.gamma_D = gamma_D if gamma_D is not None else self.gamma
self.mu_D = mu_D if mu_D is not None else self.mu_I
self.theta_E = theta_E if theta_E is not None else self.theta_E
self.theta_I = theta_I if theta_I is not None else self.theta_I
self.psi_E = psi_E if psi_E is not None else self.psi_E
self.psi_I = psi_I if psi_I is not None else self.psi_I
self.q = q if q is not None else self.q
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Initialize Timekeeping:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.t = 0
self.tmax = 0 # will be set when run() is called
self.tseries = numpy.array([0])
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Initialize Counts of individuals with each state:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.N = numpy.array([int(initN)])
self.numE = numpy.array([int(initE)])
self.numI = numpy.array([int(initI)])
self.numD_E = numpy.array([int(initD_E)])
self.numD_I = numpy.array([int(initD_I)])
self.numR = numpy.array([int(initR)])
self.numF = numpy.array([int(initF)])
self.numS = numpy.array([self.N[-1] - self.numE[-1] - self.numI[-1] - self.numD_E[-1] - self.numD_I[-1] - self.numR[-1] - self.numF[-1]])
assert(self.numS[0] >= 0), "The specified initial population size N must be greater than or equal to the initial compartment counts."
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
@staticmethod
def system_dfes(t, variables, beta, sigma, gamma, xi, mu_I, mu_0, nu,
beta_D, sigma_D, gamma_D, mu_D, theta_E, theta_I, psi_E, psi_I, q):
S, E, I, D_E, D_I, R, F = variables # variables is a list with compartment counts as elements
N = S + E + I + D_E + D_I + R
dS = - (beta*S*I)/N - q*(beta_D*S*D_I)/N + xi*R + nu*N - mu_0*S
dE = (beta*S*I)/N + q*(beta_D*S*D_I)/N - sigma*E - theta_E*psi_E*E - mu_0*E
dI = sigma*E - gamma*I - mu_I*I - theta_I*psi_I*I - mu_0*I
dDE = theta_E*psi_E*E - sigma_D*D_E - mu_0*D_E
dDI = theta_I*psi_I*I + sigma_D*D_E - gamma_D*D_I - mu_D*D_I - mu_0*D_I
dR = gamma*I + gamma_D*D_I - xi*R - mu_0*R
dF = mu_I*I + mu_D*D_I
return [dS, dE, dI, dDE, dDI, dR, dF]
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def run_epoch(self, runtime, dt=0.1):
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Create a list of times at which the ODE solver should output system values.
# Append this list of times as the model's timeseries
t_eval = numpy.arange(start=self.t, stop=self.t+runtime, step=dt)
# Define the range of time values for the integration:
t_span = (self.t, self.t+runtime)
# Define the initial conditions as the system's current state:
# (which will be the t=0 condition if this is the first run of this model,
# else where the last sim left off)
init_cond = [self.numS[-1], self.numE[-1], self.numI[-1], self.numD_E[-1], self.numD_I[-1], self.numR[-1], self.numF[-1]]
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Solve the system of differential eqns:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
solution = scipy.integrate.solve_ivp(lambda t, X: SEIRSModel.system_dfes(t, X, self.beta, self.sigma, self.gamma, self.xi, self.mu_I, self.mu_0, self.nu,
self.beta_D, self.sigma_D, self.gamma_D, self.mu_D, self.theta_E, self.theta_I, self.psi_E, self.psi_I, self.q
),
t_span=[self.t, self.tmax], y0=init_cond, t_eval=t_eval
)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Store the solution output as the model's time series and data series:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.tseries = numpy.append(self.tseries, solution['t'])
self.numS = numpy.append(self.numS, solution['y'][0])
self.numE = numpy.append(self.numE, solution['y'][1])
self.numI = numpy.append(self.numI, solution['y'][2])
self.numD_E = numpy.append(self.numD_E, solution['y'][3])
self.numD_I = numpy.append(self.numD_I, solution['y'][4])
self.numR = numpy.append(self.numR, solution['y'][5])
self.numF = numpy.append(self.numF, solution['y'][6])
self.t = self.tseries[-1]
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def run(self, T, dt=0.1, checkpoints=None, verbose=False):
if(T>0):
self.tmax += T
else:
return False
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Pre-process checkpoint values:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(checkpoints):
numCheckpoints = len(checkpoints['t'])
paramNames = ['beta', 'sigma', 'gamma', 'xi', 'mu_I', 'mu_0', 'nu',
'beta_D', 'sigma_D', 'gamma_D', 'mu_D',
'theta_E', 'theta_I', 'psi_E', 'psi_I', 'q']
for param in paramNames:
# For params that don't have given checkpoint values (or bad value given),
# set their checkpoint values to the value they have now for all checkpoints.
if(param not in list(checkpoints.keys())
or not isinstance(checkpoints[param], (list, numpy.ndarray))
or len(checkpoints[param])!=numCheckpoints):
checkpoints[param] = [getattr(self, param)]*numCheckpoints
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Run the simulation loop:
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
if(not checkpoints):
self.run_epoch(runtime=self.tmax, dt=dt)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print("t = %.2f" % self.t)
if(verbose):
print("\t S = " + str(self.numS[-1]))
print("\t E = " + str(self.numE[-1]))
print("\t I = " + str(self.numI[-1]))
print("\t D_E = " + str(self.numD_E[-1]))
print("\t D_I = " + str(self.numD_I[-1]))
print("\t R = " + str(self.numR[-1]))
print("\t F = " + str(self.numF[-1]))
else: # checkpoints provided
for checkpointIdx, checkpointTime in enumerate(checkpoints['t']):
# Run the sim until the next checkpoint time:
self.run_epoch(runtime=checkpointTime-self.t, dt=dt)
# Having reached the checkpoint, update applicable parameters:
print("[Checkpoint: Updating parameters]")
for param in paramNames:
setattr(self, param, checkpoints[param][checkpointIdx])
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
print("t = %.2f" % self.t)
if(verbose):
print("\t S = " + str(self.numS[-1]))
print("\t E = " + str(self.numE[-1]))
print("\t I = " + str(self.numI[-1]))
print("\t D_E = " + str(self.numD_E[-1]))
print("\t D_I = " + str(self.numD_I[-1]))
print("\t R = " + str(self.numR[-1]))
print("\t F = " + str(self.numF[-1]))
if(self.t < self.tmax):
self.run_epoch(runtime=self.tmax-self.t, dt=dt)
return True
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def total_num_infections(self, t_idx=None):
if(t_idx is None):
return (self.numE[:] + self.numI[:] + self.numD_E[:] + self.numD_I[:])
else:
return (self.numE[t_idx] + self.numI[t_idx] + self.numD_E[t_idx] + self.numD_I[t_idx])
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def plot(self, ax=None, plot_S='line', plot_E='line', plot_I='line',plot_R='line', plot_F='line',
plot_D_E='line', plot_D_I='line', combine_D=True,
color_S='tab:green', color_E='orange', color_I='crimson', color_R='tab:blue', color_F='black',
color_D_E='mediumorchid', color_D_I='mediumorchid', color_reference='#E0E0E0',
dashed_reference_results=None, dashed_reference_label='reference',
shaded_reference_results=None, shaded_reference_label='reference',
vlines=[], vline_colors=[], vline_styles=[], vline_labels=[],
ylim=None, xlim=None, legend=True, title=None, side_title=None, plot_percentages=True):
import matplotlib.pyplot as pyplot
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Create an Axes object if None provided:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(not ax):
fig, ax = pyplot.subplots()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Prepare data series to be plotted:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Fseries = self.numF/self.N if plot_percentages else self.numF
Eseries = self.numE/self.N if plot_percentages else self.numE
Dseries = (self.numD_E+self.numD_I)/self.N if plot_percentages else (self.numD_E+self.numD_I)
D_Eseries = self.numD_E/self.N if plot_percentages else self.numD_E
D_Iseries = self.numD_I/self.N if plot_percentages else self.numD_I
Iseries = self.numI/self.N if plot_percentages else self.numI
Rseries = self.numR/self.N if plot_percentages else self.numR
Sseries = self.numS/self.N if plot_percentages else self.numS
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the reference data:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(dashed_reference_results):
dashedReference_tseries = dashed_reference_results.tseries[::int(self.N/100)]
dashedReference_IDEstack = (dashed_reference_results.numI + dashed_reference_results.numD_I + dashed_reference_results.numD_E + dashed_reference_results.numE)[::int(self.N/100)] / (self.N if plot_percentages else 1)
ax.plot(dashedReference_tseries, dashedReference_IDEstack, color='#E0E0E0', linestyle='--', label='$I+D+E$ ('+dashed_reference_label+')', zorder=0)
if(shaded_reference_results):
shadedReference_tseries = shaded_reference_results.tseries
shadedReference_IDEstack = (shaded_reference_results.numI + shaded_reference_results.numD_I + shaded_reference_results.numD_E + shaded_reference_results.numE) / (self.N if plot_percentages else 1)
ax.fill_between(shaded_reference_results.tseries, shadedReference_IDEstack, 0, color='#EFEFEF', label='$I+D+E$ ('+shaded_reference_label+')', zorder=0)
ax.plot(shaded_reference_results.tseries, shadedReference_IDEstack, color='#E0E0E0', zorder=1)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the stacked variables:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
topstack = numpy.zeros_like(self.tseries)
if(any(Fseries) and plot_F=='stacked'):
ax.fill_between(numpy.ma.masked_where(Fseries<=0, self.tseries), numpy.ma.masked_where(Fseries<=0, topstack+Fseries), topstack, color=color_F, alpha=0.5, label='$F$', zorder=2)
ax.plot( numpy.ma.masked_where(Fseries<=0, self.tseries), numpy.ma.masked_where(Fseries<=0, topstack+Fseries), color=color_F, zorder=3)
topstack = topstack+Fseries
if(any(Eseries) and plot_E=='stacked'):
ax.fill_between(numpy.ma.masked_where(Eseries<=0, self.tseries), numpy.ma.masked_where(Eseries<=0, topstack+Eseries), topstack, color=color_E, alpha=0.5, label='$E$', zorder=2)
ax.plot( numpy.ma.masked_where(Eseries<=0, self.tseries), numpy.ma.masked_where(Eseries<=0, topstack+Eseries), color=color_E, zorder=3)
topstack = topstack+Eseries
if(combine_D and plot_D_E=='stacked' and plot_D_I=='stacked'):
ax.fill_between(numpy.ma.masked_where(Dseries<=0, self.tseries), numpy.ma.masked_where(Dseries<=0, topstack+Dseries), topstack, color=color_D_E, alpha=0.5, label='$D_{all}$', zorder=2)
ax.plot( numpy.ma.masked_where(Dseries<=0, self.tseries), numpy.ma.masked_where(Dseries<=0, topstack+Dseries), color=color_D_E, zorder=3)
topstack = topstack+Dseries
else:
if(any(D_Eseries) and plot_D_E=='stacked'):
ax.fill_between(numpy.ma.masked_where(D_Eseries<=0, self.tseries), numpy.ma.masked_where(D_Eseries<=0, topstack+D_Eseries), topstack, color=color_D_E, alpha=0.5, label='$D_E$', zorder=2)
ax.plot( numpy.ma.masked_where(D_Eseries<=0, self.tseries), numpy.ma.masked_where(D_Eseries<=0, topstack+D_Eseries), color=color_D_E, zorder=3)
topstack = topstack+D_Eseries
if(any(D_Iseries) and plot_D_I=='stacked'):
ax.fill_between(numpy.ma.masked_where(D_Iseries<=0, self.tseries), numpy.ma.masked_where(D_Iseries<=0, topstack+D_Iseries), topstack, color=color_D_I, alpha=0.5, label='$D_I$', zorder=2)
ax.plot( numpy.ma.masked_where(D_Iseries<=0, self.tseries), numpy.ma.masked_where(D_Iseries<=0, topstack+D_Iseries), color=color_D_I, zorder=3)
topstack = topstack+D_Iseries
if(any(Iseries) and plot_I=='stacked'):
ax.fill_between(numpy.ma.masked_where(Iseries<=0, self.tseries), numpy.ma.masked_where(Iseries<=0, topstack+Iseries), topstack, color=color_I, alpha=0.5, label='$I$', zorder=2)
ax.plot( numpy.ma.masked_where(Iseries<=0, self.tseries), numpy.ma.masked_where(Iseries<=0, topstack+Iseries), color=color_I, zorder=3)
topstack = topstack+Iseries
if(any(Rseries) and plot_R=='stacked'):
ax.fill_between(numpy.ma.masked_where(Rseries<=0, self.tseries), numpy.ma.masked_where(Rseries<=0, topstack+Rseries), topstack, color=color_R, alpha=0.5, label='$R$', zorder=2)
ax.plot( numpy.ma.masked_where(Rseries<=0, self.tseries), numpy.ma.masked_where(Rseries<=0, topstack+Rseries), color=color_R, zorder=3)
topstack = topstack+Rseries
if(any(Sseries) and plot_S=='stacked'):
ax.fill_between(numpy.ma.masked_where(Sseries<=0, self.tseries), numpy.ma.masked_where(Sseries<=0, topstack+Sseries), topstack, color=color_S, alpha=0.5, label='$S$', zorder=2)
ax.plot( numpy.ma.masked_where(Sseries<=0, self.tseries), numpy.ma.masked_where(Sseries<=0, topstack+Sseries), color=color_S, zorder=3)
topstack = topstack+Sseries
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the shaded variables:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(any(Fseries) and plot_F=='shaded'):
ax.fill_between(numpy.ma.masked_where(Fseries<=0, self.tseries), numpy.ma.masked_where(Fseries<=0, Fseries), 0, color=color_F, alpha=0.5, label='$F$', zorder=4)
ax.plot( numpy.ma.masked_where(Fseries<=0, self.tseries), numpy.ma.masked_where(Fseries<=0, Fseries), color=color_F, zorder=5)
if(any(Eseries) and plot_E=='shaded'):
ax.fill_between(numpy.ma.masked_where(Eseries<=0, self.tseries), numpy.ma.masked_where(Eseries<=0, Eseries), 0, color=color_E, alpha=0.5, label='$E$', zorder=4)
ax.plot( numpy.ma.masked_where(Eseries<=0, self.tseries), numpy.ma.masked_where(Eseries<=0, Eseries), color=color_E, zorder=5)
if(combine_D and (any(Dseries) and plot_D_E=='shaded' and plot_D_I=='shaded')):
ax.fill_between(numpy.ma.masked_where(Dseries<=0, self.tseries), numpy.ma.masked_where(Dseries<=0, Dseries), 0, color=color_D_E, alpha=0.5, label='$D_{all}$', zorder=4)
ax.plot( numpy.ma.masked_where(Dseries<=0, self.tseries), numpy.ma.masked_where(Dseries<=0, Dseries), color=color_D_E, zorder=5)
else:
if(any(D_Eseries) and plot_D_E=='shaded'):
ax.fill_between(numpy.ma.masked_where(D_Eseries<=0, self.tseries), numpy.ma.masked_where(D_Eseries<=0, D_Eseries), 0, color=color_D_E, alpha=0.5, label='$D_E$', zorder=4)
ax.plot( numpy.ma.masked_where(D_Eseries<=0, self.tseries), numpy.ma.masked_where(D_Eseries<=0, D_Eseries), color=color_D_E, zorder=5)
if(any(D_Iseries) and plot_D_I=='shaded'):
ax.fill_between(numpy.ma.masked_where(D_Iseries<=0, self.tseries), numpy.ma.masked_where(D_Iseries<=0, D_Iseries), 0, color=color_D_I, alpha=0.5, label='$D_I$', zorder=4)
ax.plot( numpy.ma.masked_where(D_Iseries<=0, self.tseries), numpy.ma.masked_where(D_Iseries<=0, D_Iseries), color=color_D_I, zorder=5)
if(any(Iseries) and plot_I=='shaded'):
ax.fill_between(numpy.ma.masked_where(Iseries<=0, self.tseries), numpy.ma.masked_where(Iseries<=0, Iseries), 0, color=color_I, alpha=0.5, label='$I$', zorder=4)
ax.plot( numpy.ma.masked_where(Iseries<=0, self.tseries), numpy.ma.masked_where(Iseries<=0, Iseries), color=color_I, zorder=5)
if(any(Sseries) and plot_S=='shaded'):
ax.fill_between(numpy.ma.masked_where(Sseries<=0, self.tseries), numpy.ma.masked_where(Sseries<=0, Sseries), 0, color=color_S, alpha=0.5, label='$S$', zorder=4)
ax.plot( numpy.ma.masked_where(Sseries<=0, self.tseries), numpy.ma.masked_where(Sseries<=0, Sseries), color=color_S, zorder=5)
if(any(Rseries) and plot_R=='shaded'):
ax.fill_between(numpy.ma.masked_where(Rseries<=0, self.tseries), numpy.ma.masked_where(Rseries<=0, Rseries), 0, color=color_R, alpha=0.5, label='$R$', zorder=4)
ax.plot( numpy.ma.masked_where(Rseries<=0, self.tseries), numpy.ma.masked_where(Rseries<=0, Rseries), color=color_R, zorder=5)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the line variables:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(any(Fseries) and plot_F=='line'):
ax.plot(numpy.ma.masked_where(Fseries<=0, self.tseries), numpy.ma.masked_where(Fseries<=0, Fseries), color=color_F, label='$F$', zorder=6)
if(any(Eseries) and plot_E=='line'):
ax.plot(numpy.ma.masked_where(Eseries<=0, self.tseries), numpy.ma.masked_where(Eseries<=0, Eseries), color=color_E, label='$E$', zorder=6)
if(combine_D and (any(Dseries) and plot_D_E=='line' and plot_D_I=='line')):
ax.plot(numpy.ma.masked_where(Dseries<=0, self.tseries), numpy.ma.masked_where(Dseries<=0, Dseries), color=color_D_E, label='$D_{all}$', zorder=6)
else:
if(any(D_Eseries) and plot_D_E=='line'):
ax.plot(numpy.ma.masked_where(D_Eseries<=0, self.tseries), numpy.ma.masked_where(D_Eseries<=0, D_Eseries), color=color_D_E, label='$D_E$', zorder=6)
if(any(D_Iseries) and plot_D_I=='line'):
ax.plot(numpy.ma.masked_where(D_Iseries<=0, self.tseries), numpy.ma.masked_where(D_Iseries<=0, D_Iseries), color=color_D_I, label='$D_I$', zorder=6)
if(any(Iseries) and plot_I=='line'):
ax.plot(numpy.ma.masked_where(Iseries<=0, self.tseries), numpy.ma.masked_where(Iseries<=0, Iseries), color=color_I, label='$I$', zorder=6)
if(any(Sseries) and plot_S=='line'):
ax.plot(numpy.ma.masked_where(Sseries<=0, self.tseries), numpy.ma.masked_where(Sseries<=0, Sseries), color=color_S, label='$S$', zorder=6)
if(any(Rseries) and plot_R=='line'):
ax.plot(numpy.ma.masked_where(Rseries<=0, self.tseries), numpy.ma.masked_where(Rseries<=0, Rseries), color=color_R, label='$R$', zorder=6)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the vertical line annotations:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(len(vlines)>0 and len(vline_colors)==0):
vline_colors = ['gray']*len(vlines)
if(len(vlines)>0 and len(vline_labels)==0):
vline_labels = [None]*len(vlines)
if(len(vlines)>0 and len(vline_styles)==0):
vline_styles = [':']*len(vlines)
for vline_x, vline_color, vline_style, vline_label in zip(vlines, vline_colors, vline_styles, vline_labels):
if(vline_x is not None):
ax.axvline(x=vline_x, color=vline_color, linestyle=vline_style, alpha=1, label=vline_label)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the plot labels:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ax.set_xlabel('days')
ax.set_ylabel('percent of population' if plot_percentages else 'number of individuals')
ax.set_xlim(0, (max(self.tseries) if not xlim else xlim))
ax.set_ylim(0, ylim)
if(plot_percentages):
ax.set_yticklabels(['{:,.0%}'.format(y) for y in ax.get_yticks()])
if(legend):
legend_handles, legend_labels = ax.get_legend_handles_labels()
ax.legend(legend_handles[::-1], legend_labels[::-1], loc='upper right', facecolor='white', edgecolor='none', framealpha=0.9, prop={'size': 8})
if(title):
ax.set_title(title, size=12)
if(side_title):
ax.annotate(side_title, (0, 0.5), xytext=(-45, 0), ha='right', va='center',
size=12, rotation=90, xycoords='axes fraction', textcoords='offset points')
return ax
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def figure_basic(self, plot_S='line', plot_E='line', plot_I='line',plot_R='line', plot_F='line',
plot_D_E='line', plot_D_I='line', combine_D=True,
color_S='tab:green', color_E='orange', color_I='crimson', color_R='tab:blue', color_F='black',
color_D_E='mediumorchid', color_D_I='mediumorchid', color_reference='#E0E0E0',
dashed_reference_results=None, dashed_reference_label='reference',
shaded_reference_results=None, shaded_reference_label='reference',
vlines=[], vline_colors=[], vline_styles=[], vline_labels=[],
ylim=None, xlim=None, legend=True, title=None, side_title=None, plot_percentages=True,
figsize=(12,8), use_seaborn=True, show=True):
import matplotlib.pyplot as pyplot
fig, ax = pyplot.subplots(figsize=figsize)
if(use_seaborn):
import seaborn
seaborn.set_style('ticks')
seaborn.despine()
self.plot(ax=ax, plot_S=plot_S, plot_E=plot_E, plot_I=plot_I,plot_R=plot_R, plot_F=plot_F,
plot_D_E=plot_D_E, plot_D_I=plot_D_I, combine_D=combine_D,
color_S=color_S, color_E=color_E, color_I=color_I, color_R=color_R, color_F=color_F,
color_D_E=color_D_E, color_D_I=color_D_I, color_reference=color_reference,
dashed_reference_results=dashed_reference_results, dashed_reference_label=dashed_reference_label,
shaded_reference_results=shaded_reference_results, shaded_reference_label=shaded_reference_label,
vlines=vlines, vline_colors=vline_colors, vline_styles=vline_styles, vline_labels=vline_labels,
ylim=ylim, xlim=xlim, legend=legend, title=title, side_title=side_title, plot_percentages=plot_percentages)
if(show):
pyplot.show()
return fig, ax
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def figure_infections(self, plot_S=False, plot_E='stacked', plot_I='stacked',plot_R=False, plot_F=False,
plot_D_E='stacked', plot_D_I='stacked', combine_D=True,
color_S='tab:green', color_E='orange', color_I='crimson', color_R='tab:blue', color_F='black',
color_D_E='mediumorchid', color_D_I='mediumorchid', color_reference='#E0E0E0',
dashed_reference_results=None, dashed_reference_label='reference',
shaded_reference_results=None, shaded_reference_label='reference',
vlines=[], vline_colors=[], vline_styles=[], vline_labels=[],
ylim=None, xlim=None, legend=True, title=None, side_title=None, plot_percentages=True,
figsize=(12,8), use_seaborn=True, show=True):
import matplotlib.pyplot as pyplot
fig, ax = pyplot.subplots(figsize=figsize)
if(use_seaborn):
import seaborn
seaborn.set_style('ticks')
seaborn.despine()
self.plot(ax=ax, plot_S=plot_S, plot_E=plot_E, plot_I=plot_I,plot_R=plot_R, plot_F=plot_F,
plot_D_E=plot_D_E, plot_D_I=plot_D_I, combine_D=combine_D,
color_S=color_S, color_E=color_E, color_I=color_I, color_R=color_R, color_F=color_F,
color_D_E=color_D_E, color_D_I=color_D_I, color_reference=color_reference,
dashed_reference_results=dashed_reference_results, dashed_reference_label=dashed_reference_label,
shaded_reference_results=shaded_reference_results, shaded_reference_label=shaded_reference_label,
vlines=vlines, vline_colors=vline_colors, vline_styles=vline_styles, vline_labels=vline_labels,
ylim=ylim, xlim=xlim, legend=legend, title=title, side_title=side_title, plot_percentages=plot_percentages)
if(show):
pyplot.show()
return fig, ax
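# Illustrative usage sketch (not part of the original class; parameter values are
# assumptions chosen only for demonstration):
#     model = SEIRSModel(initN=10000, beta=0.15, sigma=1/5.2, gamma=1/12.4, initI=10)
#     model.run(T=300)
#     model.figure_infections(plot_percentages=True)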
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
class SEIRSNetworkModel():
"""
A class to simulate the SEIRS Stochastic Network Model
===================================================
Params: G Network adjacency matrix (numpy array) or Networkx graph object.
beta Rate of transmission (exposure) (global)
beta_local Rate(s) of transmission (exposure) for adjacent individuals (optional)
sigma Rate of infection (upon exposure)
gamma Rate of recovery (upon infection)
xi Rate of re-susceptibility (upon recovery)
mu_I Rate of infection-related death
mu_0 Rate of baseline death
nu Rate of baseline birth
p Probability of interaction outside adjacent nodes
Q Quarantine adjacency matrix (numpy array) or Networkx graph object.
beta_D Rate of transmission (exposure) for individuals with detected infections (global)
beta_D_local    Rate(s) of transmission (exposure) for adjacent individuals with detected infections (optional)
sigma_D Rate of infection (upon exposure) for individuals with detected infections
gamma_D Rate of recovery (upon infection) for individuals with detected infections
mu_D Rate of infection-related death for individuals with detected infections
theta_E Rate of baseline testing for exposed individuals
theta_I Rate of baseline testing for infectious individuals
phi_E Rate of contact tracing testing for exposed individuals
phi_I Rate of contact tracing testing for infectious individuals
psi_E Probability of positive test results for exposed individuals
psi_I   Probability of positive test results for infectious individuals
q Probability of quarantined individuals interaction outside adjacent nodes
initE Init number of exposed individuals
initI Init number of infectious individuals
initD_E     Init number of detected exposed individuals
initD_I Init number of detected infectious individuals
initR Init number of recovered individuals
initF Init number of infection-related fatalities
(all remaining nodes initialized susceptible)
"""
def __init__(self, G, beta, sigma, gamma, xi=0, mu_I=0, mu_0=0, nu=0, beta_local=None, p=0,
Q=None, beta_D=None, sigma_D=None, gamma_D=None, mu_D=None, beta_D_local=None,
theta_E=0, theta_I=0, phi_E=0, phi_I=0, psi_E=1, psi_I=1, q=0,
initE=0, initI=10, initD_E=0, initD_I=0, initR=0, initF=0,
node_groups=None, store_Xseries=False):
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Setup Adjacency matrix:
self.update_G(G)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Setup Quarantine Adjacency matrix:
if(Q is None):
Q = G # If no Q graph is provided, use G in its place
self.update_Q(Q)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Model Parameters:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.parameters = { 'beta':beta, 'sigma':sigma, 'gamma':gamma, 'xi':xi, 'mu_I':mu_I, 'mu_0':mu_0, 'nu':nu,
'beta_D':beta_D, 'sigma_D':sigma_D, 'gamma_D':gamma_D, 'mu_D':mu_D,
'beta_local':beta_local, 'beta_D_local':beta_D_local, 'p':p,'q':q,
'theta_E':theta_E, 'theta_I':theta_I, 'phi_E':phi_E, 'phi_I':phi_I, 'psi_E':psi_E, 'psi_I':psi_I }
self.update_parameters()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Each node can undergo up to 4 transitions (sans vitality/re-susceptibility returns to S state),
# so there are ~numNodes*4 events/timesteps expected; initialize numNodes*5 timestep slots to start
# (will be expanded during run if needed)
self.tseries = numpy.zeros(5*self.numNodes)
self.numE = numpy.zeros(5*self.numNodes)
self.numI = numpy.zeros(5*self.numNodes)
self.numD_E = numpy.zeros(5*self.numNodes)
self.numD_I = numpy.zeros(5*self.numNodes)
self.numR = numpy.zeros(5*self.numNodes)
self.numF = numpy.zeros(5*self.numNodes)
self.numS = numpy.zeros(5*self.numNodes)
self.N = numpy.zeros(5*self.numNodes)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Initialize Timekeeping:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.t = 0
self.tmax = 0 # will be set when run() is called
self.tidx = 0
self.tseries[0] = 0
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Initialize Counts of individuals with each state:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.numE[0] = int(initE)
self.numI[0] = int(initI)
self.numD_E[0] = int(initD_E)
self.numD_I[0] = int(initD_I)
self.numR[0] = int(initR)
self.numF[0] = int(initF)
self.numS[0] = self.numNodes - self.numE[0] - self.numI[0] - self.numD_E[0] - self.numD_I[0] - self.numR[0] - self.numF[0]
self.N[0] = self.numS[0] + self.numE[0] + self.numI[0] + self.numD_E[0] + self.numD_I[0] + self.numR[0]
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Node states:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.S = 1
self.E = 2
self.I = 3
self.D_E = 4
self.D_I = 5
self.R = 6
self.F = 7
self.X = numpy.array([self.S]*int(self.numS[0]) + [self.E]*int(self.numE[0]) + [self.I]*int(self.numI[0]) + [self.D_E]*int(self.numD_E[0]) + [self.D_I]*int(self.numD_I[0]) + [self.R]*int(self.numR[0]) + [self.F]*int(self.numF[0])).reshape((self.numNodes,1))
numpy.random.shuffle(self.X)
self.store_Xseries = store_Xseries
if(store_Xseries):
self.Xseries = numpy.zeros(shape=(5*self.numNodes, self.numNodes), dtype='uint8')
self.Xseries[0,:] = self.X.T
self.transitions = {
'StoE': {'currentState':self.S, 'newState':self.E},
'EtoI': {'currentState':self.E, 'newState':self.I},
'ItoR': {'currentState':self.I, 'newState':self.R},
'ItoF': {'currentState':self.I, 'newState':self.F},
'RtoS': {'currentState':self.R, 'newState':self.S},
'EtoDE': {'currentState':self.E, 'newState':self.D_E},
'ItoDI': {'currentState':self.I, 'newState':self.D_I},
'DEtoDI': {'currentState':self.D_E, 'newState':self.D_I},
'DItoR': {'currentState':self.D_I, 'newState':self.R},
'DItoF': {'currentState':self.D_I, 'newState':self.F},
'_toS': {'currentState':True, 'newState':self.S},
}
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Initialize node subgroup data series:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.nodeGroupData = None
if(node_groups):
self.nodeGroupData = {}
for groupName, nodeList in node_groups.items():
self.nodeGroupData[groupName] = {'nodes': numpy.array(nodeList),
'mask': numpy.isin(range(self.numNodes), nodeList).reshape((self.numNodes,1))}
self.nodeGroupData[groupName]['numS'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numE'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numI'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numD_E'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numD_I'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numR'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numF'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['N'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numS'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.S)
self.nodeGroupData[groupName]['numE'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.E)
self.nodeGroupData[groupName]['numI'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.I)
self.nodeGroupData[groupName]['numD_E'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.D_E)
self.nodeGroupData[groupName]['numD_I'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.D_I)
self.nodeGroupData[groupName]['numR'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.R)
self.nodeGroupData[groupName]['numF'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.F)
self.nodeGroupData[groupName]['N'][0] = self.nodeGroupData[groupName]['numS'][0] + self.nodeGroupData[groupName]['numE'][0] + self.nodeGroupData[groupName]['numI'][0] + self.nodeGroupData[groupName]['numD_E'][0] + self.nodeGroupData[groupName]['numD_I'][0] + self.nodeGroupData[groupName]['numR'][0]
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_parameters(self):
import time
updatestart = time.time()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Model parameters:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.beta = numpy.array(self.parameters['beta']).reshape((self.numNodes, 1)) if isinstance(self.parameters['beta'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['beta'], shape=(self.numNodes,1))
self.sigma = numpy.array(self.parameters['sigma']).reshape((self.numNodes, 1)) if isinstance(self.parameters['sigma'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['sigma'], shape=(self.numNodes,1))
self.gamma = numpy.array(self.parameters['gamma']).reshape((self.numNodes, 1)) if isinstance(self.parameters['gamma'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['gamma'], shape=(self.numNodes,1))
self.xi = numpy.array(self.parameters['xi']).reshape((self.numNodes, 1)) if isinstance(self.parameters['xi'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['xi'], shape=(self.numNodes,1))
self.mu_I = numpy.array(self.parameters['mu_I']).reshape((self.numNodes, 1)) if isinstance(self.parameters['mu_I'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['mu_I'], shape=(self.numNodes,1))
self.mu_0 = numpy.array(self.parameters['mu_0']).reshape((self.numNodes, 1)) if isinstance(self.parameters['mu_0'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['mu_0'], shape=(self.numNodes,1))
self.nu = numpy.array(self.parameters['nu']).reshape((self.numNodes, 1)) if isinstance(self.parameters['nu'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['nu'], shape=(self.numNodes,1))
self.p = numpy.array(self.parameters['p']).reshape((self.numNodes, 1)) if isinstance(self.parameters['p'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['p'], shape=(self.numNodes,1))
# Testing-related parameters:
self.beta_D = (numpy.array(self.parameters['beta_D']).reshape((self.numNodes, 1)) if isinstance(self.parameters['beta_D'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['beta_D'], shape=(self.numNodes,1))) if self.parameters['beta_D'] is not None else self.beta
self.sigma_D = (numpy.array(self.parameters['sigma_D']).reshape((self.numNodes, 1)) if isinstance(self.parameters['sigma_D'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['sigma_D'], shape=(self.numNodes,1))) if self.parameters['sigma_D'] is not None else self.sigma
self.gamma_D = (numpy.array(self.parameters['gamma_D']).reshape((self.numNodes, 1)) if isinstance(self.parameters['gamma_D'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['gamma_D'], shape=(self.numNodes,1))) if self.parameters['gamma_D'] is not None else self.gamma
self.mu_D = (numpy.array(self.parameters['mu_D']).reshape((self.numNodes, 1)) if isinstance(self.parameters['mu_D'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['mu_D'], shape=(self.numNodes,1))) if self.parameters['mu_D'] is not None else self.mu_I
self.theta_E = numpy.array(self.parameters['theta_E']).reshape((self.numNodes, 1)) if isinstance(self.parameters['theta_E'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['theta_E'], shape=(self.numNodes,1))
self.theta_I = numpy.array(self.parameters['theta_I']).reshape((self.numNodes, 1)) if isinstance(self.parameters['theta_I'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['theta_I'], shape=(self.numNodes,1))
self.phi_E = numpy.array(self.parameters['phi_E']).reshape((self.numNodes, 1)) if isinstance(self.parameters['phi_E'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['phi_E'], shape=(self.numNodes,1))
self.phi_I = numpy.array(self.parameters['phi_I']).reshape((self.numNodes, 1)) if isinstance(self.parameters['phi_I'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['phi_I'], shape=(self.numNodes,1))
self.psi_E = numpy.array(self.parameters['psi_E']).reshape((self.numNodes, 1)) if isinstance(self.parameters['psi_E'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['psi_E'], shape=(self.numNodes,1))
self.psi_I = numpy.array(self.parameters['psi_I']).reshape((self.numNodes, 1)) if isinstance(self.parameters['psi_I'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['psi_I'], shape=(self.numNodes,1))
self.q = numpy.array(self.parameters['q']).reshape((self.numNodes, 1)) if isinstance(self.parameters['q'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['q'], shape=(self.numNodes,1))
#Local transmission parameters:
if(self.parameters['beta_local'] is not None):
if(isinstance(self.parameters['beta_local'], (list, numpy.ndarray))):
if(isinstance(self.parameters['beta_local'], list)):
self.beta_local = numpy.array(self.parameters['beta_local'])
else: # is numpy.ndarray
self.beta_local = self.parameters['beta_local']
if(self.beta_local.ndim == 1):
self.beta_local = self.beta_local.reshape((self.numNodes, 1))
elif(self.beta_local.ndim == 2):
self.beta_local = self.beta_local.reshape((self.numNodes, self.numNodes))
else:
self.beta_local = numpy.full_like(self.beta, fill_value=self.parameters['beta_local'])
else:
self.beta_local = self.beta
#----------------------------------------
if(self.parameters['beta_D_local'] is not None):
if(isinstance(self.parameters['beta_D_local'], (list, numpy.ndarray))):
if(isinstance(self.parameters['beta_D_local'], list)):
self.beta_D_local = numpy.array(self.parameters['beta_D_local'])
else: # is numpy.ndarray
self.beta_D_local = self.parameters['beta_D_local']
if(self.beta_D_local.ndim == 1):
self.beta_D_local = self.beta_D_local.reshape((self.numNodes, 1))
elif(self.beta_D_local.ndim == 2):
self.beta_D_local = self.beta_D_local.reshape((self.numNodes, self.numNodes))
else:
self.beta_D_local = numpy.full_like(self.beta_D, fill_value=self.parameters['beta_D_local'])
else:
self.beta_D_local = self.beta_D
# Pre-multiply beta values by the adjacency matrix ("transmission weight connections")
if(self.beta_local.ndim == 1):
self.A_beta = scipy.sparse.csr_matrix.multiply(self.A, numpy.tile(self.beta_local, (1,self.numNodes))).tocsr()
elif(self.beta_local.ndim == 2):
self.A_beta = scipy.sparse.csr_matrix.multiply(self.A, self.beta_local).tocsr()
# Pre-multiply beta_D values by the quarantine adjacency matrix ("transmission weight connections")
if(self.beta_D_local.ndim == 1):
self.A_Q_beta_D = scipy.sparse.csr_matrix.multiply(self.A_Q, numpy.tile(self.beta_D_local, (1,self.numNodes))).tocsr()
elif(self.beta_D_local.ndim == 2):
self.A_Q_beta_D = scipy.sparse.csr_matrix.multiply(self.A_Q, self.beta_D_local).tocsr()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Update scenario flags:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.update_scenario_flags()
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def node_degrees(self, Amat):
return Amat.sum(axis=0).reshape(self.numNodes,1) # sums of adj matrix cols
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_G(self, new_G):
self.G = new_G
# Adjacency matrix:
if type(new_G)==numpy.ndarray:
self.A = scipy.sparse.csr_matrix(new_G)
elif type(new_G)==networkx.classes.graph.Graph:
self.A = networkx.adj_matrix(new_G) # adj_matrix gives scipy.sparse csr_matrix
else:
raise BaseException("Input an adjacency matrix or networkx object only.")
self.numNodes = int(self.A.shape[1])
self.degree = numpy.asarray(self.node_degrees(self.A)).astype(float)
return
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_Q(self, new_Q):
self.Q = new_Q
# Quarantine Adjacency matrix:
if type(new_Q)==numpy.ndarray:
self.A_Q = scipy.sparse.csr_matrix(new_Q)
elif type(new_Q)==networkx.classes.graph.Graph:
self.A_Q = networkx.adj_matrix(new_Q) # adj_matrix gives scipy.sparse csr_matrix
else:
raise BaseException("Input an adjacency matrix or networkx object only.")
self.numNodes_Q = int(self.A_Q.shape[1])
self.degree_Q = numpy.asarray(self.node_degrees(self.A_Q)).astype(float)
assert(self.numNodes == self.numNodes_Q), "The normal and quarantine adjacency graphs must be of the same size."
return
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_scenario_flags(self):
self.testing_scenario = ( (numpy.any(self.psi_I) and (numpy.any(self.theta_I) or numpy.any(self.phi_I)))
or (numpy.any(self.psi_E) and (numpy.any(self.theta_E) or numpy.any(self.phi_E))) )
self.tracing_scenario = ( (numpy.any(self.psi_E) and numpy.any(self.phi_E))
or (numpy.any(self.psi_I) and numpy.any(self.phi_I)) )
self.vitality_scenario = (numpy.any(self.mu_0) and numpy.any(self.nu))
self.resusceptibility_scenario = (numpy.any(self.xi))
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def total_num_infections(self, t_idx=None):
if(t_idx is None):
return (self.numE[:] + self.numI[:] + self.numD_E[:] + self.numD_I[:])
else:
return (self.numE[t_idx] + self.numI[t_idx] + self.numD_E[t_idx] + self.numD_I[t_idx])
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def calc_propensities(self):
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Pre-calculate matrix multiplication terms that may be used in multiple propensity calculations,
# and check to see if their computation is necessary before doing the multiplication
transmissionTerms_I = numpy.zeros(shape=(self.numNodes,1))
if(numpy.any(self.numI[self.tidx])
and numpy.any(self.beta!=0)):
transmissionTerms_I = numpy.asarray( scipy.sparse.csr_matrix.dot(self.A_beta, self.X==self.I) )
transmissionTerms_DI = numpy.zeros(shape=(self.numNodes,1))
if(self.testing_scenario
and numpy.any(self.numD_I[self.tidx])
and numpy.any(self.beta_D)):
transmissionTerms_DI = numpy.asarray( scipy.sparse.csr_matrix.dot(self.A_Q_beta_D, self.X==self.D_I) )
numContacts_D = numpy.zeros(shape=(self.numNodes,1))
if(self.tracing_scenario
and (numpy.any(self.numD_E[self.tidx]) or numpy.any(self.numD_I[self.tidx]))):
numContacts_D = numpy.asarray( scipy.sparse.csr_matrix.dot( self.A, ((self.X==self.D_E)|(self.X==self.D_I)) ) )
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
propensities_StoE = ( self.p*((self.beta*self.numI[self.tidx] + self.q*self.beta_D*self.numD_I[self.tidx])/self.N[self.tidx])
+ (1-self.p)*numpy.divide((transmissionTerms_I + transmissionTerms_DI), self.degree, out=numpy.zeros_like(self.degree), where=self.degree!=0)
)*(self.X==self.S)
propensities_EtoI = self.sigma*(self.X==self.E)
propensities_ItoR = self.gamma*(self.X==self.I)
propensities_ItoF = self.mu_I*(self.X==self.I)
# propensities_EtoDE = ( self.theta_E + numpy.divide((self.phi_E*numContacts_D), self.degree, out=numpy.zeros_like(self.degree), where=self.degree!=0) )*self.psi_E*(self.X==self.E)
propensities_EtoDE = (self.theta_E + self.phi_E*numContacts_D)*self.psi_E*(self.X==self.E)
# propensities_ItoDI = ( self.theta_I + numpy.divide((self.phi_I*numContacts_D), self.degree, out=numpy.zeros_like(self.degree), where=self.degree!=0) )*self.psi_I*(self.X==self.I)
propensities_ItoDI = (self.theta_I + self.phi_I*numContacts_D)*self.psi_I*(self.X==self.I)
propensities_DEtoDI = self.sigma_D*(self.X==self.D_E)
propensities_DItoR = self.gamma_D*(self.X==self.D_I)
propensities_DItoF = self.mu_D*(self.X==self.D_I)
propensities_RtoS = self.xi*(self.X==self.R)
propensities__toS = self.nu*(self.X!=self.F)
propensities = numpy.hstack([propensities_StoE, propensities_EtoI,
propensities_ItoR, propensities_ItoF,
propensities_EtoDE, propensities_ItoDI, propensities_DEtoDI,
propensities_DItoR, propensities_DItoF,
propensities_RtoS, propensities__toS])
columns = ['StoE', 'EtoI', 'ItoR', 'ItoF', 'EtoDE', 'ItoDI', 'DEtoDI', 'DItoR', 'DItoF', 'RtoS', '_toS']
return propensities, columns
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def increase_data_series_length(self):
self.tseries= numpy.pad(self.tseries, [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.numS = numpy.pad(self.numS, [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.numE = numpy.pad(self.numE, [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.numI = numpy.pad(self.numI, [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.numD_E = numpy.pad(self.numD_E, [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.numD_I = numpy.pad(self.numD_I, [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.numR = numpy.pad(self.numR, [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.numF = numpy.pad(self.numF, [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.N = numpy.pad(self.N, [(0, 5*self.numNodes)], mode='constant', constant_values=0)
if(self.store_Xseries):
self.Xseries = numpy.pad(self.Xseries, [(0, 5*self.numNodes), (0,0)], mode='constant', constant_values=0)
if(self.nodeGroupData):
for groupName in self.nodeGroupData:
self.nodeGroupData[groupName]['numS'] = numpy.pad(self.nodeGroupData[groupName]['numS'], [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.nodeGroupData[groupName]['numE'] = numpy.pad(self.nodeGroupData[groupName]['numE'], [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.nodeGroupData[groupName]['numI'] = numpy.pad(self.nodeGroupData[groupName]['numI'], [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.nodeGroupData[groupName]['numD_E'] = numpy.pad(self.nodeGroupData[groupName]['numD_E'], [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.nodeGroupData[groupName]['numD_I'] = numpy.pad(self.nodeGroupData[groupName]['numD_I'], [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.nodeGroupData[groupName]['numR'] = numpy.pad(self.nodeGroupData[groupName]['numR'], [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.nodeGroupData[groupName]['numF'] = numpy.pad(self.nodeGroupData[groupName]['numF'], [(0, 5*self.numNodes)], mode='constant', constant_values=0)
self.nodeGroupData[groupName]['N'] = numpy.pad(self.nodeGroupData[groupName]['N'], [(0, 5*self.numNodes)], mode='constant', constant_values=0)
return None
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def finalize_data_series(self):
self.tseries= numpy.array(self.tseries, dtype=float)[:self.tidx+1]
self.numS = numpy.array(self.numS, dtype=float)[:self.tidx+1]
self.numE = numpy.array(self.numE, dtype=float)[:self.tidx+1]
self.numI = numpy.array(self.numI, dtype=float)[:self.tidx+1]
self.numD_E = numpy.array(self.numD_E, dtype=float)[:self.tidx+1]
self.numD_I = numpy.array(self.numD_I, dtype=float)[:self.tidx+1]
self.numR = numpy.array(self.numR, dtype=float)[:self.tidx+1]
self.numF = numpy.array(self.numF, dtype=float)[:self.tidx+1]
self.N = numpy.array(self.N, dtype=float)[:self.tidx+1]
if(self.store_Xseries):
self.Xseries = self.Xseries[:self.tidx+1, :]
if(self.nodeGroupData):
for groupName in self.nodeGroupData:
self.nodeGroupData[groupName]['numS'] = numpy.array(self.nodeGroupData[groupName]['numS'], dtype=float)[:self.tidx+1]
self.nodeGroupData[groupName]['numE'] = numpy.array(self.nodeGroupData[groupName]['numE'], dtype=float)[:self.tidx+1]
self.nodeGroupData[groupName]['numI'] = numpy.array(self.nodeGroupData[groupName]['numI'], dtype=float)[:self.tidx+1]
self.nodeGroupData[groupName]['numD_E'] = numpy.array(self.nodeGroupData[groupName]['numD_E'], dtype=float)[:self.tidx+1]
self.nodeGroupData[groupName]['numD_I'] = numpy.array(self.nodeGroupData[groupName]['numD_I'], dtype=float)[:self.tidx+1]
self.nodeGroupData[groupName]['numR'] = numpy.array(self.nodeGroupData[groupName]['numR'], dtype=float)[:self.tidx+1]
self.nodeGroupData[groupName]['numF'] = numpy.array(self.nodeGroupData[groupName]['numF'], dtype=float)[:self.tidx+1]
self.nodeGroupData[groupName]['N'] = numpy.array(self.nodeGroupData[groupName]['N'], dtype=float)[:self.tidx+1]
return None
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def run_iteration(self):
if(self.tidx >= len(self.tseries)-1):
# Room has run out in the timeseries storage arrays; extend these arrays (by 5*numNodes slots):
self.increase_data_series_length()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# 1. Generate 2 random numbers uniformly distributed in (0,1)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
r1 = numpy.random.rand()
r2 = numpy.random.rand()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# 2. Calculate propensities
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
propensities, transitionTypes = self.calc_propensities()
# Terminate when probability of all events is 0:
if(propensities.sum() <= 0.0):
self.finalize_data_series()
return False
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# 3. Calculate alpha
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
propensities_flat = propensities.ravel(order='F')
cumsum = propensities_flat.cumsum()
alpha = propensities_flat.sum()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# 4. Compute the time until the next event takes place
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
tau = (1/alpha)*numpy.log(float(1/r1))
self.t += tau
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# 5. Compute which event takes place
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
transitionIdx = numpy.searchsorted(cumsum,r2*alpha)
transitionNode = transitionIdx % self.numNodes
transitionType = transitionTypes[ int(transitionIdx/self.numNodes) ]
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# 6. Update node states and data series
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
assert(self.X[transitionNode] == self.transitions[transitionType]['currentState'] and self.X[transitionNode]!=self.F), "Assertion error: Node "+str(transitionNode)+" has unexpected current state "+str(self.X[transitionNode])+" given the intended transition of "+str(transitionType)+"."
self.X[transitionNode] = self.transitions[transitionType]['newState']
self.tidx += 1
self.tseries[self.tidx] = self.t
self.numS[self.tidx] = numpy.clip(numpy.count_nonzero(self.X==self.S), a_min=0, a_max=self.numNodes)
self.numE[self.tidx] = numpy.clip(numpy.count_nonzero(self.X==self.E), a_min=0, a_max=self.numNodes)
self.numI[self.tidx] = numpy.clip(numpy.count_nonzero(self.X==self.I), a_min=0, a_max=self.numNodes)
self.numD_E[self.tidx] = numpy.clip(numpy.count_nonzero(self.X==self.D_E), a_min=0, a_max=self.numNodes)
self.numD_I[self.tidx] = numpy.clip(numpy.count_nonzero(self.X==self.D_I), a_min=0, a_max=self.numNodes)
self.numR[self.tidx] = numpy.clip(numpy.count_nonzero(self.X==self.R), a_min=0, a_max=self.numNodes)
self.numF[self.tidx] = numpy.clip(numpy.count_nonzero(self.X==self.F), a_min=0, a_max=self.numNodes)
self.N[self.tidx] = numpy.clip((self.numS[self.tidx] + self.numE[self.tidx] + self.numI[self.tidx] + self.numD_E[self.tidx] + self.numD_I[self.tidx] + self.numR[self.tidx]), a_min=0, a_max=self.numNodes)
if(self.store_Xseries):
self.Xseries[self.tidx,:] = self.X.T
if(self.nodeGroupData):
for groupName in self.nodeGroupData:
self.nodeGroupData[groupName]['numS'][self.tidx] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.S)
self.nodeGroupData[groupName]['numE'][self.tidx] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.E)
self.nodeGroupData[groupName]['numI'][self.tidx] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.I)
self.nodeGroupData[groupName]['numD_E'][self.tidx] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.D_E)
self.nodeGroupData[groupName]['numD_I'][self.tidx] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.D_I)
self.nodeGroupData[groupName]['numR'][self.tidx] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.R)
self.nodeGroupData[groupName]['numF'][self.tidx] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.F)
self.nodeGroupData[groupName]['N'][self.tidx] = numpy.clip((self.nodeGroupData[groupName]['numS'][self.tidx] + self.nodeGroupData[groupName]['numE'][self.tidx] + self.nodeGroupData[groupName]['numI'][self.tidx] + self.nodeGroupData[groupName]['numD_E'][self.tidx] + self.nodeGroupData[groupName]['numD_I'][self.tidx] + self.nodeGroupData[groupName]['numR'][self.tidx]), a_min=0, a_max=self.numNodes)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Terminate if tmax reached or num infectious and num exposed is 0:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(self.t >= self.tmax or (self.numI[self.tidx]<1 and self.numE[self.tidx]<1 and self.numD_E[self.tidx]<1 and self.numD_I[self.tidx]<1)):
self.finalize_data_series()
return False
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
return True
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
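# Added explanatory note (not in the original source): run_iteration() performs one step of the
# Gillespie direct method. calc_propensities() returns a (numNodes x 11) matrix whose columns
# follow the order of the `columns` list it returns; ravel(order='F') flattens it column by
# column, so a flat index k corresponds to node (k % numNodes) undergoing transition type
# columns[int(k/numNodes)]. For example, with numNodes=1000, a sampled index of 2534 would
# mean node 534 undergoes 'ItoR'. The waiting time tau is an exponential waiting time with
# rate alpha, sampled as tau = (1/alpha)*log(1/r1).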
def run(self, T, checkpoints=None, print_interval=10, verbose='t'):
if(T>0):
self.tmax += T
else:
return False
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Pre-process checkpoint values:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(checkpoints):
numCheckpoints = len(checkpoints['t'])
for chkpt_param, chkpt_values in checkpoints.items():
assert(isinstance(chkpt_values, (list, numpy.ndarray)) and len(chkpt_values)==numCheckpoints), "Expecting a list of values with length equal to number of checkpoint times ("+str(numCheckpoints)+") for each checkpoint parameter."
checkpointIdx = numpy.searchsorted(checkpoints['t'], self.t) # Finds 1st index in list greater than given val
if(checkpointIdx >= numCheckpoints):
# We are out of checkpoints, stop checking them:
checkpoints = None
else:
checkpointTime = checkpoints['t'][checkpointIdx]
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Run the simulation loop:
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
print_reset = True
running = True
while running:
running = self.run_iteration()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Handle checkpoints if applicable:
if(checkpoints):
if(self.t >= checkpointTime):
if(verbose is not False):
print("[Checkpoint: Updating parameters]")
# A checkpoint has been reached, update param values:
if('G' in list(checkpoints.keys())):
self.update_G(checkpoints['G'][checkpointIdx])
if('Q' in list(checkpoints.keys())):
self.update_Q(checkpoints['Q'][checkpointIdx])
for param in list(self.parameters.keys()):
if(param in list(checkpoints.keys())):
self.parameters.update({param: checkpoints[param][checkpointIdx]})
# Update parameter data structures and scenario flags:
self.update_parameters()
# Update the next checkpoint time:
checkpointIdx = numpy.searchsorted(checkpoints['t'], self.t) # Finds 1st index in list greater than given val
if(checkpointIdx >= numCheckpoints):
# We are out of checkpoints, stop checking them:
checkpoints = None
else:
checkpointTime = checkpoints['t'][checkpointIdx]
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(print_interval):
if(print_reset and (int(self.t) % print_interval == 0)):
if(verbose=="t"):
print("t = %.2f" % self.t)
if(verbose==True):
print("t = %.2f" % self.t)
print("\t S = " + str(self.numS[self.tidx]))
print("\t E = " + str(self.numE[self.tidx]))
print("\t I = " + str(self.numI[self.tidx]))
print("\t D_E = " + str(self.numD_E[self.tidx]))
print("\t D_I = " + str(self.numD_I[self.tidx]))
print("\t R = " + str(self.numR[self.tidx]))
print("\t F = " + str(self.numF[self.tidx]))
print_reset = False
elif(not print_reset and (int(self.t) % print_interval != 0)):
print_reset = True
return True
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
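# Added explanatory note (not in the original source): `checkpoints` is expected to be a dict
# containing a 't' key (list of checkpoint times) plus, for each parameter to be changed, a list
# of the same length giving the value to apply at each checkpoint time. 'G' and 'Q' entries may
# also be supplied to swap in new interaction/quarantine networks. A hypothetical example:
#   checkpoints = {'t':       [20, 50],
#                  'theta_E': [0.02, 0.02],
#                  'theta_I': [0.02, 0.02],
#                  'phi_E':   [0.2,  0.2],
#                  'phi_I':   [0.2,  0.2]}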
def plot(self, ax=None, plot_S='line', plot_E='line', plot_I='line',plot_R='line', plot_F='line',
plot_D_E='line', plot_D_I='line', combine_D=True,
color_S='tab:green', color_E='orange', color_I='crimson', color_R='tab:blue', color_F='black',
color_D_E='mediumorchid', color_D_I='mediumorchid', color_reference='#E0E0E0',
dashed_reference_results=None, dashed_reference_label='reference',
shaded_reference_results=None, shaded_reference_label='reference',
vlines=[], vline_colors=[], vline_styles=[], vline_labels=[],
ylim=None, xlim=None, legend=True, title=None, side_title=None, plot_percentages=True):
import matplotlib.pyplot as pyplot
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Create an Axes object if None provided:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(not ax):
fig, ax = pyplot.subplots()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Prepare data series to be plotted:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Fseries = self.numF/self.numNodes if plot_percentages else self.numF
Eseries = self.numE/self.numNodes if plot_percentages else self.numE
Dseries = (self.numD_E+self.numD_I)/self.numNodes if plot_percentages else (self.numD_E+self.numD_I)
D_Eseries = self.numD_E/self.numNodes if plot_percentages else self.numD_E
D_Iseries = self.numD_I/self.numNodes if plot_percentages else self.numD_I
Iseries = self.numI/self.numNodes if plot_percentages else self.numI
Rseries = self.numR/self.numNodes if plot_percentages else self.numR
Sseries = self.numS/self.numNodes if plot_percentages else self.numS
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the reference data:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(dashed_reference_results):
dashedReference_tseries = dashed_reference_results.tseries[::int(self.numNodes/100)]
dashedReference_IDEstack = (dashed_reference_results.numI + dashed_reference_results.numD_I + dashed_reference_results.numD_E + dashed_reference_results.numE)[::int(self.numNodes/100)] / (self.numNodes if plot_percentages else 1)
ax.plot(dashedReference_tseries, dashedReference_IDEstack, color='#E0E0E0', linestyle='--', label='$I+D+E$ ('+dashed_reference_label+')', zorder=0)
if(shaded_reference_results):
shadedReference_tseries = shaded_reference_results.tseries
shadedReference_IDEstack = (shaded_reference_results.numI + shaded_reference_results.numD_I + shaded_reference_results.numD_E + shaded_reference_results.numE) / (self.numNodes if plot_percentages else 1)
ax.fill_between(shaded_reference_results.tseries, shadedReference_IDEstack, 0, color='#EFEFEF', label='$I+D+E$ ('+shaded_reference_label+')', zorder=0)
ax.plot(shaded_reference_results.tseries, shadedReference_IDEstack, color='#E0E0E0', zorder=1)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the stacked variables:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
topstack = numpy.zeros_like(self.tseries)
if(any(Fseries) and plot_F=='stacked'):
ax.fill_between(numpy.ma.masked_where(Fseries<=0, self.tseries), numpy.ma.masked_where(Fseries<=0, topstack+Fseries), topstack, color=color_F, alpha=0.5, label='$F$', zorder=2)
ax.plot( numpy.ma.masked_where(Fseries<=0, self.tseries), numpy.ma.masked_where(Fseries<=0, topstack+Fseries), color=color_F, zorder=3)
topstack = topstack+Fseries
if(any(Eseries) and plot_E=='stacked'):
ax.fill_between(numpy.ma.masked_where(Eseries<=0, self.tseries), numpy.ma.masked_where(Eseries<=0, topstack+Eseries), topstack, color=color_E, alpha=0.5, label='$E$', zorder=2)
ax.plot( numpy.ma.masked_where(Eseries<=0, self.tseries), numpy.ma.masked_where(Eseries<=0, topstack+Eseries), color=color_E, zorder=3)
topstack = topstack+Eseries
if(combine_D and plot_D_E=='stacked' and plot_D_I=='stacked'):
ax.fill_between(numpy.ma.masked_where(Dseries<=0, self.tseries), numpy.ma.masked_where(Dseries<=0, topstack+Dseries), topstack, color=color_D_E, alpha=0.5, label='$D_{all}$', zorder=2)
ax.plot( numpy.ma.masked_where(Dseries<=0, self.tseries), numpy.ma.masked_where(Dseries<=0, topstack+Dseries), color=color_D_E, zorder=3)
topstack = topstack+Dseries
else:
if(any(D_Eseries) and plot_D_E=='stacked'):
ax.fill_between(numpy.ma.masked_where(D_Eseries<=0, self.tseries), numpy.ma.masked_where(D_Eseries<=0, topstack+D_Eseries), topstack, color=color_D_E, alpha=0.5, label='$D_E$', zorder=2)
ax.plot( numpy.ma.masked_where(D_Eseries<=0, self.tseries), numpy.ma.masked_where(D_Eseries<=0, topstack+D_Eseries), color=color_D_E, zorder=3)
topstack = topstack+D_Eseries
if(any(D_Iseries) and plot_D_I=='stacked'):
ax.fill_between(numpy.ma.masked_where(D_Iseries<=0, self.tseries), numpy.ma.masked_where(D_Iseries<=0, topstack+D_Iseries), topstack, color=color_D_I, alpha=0.5, label='$D_I$', zorder=2)
ax.plot( numpy.ma.masked_where(D_Iseries<=0, self.tseries), numpy.ma.masked_where(D_Iseries<=0, topstack+D_Iseries), color=color_D_I, zorder=3)
topstack = topstack+D_Iseries
if(any(Iseries) and plot_I=='stacked'):
ax.fill_between(numpy.ma.masked_where(Iseries<=0, self.tseries), numpy.ma.masked_where(Iseries<=0, topstack+Iseries), topstack, color=color_I, alpha=0.5, label='$I$', zorder=2)
ax.plot( numpy.ma.masked_where(Iseries<=0, self.tseries), numpy.ma.masked_where(Iseries<=0, topstack+Iseries), color=color_I, zorder=3)
topstack = topstack+Iseries
if(any(Rseries) and plot_R=='stacked'):
ax.fill_between(numpy.ma.masked_where(Rseries<=0, self.tseries), numpy.ma.masked_where(Rseries<=0, topstack+Rseries), topstack, color=color_R, alpha=0.5, label='$R$', zorder=2)
ax.plot( numpy.ma.masked_where(Rseries<=0, self.tseries), numpy.ma.masked_where(Rseries<=0, topstack+Rseries), color=color_R, zorder=3)
topstack = topstack+Rseries
if(any(Sseries) and plot_S=='stacked'):
ax.fill_between(numpy.ma.masked_where(Sseries<=0, self.tseries), numpy.ma.masked_where(Sseries<=0, topstack+Sseries), topstack, color=color_S, alpha=0.5, label='$S$', zorder=2)
ax.plot( numpy.ma.masked_where(Sseries<=0, self.tseries), numpy.ma.masked_where(Sseries<=0, topstack+Sseries), color=color_S, zorder=3)
topstack = topstack+Sseries
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the shaded variables:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(any(Fseries) and plot_F=='shaded'):
ax.fill_between(numpy.ma.masked_where(Fseries<=0, self.tseries), numpy.ma.masked_where(Fseries<=0, Fseries), 0, color=color_F, alpha=0.5, label='$F$', zorder=4)
ax.plot( numpy.ma.masked_where(Fseries<=0, self.tseries), numpy.ma.masked_where(Fseries<=0, Fseries), color=color_F, zorder=5)
if(any(Eseries) and plot_E=='shaded'):
ax.fill_between(numpy.ma.masked_where(Eseries<=0, self.tseries), numpy.ma.masked_where(Eseries<=0, Eseries), 0, color=color_E, alpha=0.5, label='$E$', zorder=4)
ax.plot( numpy.ma.masked_where(Eseries<=0, self.tseries), numpy.ma.masked_where(Eseries<=0, Eseries), color=color_E, zorder=5)
if(combine_D and (any(Dseries) and plot_D_E=='shaded' and plot_D_I=='shaded')):
ax.fill_between(numpy.ma.masked_where(Dseries<=0, self.tseries), numpy.ma.masked_where(Dseries<=0, Dseries), 0, color=color_D_E, alpha=0.5, label='$D_{all}$', zorder=4)
ax.plot( numpy.ma.masked_where(Dseries<=0, self.tseries), numpy.ma.masked_where(Dseries<=0, Dseries), color=color_D_E, zorder=5)
else:
if(any(D_Eseries) and plot_D_E=='shaded'):
ax.fill_between(numpy.ma.masked_where(D_Eseries<=0, self.tseries), numpy.ma.masked_where(D_Eseries<=0, D_Eseries), 0, color=color_D_E, alpha=0.5, label='$D_E$', zorder=4)
ax.plot( numpy.ma.masked_where(D_Eseries<=0, self.tseries), numpy.ma.masked_where(D_Eseries<=0, D_Eseries), color=color_D_E, zorder=5)
if(any(D_Iseries) and plot_D_I=='shaded'):
ax.fill_between(numpy.ma.masked_where(D_Iseries<=0, self.tseries), numpy.ma.masked_where(D_Iseries<=0, D_Iseries), 0, color=color_D_I, alpha=0.5, label='$D_I$', zorder=4)
ax.plot( numpy.ma.masked_where(D_Iseries<=0, self.tseries), numpy.ma.masked_where(D_Iseries<=0, D_Iseries), color=color_D_I, zorder=5)
if(any(Iseries) and plot_I=='shaded'):
ax.fill_between(numpy.ma.masked_where(Iseries<=0, self.tseries), numpy.ma.masked_where(Iseries<=0, Iseries), 0, color=color_I, alpha=0.5, label='$I$', zorder=4)
ax.plot( numpy.ma.masked_where(Iseries<=0, self.tseries), numpy.ma.masked_where(Iseries<=0, Iseries), color=color_I, zorder=5)
if(any(Sseries) and plot_S=='shaded'):
ax.fill_between(numpy.ma.masked_where(Sseries<=0, self.tseries), numpy.ma.masked_where(Sseries<=0, Sseries), 0, color=color_S, alpha=0.5, label='$S$', zorder=4)
ax.plot( numpy.ma.masked_where(Sseries<=0, self.tseries), numpy.ma.masked_where(Sseries<=0, Sseries), color=color_S, zorder=5)
if(any(Rseries) and plot_R=='shaded'):
ax.fill_between(numpy.ma.masked_where(Rseries<=0, self.tseries), numpy.ma.masked_where(Rseries<=0, Rseries), 0, color=color_R, alpha=0.5, label='$R$', zorder=4)
ax.plot( numpy.ma.masked_where(Rseries<=0, self.tseries), numpy.ma.masked_where(Rseries<=0, Rseries), color=color_R, zorder=5)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the line variables:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(any(Fseries) and plot_F=='line'):
ax.plot(numpy.ma.masked_where(Fseries<=0, self.tseries), numpy.ma.masked_where(Fseries<=0, Fseries), color=color_F, label='$F$', zorder=6)
if(any(Eseries) and plot_E=='line'):
ax.plot(numpy.ma.masked_where(Eseries<=0, self.tseries), numpy.ma.masked_where(Eseries<=0, Eseries), color=color_E, label='$E$', zorder=6)
if(combine_D and (any(Dseries) and plot_D_E=='line' and plot_D_I=='line')):
ax.plot(numpy.ma.masked_where(Dseries<=0, self.tseries), numpy.ma.masked_where(Dseries<=0, Dseries), color=color_D_E, label='$D_{all}$', zorder=6)
else:
if(any(D_Eseries) and plot_D_E=='line'):
ax.plot(numpy.ma.masked_where(D_Eseries<=0, self.tseries), numpy.ma.masked_where(D_Eseries<=0, D_Eseries), color=color_D_E, label='$D_E$', zorder=6)
if(any(D_Iseries) and plot_D_I=='line'):
ax.plot(numpy.ma.masked_where(D_Iseries<=0, self.tseries), numpy.ma.masked_where(D_Iseries<=0, D_Iseries), color=color_D_I, label='$D_I$', zorder=6)
if(any(Iseries) and plot_I=='line'):
ax.plot(numpy.ma.masked_where(Iseries<=0, self.tseries), numpy.ma.masked_where(Iseries<=0, Iseries), color=color_I, label='$I$', zorder=6)
if(any(Sseries) and plot_S=='line'):
ax.plot(numpy.ma.masked_where(Sseries<=0, self.tseries), numpy.ma.masked_where(Sseries<=0, Sseries), color=color_S, label='$S$', zorder=6)
if(any(Rseries) and plot_R=='line'):
ax.plot(numpy.ma.masked_where(Rseries<=0, self.tseries), numpy.ma.masked_where(Rseries<=0, Rseries), color=color_R, label='$R$', zorder=6)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the vertical line annotations:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if(len(vlines)>0 and len(vline_colors)==0):
vline_colors = ['gray']*len(vlines)
if(len(vlines)>0 and len(vline_labels)==0):
vline_labels = [None]*len(vlines)
if(len(vlines)>0 and len(vline_styles)==0):
vline_styles = [':']*len(vlines)
for vline_x, vline_color, vline_style, vline_label in zip(vlines, vline_colors, vline_styles, vline_labels):
if(vline_x is not None):
ax.axvline(x=vline_x, color=vline_color, linestyle=vline_style, alpha=1, label=vline_label)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Draw the plot labels:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
ax.set_xlabel('days')
ax.set_ylabel('percent of population' if plot_percentages else 'number of individuals')
ax.set_xlim(0, (max(self.tseries) if not xlim else xlim))
ax.set_ylim(0, ylim)
if(plot_percentages):
ax.set_yticklabels(['{:,.0%}'.format(y) for y in ax.get_yticks()])
if(legend):
legend_handles, legend_labels = ax.get_legend_handles_labels()
ax.legend(legend_handles[::-1], legend_labels[::-1], loc='upper right', facecolor='white', edgecolor='none', framealpha=0.9, prop={'size': 8})
if(title):
ax.set_title(title, size=12)
if(side_title):
ax.annotate(side_title, (0, 0.5), xytext=(-45, 0), ha='right', va='center',
size=12, rotation=90, xycoords='axes fraction', textcoords='offset points')
return ax
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def figure_basic(self, plot_S='line', plot_E='line', plot_I='line',plot_R='line', plot_F='line',
plot_D_E='line', plot_D_I='line', combine_D=True,
color_S='tab:green', color_E='orange', color_I='crimson', color_R='tab:blue', color_F='black',
color_D_E='mediumorchid', color_D_I='mediumorchid', color_reference='#E0E0E0',
dashed_reference_results=None, dashed_reference_label='reference',
shaded_reference_results=None, shaded_reference_label='reference',
vlines=[], vline_colors=[], vline_styles=[], vline_labels=[],
ylim=None, xlim=None, legend=True, title=None, side_title=None, plot_percentages=True,
figsize=(12,8), use_seaborn=True, show=True):
import matplotlib.pyplot as pyplot
fig, ax = pyplot.subplots(figsize=figsize)
if(use_seaborn):
import seaborn
seaborn.set_style('ticks')
seaborn.despine()
self.plot(ax=ax, plot_S=plot_S, plot_E=plot_E, plot_I=plot_I,plot_R=plot_R, plot_F=plot_F,
plot_D_E=plot_D_E, plot_D_I=plot_D_I, combine_D=combine_D,
color_S=color_S, color_E=color_E, color_I=color_I, color_R=color_R, color_F=color_F,
color_D_E=color_D_E, color_D_I=color_D_I, color_reference=color_reference,
dashed_reference_results=dashed_reference_results, dashed_reference_label=dashed_reference_label,
shaded_reference_results=shaded_reference_results, shaded_reference_label=shaded_reference_label,
vlines=vlines, vline_colors=vline_colors, vline_styles=vline_styles, vline_labels=vline_labels,
ylim=ylim, xlim=xlim, legend=legend, title=title, side_title=side_title, plot_percentages=plot_percentages)
if(show):
pyplot.show()
return fig, ax
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def figure_infections(self, plot_S=False, plot_E='stacked', plot_I='stacked',plot_R=False, plot_F=False,
plot_D_E='stacked', plot_D_I='stacked', combine_D=True,
color_S='tab:green', color_E='orange', color_I='crimson', color_R='tab:blue', color_F='black',
color_D_E='mediumorchid', color_D_I='mediumorchid', color_reference='#E0E0E0',
dashed_reference_results=None, dashed_reference_label='reference',
shaded_reference_results=None, shaded_reference_label='reference',
vlines=[], vline_colors=[], vline_styles=[], vline_labels=[],
ylim=None, xlim=None, legend=True, title=None, side_title=None, plot_percentages=True,
figsize=(12,8), use_seaborn=True, show=True):
import matplotlib.pyplot as pyplot
fig, ax = pyplot.subplots(figsize=figsize)
if(use_seaborn):
import seaborn
seaborn.set_style('ticks')
seaborn.despine()
self.plot(ax=ax, plot_S=plot_S, plot_E=plot_E, plot_I=plot_I,plot_R=plot_R, plot_F=plot_F,
plot_D_E=plot_D_E, plot_D_I=plot_D_I, combine_D=combine_D,
color_S=color_S, color_E=color_E, color_I=color_I, color_R=color_R, color_F=color_F,
color_D_E=color_D_E, color_D_I=color_D_I, color_reference=color_reference,
dashed_reference_results=dashed_reference_results, dashed_reference_label=dashed_reference_label,
shaded_reference_results=shaded_reference_results, shaded_reference_label=shaded_reference_label,
vlines=vlines, vline_colors=vline_colors, vline_styles=vline_styles, vline_labels=vline_labels,
ylim=ylim, xlim=xlim, legend=legend, title=title, side_title=side_title, plot_percentages=plot_percentages)
if(show):
pyplot.show()
return fig, ax
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
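#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# Illustrative usage sketch (added, not in the original source). It assumes the class defined
# above is named SEIRSNetworkModel, that networkx is imported at module level, and that the
# constructor accepts the parameter names handled in its update_parameters() method; the numeric
# values and the initI argument below are hypothetical.
#
#   G = networkx.barabasi_albert_graph(n=1000, m=9)
#   model = SEIRSNetworkModel(G=G, beta=0.155, sigma=1/5.2, gamma=1/12.39, mu_I=0.0004, p=0.5,
#                             theta_E=0.02, theta_I=0.02, phi_E=0.2, phi_I=0.2,
#                             psi_E=1.0, psi_I=1.0, q=0.05, initI=10)
#   model.run(T=300)
#   model.figure_infections()
#%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%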
class SymptomaticSEIRSNetworkModel():
"""
A class to simulate the SEIRS Stochastic Network Model
with Symptom Presentation Compartments
===================================================
Params:
G Network adjacency matrix (numpy array) or Networkx graph object.
beta Rate of transmission (global interactions)
beta_local Rate(s) of transmission between adjacent individuals (optional)
beta_A          Rate of transmission for asymptomatic infectious individuals (global interactions)
beta_A_local    Rate(s) of transmission between adjacent individuals for asymptomatic infectious individuals (optional)
sigma Rate of progression to infectious state (inverse of latent period)
lamda Rate of progression to infectious (a)symptomatic state (inverse of prodromal period)
eta Rate of progression to hospitalized state (inverse of onset-to-admission period)
gamma Rate of recovery for non-hospitalized symptomatic individuals (inverse of symptomatic infectious period)
gamma_A Rate of recovery for asymptomatic individuals (inverse of asymptomatic infectious period)
gamma_H Rate of recovery for hospitalized symptomatic individuals (inverse of hospitalized infectious period)
mu_H Rate of death for hospitalized individuals (inverse of admission-to-death period)
xi Rate of re-susceptibility (upon recovery)
mu_0 Rate of baseline death
nu Rate of baseline birth
a Probability of an infected individual remaining asymptomatic
h Probability of a symptomatic individual being hospitalized
f Probability of death for hospitalized individuals (case fatality rate)
p Probability of individuals interacting with global population
Q Quarantine adjacency matrix (numpy array) or Networkx graph object.
beta_D Rate of transmission for individuals with detected infections (global interactions)
beta_D_local Rate(s) of transmission (exposure) for adjacent individuals with detected infections (optional)
sigma_D Rate of progression to infectious state for individuals with detected infections
lamda_D Rate of progression to infectious (a)symptomatic state for individuals with detected infections
eta_D Rate of progression to hospitalized state for individuals with detected infections
gamma_D_S       Rate of recovery for detected, non-hospitalized symptomatic individuals
gamma_D_A       Rate of recovery for detected asymptomatic individuals
theta_E Rate of random testing for exposed individuals
theta_pre Rate of random testing for infectious pre-symptomatic individuals
theta_S Rate of random testing for infectious symptomatic individuals
theta_A Rate of random testing for infectious asymptomatic individuals
phi_E Rate of testing when a close contact has tested positive for exposed individuals
phi_pre Rate of testing when a close contact has tested positive for infectious pre-symptomatic individuals
phi_S Rate of testing when a close contact has tested positive for infectious symptomatic individuals
phi_A Rate of testing when a close contact has tested positive for infectious asymptomatic individuals
d_E Probability of positive test for exposed individuals
d_pre Probability of positive test for infectious pre-symptomatic individuals
d_S Probability of positive test for infectious symptomatic individuals
d_A Probability of positive test for infectious asymptomatic individuals
q Probability of individuals with detected infection interacting with global population
initE Initial number of exposed individuals
initI_pre Initial number of infectious pre-symptomatic individuals
initI_S Initial number of infectious symptomatic individuals
initI_A Initial number of infectious asymptomatic individuals
initH Initial number of hospitalized individuals
initR Initial number of recovered individuals
initF Initial number of infection-related fatalities
initD_E Initial number of detected exposed individuals
initD_pre Initial number of detected infectious pre-symptomatic individuals
initD_S Initial number of detected infectious symptomatic individuals
initD_A Initial number of detected infectious asymptomatic individuals
(all remaining nodes initialized susceptible)
"""
def __init__(self, G, beta, sigma, lamda, gamma,
eta=0, gamma_A=None, gamma_H=None, mu_H=0, xi=0, mu_0=0, nu=0, a=0, h=0, f=0, p=0,
beta_local=None, beta_A=None, beta_A_local=None,
Q=None, lamda_D=None, beta_D=None, beta_D_local=None, sigma_D=None, eta_D=None, gamma_D_S=None, gamma_D_A=None,
theta_E=0, theta_pre=0, theta_S=0, theta_A=0, phi_E=0, phi_pre=0, phi_S=0, phi_A=0,
d_E=1, d_pre=1, d_S=1, d_A=1, q=0,
initE=0, initI_pre=0, initI_S=0, initI_A=0, initH=0, initR=0, initF=0,
initD_E=0, initD_pre=0, initD_S=0, initD_A=0,
node_groups=None, store_Xseries=False):
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Setup Adjacency matrix:
self.update_G(G)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Setup Quarantine Adjacency matrix:
if(Q is None):
Q = G # If no Q graph is provided, use G in its place
self.update_Q(Q)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Model Parameters:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.parameters = { 'beta':beta, 'sigma':sigma, 'lamda':lamda, 'gamma':gamma,
'eta':eta, 'gamma_A':gamma_A, 'gamma_H':gamma_H, 'mu_H':mu_H,
'xi':xi, 'mu_0':mu_0, 'nu':nu, 'a':a, 'h':h, 'f':f, 'p':p,
'beta_local':beta_local, 'beta_A':beta_A, 'beta_A_local':beta_A_local,
'lamda_D':lamda_D, 'beta_D':beta_D, 'beta_D_local':beta_D_local, 'sigma_D':sigma_D,
'eta_D':eta_D, 'gamma_D_S':gamma_D_S, 'gamma_D_A':gamma_D_A,
'theta_E':theta_E, 'theta_pre':theta_pre, 'theta_S':theta_S, 'theta_A':theta_A,
'phi_E':phi_E, 'phi_pre':phi_pre, 'phi_S':phi_S, 'phi_A':phi_A,
'd_E':d_E, 'd_pre':d_pre, 'd_S':d_S, 'd_A':d_A, 'q':q,
'initE':initE, 'initI_pre':initI_pre, 'initI_S':initI_S, 'initI_A':initI_A,
'initH':initH, 'initR':initR, 'initF':initF,
'initD_E':initD_E, 'initD_pre':initD_pre, 'initD_S':initD_S, 'initD_A':initD_A }
self.update_parameters()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Each node can undergo 4-6 transitions (sans vitality/re-susceptibility returns to S state),
# so initialize 5*numNodes timestep slots to start
# (these arrays are expanded during the run if more room is needed)
self.tseries = numpy.zeros(5*self.numNodes)
self.numS = numpy.zeros(5*self.numNodes)
self.numE = numpy.zeros(5*self.numNodes)
self.numI_pre = numpy.zeros(5*self.numNodes)
self.numI_S = numpy.zeros(5*self.numNodes)
self.numI_A = numpy.zeros(5*self.numNodes)
self.numH = numpy.zeros(5*self.numNodes)
self.numR = numpy.zeros(5*self.numNodes)
self.numF = numpy.zeros(5*self.numNodes)
self.numD_E = numpy.zeros(5*self.numNodes)
self.numD_pre = numpy.zeros(5*self.numNodes)
self.numD_S = numpy.zeros(5*self.numNodes)
self.numD_A = numpy.zeros(5*self.numNodes)
self.N = numpy.zeros(5*self.numNodes)
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Initialize Timekeeping:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.t = 0
self.tmax = 0 # will be set when run() is called
self.tidx = 0
self.tseries[0] = 0
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Initialize Counts of individuals with each state:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.numE[0] = int(initE)
self.numI_pre[0] = int(initI_pre)
self.numI_S[0] = int(initI_S)
self.numI_A[0] = int(initI_A)
self.numH[0] = int(initH)
self.numR[0] = int(initR)
self.numF[0] = int(initF)
self.numD_E[0] = int(initD_E)
self.numD_pre[0] = int(initD_pre)
self.numD_S[0] = int(initD_S)
self.numD_A[0] = int(initD_A)
self.numS[0] = (self.numNodes - self.numE[0] - self.numI_pre[0] - self.numI_S[0] - self.numI_A[0] - self.numH[0] - self.numR[0]
- self.numD_E[0] - self.numD_pre[0] - self.numD_S[0] - self.numD_A[0] - self.numF[0])
self.N[0] = self.numNodes - self.numF[0]
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Node states:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.S = 1
self.E = 2
self.I_pre = 3
self.I_S = 4
self.I_A = 5
self.H = 6
self.R = 7
self.F = 8
self.D_E = 9
self.D_pre = 10
self.D_S = 11
self.D_A = 12
self.X = numpy.array( [self.S]*int(self.numS[0]) + [self.E]*int(self.numE[0])
+ [self.I_pre]*int(self.numI_pre[0]) + [self.I_S]*int(self.numI_S[0]) + [self.I_A]*int(self.numI_A[0])
+ [self.H]*int(self.numH[0]) + [self.R]*int(self.numR[0]) + [self.F]*int(self.numF[0])
+ [self.D_E]*int(self.numD_E[0]) + [self.D_pre]*int(self.numD_pre[0]) + [self.D_S]*int(self.numD_S[0]) + [self.D_A]*int(self.numD_A[0])
).reshape((self.numNodes,1))
numpy.random.shuffle(self.X)
self.store_Xseries = store_Xseries
if(store_Xseries):
self.Xseries = numpy.zeros(shape=(5*self.numNodes, self.numNodes), dtype='uint8')
self.Xseries[0,:] = self.X.T
self.transitions = {
'StoE': {'currentState':self.S, 'newState':self.E},
'EtoIPRE': {'currentState':self.E, 'newState':self.I_pre},
'EtoDE': {'currentState':self.E, 'newState':self.D_E},
'IPREtoIS': {'currentState':self.I_pre, 'newState':self.I_S},
'IPREtoIA': {'currentState':self.I_pre, 'newState':self.I_A},
'IPREtoDPRE': {'currentState':self.I_pre, 'newState':self.D_pre},
'IStoH': {'currentState':self.I_S, 'newState':self.H},
'IStoR': {'currentState':self.I_S, 'newState':self.R},
'IStoDS': {'currentState':self.I_S, 'newState':self.D_S},
'IAtoR': {'currentState':self.I_A, 'newState':self.R},
'IAtoDA': {'currentState':self.I_A, 'newState':self.D_A},
'HtoR': {'currentState':self.H, 'newState':self.R},
'HtoF': {'currentState':self.H, 'newState':self.F},
'RtoS': {'currentState':self.R, 'newState':self.S},
'DEtoDPRE': {'currentState':self.D_E, 'newState':self.D_pre},
'DPREtoDS': {'currentState':self.D_pre, 'newState':self.D_S},
'DPREtoDA': {'currentState':self.D_pre, 'newState':self.D_A},
'DStoH': {'currentState':self.D_S, 'newState':self.H},
'DStoR': {'currentState':self.D_S, 'newState':self.R},
'DAtoR': {'currentState':self.D_A, 'newState':self.R},
'_toS': {'currentState':True, 'newState':self.S},
}
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Initialize node subgroup data series:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.nodeGroupData = None
if(node_groups):
self.nodeGroupData = {}
for groupName, nodeList in node_groups.items():
self.nodeGroupData[groupName] = {'nodes': numpy.array(nodeList),
'mask': numpy.isin(range(self.numNodes), nodeList).reshape((self.numNodes,1))}
self.nodeGroupData[groupName]['numS'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numE'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numI_pre'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numI_S'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numI_A'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numH'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numR'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numF'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numD_E'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numD_pre'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numD_S'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numD_A'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['N'] = numpy.zeros(5*self.numNodes)
self.nodeGroupData[groupName]['numS'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.S)
self.nodeGroupData[groupName]['numE'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.E)
self.nodeGroupData[groupName]['numI_pre'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.I_pre)
self.nodeGroupData[groupName]['numI_S'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.I_S)
self.nodeGroupData[groupName]['numI_A'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.I_A)
self.nodeGroupData[groupName]['numH'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.H)
self.nodeGroupData[groupName]['numR'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.R)
self.nodeGroupData[groupName]['numF'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.F)
self.nodeGroupData[groupName]['numD_E'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.D_E)
self.nodeGroupData[groupName]['numD_pre'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.D_pre)
self.nodeGroupData[groupName]['numD_S'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.D_S)
self.nodeGroupData[groupName]['numD_A'][0] = numpy.count_nonzero(self.nodeGroupData[groupName]['mask']*self.X==self.D_A)
self.nodeGroupData[groupName]['N'][0] = self.numNodes - self.numF[0]
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_parameters(self):
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Model parameters:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.beta = numpy.array(self.parameters['beta']).reshape((self.numNodes, 1)) if isinstance(self.parameters['beta'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['beta'], shape=(self.numNodes,1))
self.beta_A = (numpy.array(self.parameters['beta_A']).reshape((self.numNodes, 1)) if isinstance(self.parameters['beta_A'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['beta_A'], shape=(self.numNodes,1))) if self.parameters['beta_A'] is not None else self.beta
self.sigma = numpy.array(self.parameters['sigma']).reshape((self.numNodes, 1)) if isinstance(self.parameters['sigma'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['sigma'], shape=(self.numNodes,1))
self.lamda = numpy.array(self.parameters['lamda']).reshape((self.numNodes, 1)) if isinstance(self.parameters['lamda'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['lamda'], shape=(self.numNodes,1))
self.gamma = numpy.array(self.parameters['gamma']).reshape((self.numNodes, 1)) if isinstance(self.parameters['gamma'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['gamma'], shape=(self.numNodes,1))
self.eta = numpy.array(self.parameters['eta']).reshape((self.numNodes, 1)) if isinstance(self.parameters['eta'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['eta'], shape=(self.numNodes,1))
self.gamma_A = (numpy.array(self.parameters['gamma_A']).reshape((self.numNodes, 1))if isinstance(self.parameters['gamma_A'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['gamma_A'], shape=(self.numNodes,1))) if self.parameters['gamma_A'] is not None else self.gamma
self.gamma_H = (numpy.array(self.parameters['gamma_H']).reshape((self.numNodes, 1))if isinstance(self.parameters['gamma_H'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['gamma_H'], shape=(self.numNodes,1))) if self.parameters['gamma_H'] is not None else self.gamma
self.mu_H = numpy.array(self.parameters['mu_H']).reshape((self.numNodes, 1)) if isinstance(self.parameters['mu_H'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['mu_H'], shape=(self.numNodes,1))
self.xi = numpy.array(self.parameters['xi']).reshape((self.numNodes, 1)) if isinstance(self.parameters['xi'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['xi'], shape=(self.numNodes,1))
self.mu_0 = numpy.array(self.parameters['mu_0']).reshape((self.numNodes, 1)) if isinstance(self.parameters['mu_0'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['mu_0'], shape=(self.numNodes,1))
self.nu = numpy.array(self.parameters['nu']).reshape((self.numNodes, 1)) if isinstance(self.parameters['nu'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['nu'], shape=(self.numNodes,1))
self.a = numpy.array(self.parameters['a']).reshape((self.numNodes, 1)) if isinstance(self.parameters['a'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['a'], shape=(self.numNodes,1))
self.h = numpy.array(self.parameters['h']).reshape((self.numNodes, 1)) if isinstance(self.parameters['h'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['h'], shape=(self.numNodes,1))
self.f = numpy.array(self.parameters['f']).reshape((self.numNodes, 1)) if isinstance(self.parameters['f'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['f'], shape=(self.numNodes,1))
self.p = numpy.array(self.parameters['p']).reshape((self.numNodes, 1)) if isinstance(self.parameters['p'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['p'], shape=(self.numNodes,1))
# Testing-related parameters:
self.beta_D = (numpy.array(self.parameters['beta_D']).reshape((self.numNodes, 1)) if isinstance(self.parameters['beta_D'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['beta_D'], shape=(self.numNodes,1))) if self.parameters['beta_D'] is not None else self.beta
self.sigma_D = (numpy.array(self.parameters['sigma_D']).reshape((self.numNodes, 1)) if isinstance(self.parameters['sigma_D'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['sigma_D'], shape=(self.numNodes,1))) if self.parameters['sigma_D'] is not None else self.sigma
self.lamda_D = (numpy.array(self.parameters['lamda_D']).reshape((self.numNodes, 1)) if isinstance(self.parameters['lamda_D'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['lamda_D'], shape=(self.numNodes,1))) if self.parameters['lamda_D'] is not None else self.lamda
self.gamma_D_S = (numpy.array(self.parameters['gamma_D_S']).reshape((self.numNodes, 1))if isinstance(self.parameters['gamma_D_S'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['gamma_D_S'], shape=(self.numNodes,1))) if self.parameters['gamma_D_S'] is not None else self.gamma
self.gamma_D_A = (numpy.array(self.parameters['gamma_D_A']).reshape((self.numNodes, 1))if isinstance(self.parameters['gamma_D_A'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['gamma_D_A'], shape=(self.numNodes,1))) if self.parameters['gamma_D_A'] is not None else self.gamma
self.eta_D = (numpy.array(self.parameters['eta_D']).reshape((self.numNodes, 1)) if isinstance(self.parameters['eta_D'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['eta_D'], shape=(self.numNodes,1))) if self.parameters['eta_D'] is not None else self.eta
self.theta_E = numpy.array(self.parameters['theta_E']).reshape((self.numNodes, 1)) if isinstance(self.parameters['theta_E'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['theta_E'], shape=(self.numNodes,1))
self.theta_pre = numpy.array(self.parameters['theta_pre']).reshape((self.numNodes, 1)) if isinstance(self.parameters['theta_pre'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['theta_pre'], shape=(self.numNodes,1))
self.theta_S = numpy.array(self.parameters['theta_S']).reshape((self.numNodes, 1)) if isinstance(self.parameters['theta_S'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['theta_S'], shape=(self.numNodes,1))
self.theta_A = numpy.array(self.parameters['theta_A']).reshape((self.numNodes, 1)) if isinstance(self.parameters['theta_A'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['theta_A'], shape=(self.numNodes,1))
self.phi_E = numpy.array(self.parameters['phi_E']).reshape((self.numNodes, 1)) if isinstance(self.parameters['phi_E'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['phi_E'], shape=(self.numNodes,1))
self.phi_pre = numpy.array(self.parameters['phi_pre']).reshape((self.numNodes, 1)) if isinstance(self.parameters['phi_pre'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['phi_pre'], shape=(self.numNodes,1))
self.phi_S = numpy.array(self.parameters['phi_S']).reshape((self.numNodes, 1)) if isinstance(self.parameters['phi_S'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['phi_S'], shape=(self.numNodes,1))
self.phi_A = numpy.array(self.parameters['phi_A']).reshape((self.numNodes, 1)) if isinstance(self.parameters['phi_A'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['phi_A'], shape=(self.numNodes,1))
self.d_E = numpy.array(self.parameters['d_E']).reshape((self.numNodes, 1)) if isinstance(self.parameters['d_E'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['d_E'], shape=(self.numNodes,1))
self.d_pre = numpy.array(self.parameters['d_pre']).reshape((self.numNodes, 1)) if isinstance(self.parameters['d_pre'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['d_pre'], shape=(self.numNodes,1))
self.d_S = numpy.array(self.parameters['d_S']).reshape((self.numNodes, 1)) if isinstance(self.parameters['d_S'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['d_S'], shape=(self.numNodes,1))
self.d_A = numpy.array(self.parameters['d_A']).reshape((self.numNodes, 1)) if isinstance(self.parameters['d_A'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['d_A'], shape=(self.numNodes,1))
self.q = numpy.array(self.parameters['q']).reshape((self.numNodes, 1)) if isinstance(self.parameters['q'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['q'], shape=(self.numNodes,1))
#Local transmission parameters:
if(self.parameters['beta_local'] is not None):
if(isinstance(self.parameters['beta_local'], (list, numpy.ndarray))):
if(isinstance(self.parameters['beta_local'], list)):
self.beta_local = numpy.array(self.parameters['beta_local'])
else: # is numpy.ndarray
self.beta_local = self.parameters['beta_local']
if(self.beta_local.ndim == 1):
self.beta_local = self.beta_local.reshape((self.numNodes, 1))
elif(self.beta_local.ndim == 2):
self.beta_local = self.beta_local.reshape((self.numNodes, self.numNodes))
else:
self.beta_local = numpy.full_like(self.beta, fill_value=self.parameters['beta_local'])
else:
self.beta_local = self.beta
#----------------------------------------
if(self.parameters['beta_A_local'] is not None):
if(isinstance(self.parameters['beta_A_local'], (list, numpy.ndarray))):
if(isinstance(self.parameters['beta_A_local'], list)):
self.beta_A_local = numpy.array(self.parameters['beta_A_local'])
else: # is numpy.ndarray
self.beta_A_local = self.parameters['beta_A_local']
if(self.beta_A_local.ndim == 1):
self.beta_A_local = self.beta_A_local.reshape((self.numNodes, 1))
elif(self.beta_A_local.ndim == 2):
self.beta_A_local = self.beta_A_local.reshape((self.numNodes, self.numNodes))
else:
self.beta_A_local = numpy.full_like(self.beta_A, fill_value=self.parameters['beta_A_local'])
else:
self.beta_A_local = self.beta_A
#----------------------------------------
if(self.parameters['beta_D_local'] is not None):
if(isinstance(self.parameters['beta_D_local'], (list, numpy.ndarray))):
if(isinstance(self.parameters['beta_D_local'], list)):
self.beta_D_local = numpy.array(self.parameters['beta_D_local'])
else: # is numpy.ndarray
self.beta_D_local = self.parameters['beta_D_local']
if(self.beta_D_local.ndim == 1):
self.beta_D_local = self.beta_D_local.reshape((self.numNodes, 1))
elif(self.beta_D_local.ndim == 2):
self.beta_D_local = self.beta_D_local.reshape((self.numNodes, self.numNodes))
else:
self.beta_D_local = numpy.full_like(self.beta_D, fill_value=self.parameters['beta_D_local'])
else:
self.beta_D_local = self.beta_D
# Pre-multiply beta values by the adjacency matrix ("transmission weight connections")
if(self.beta_local.ndim == 1):
self.A_beta = scipy.sparse.csr_matrix.multiply(self.A, numpy.tile(self.beta_local, (1,self.numNodes))).tocsr()
elif(self.beta_local.ndim == 2):
self.A_beta = scipy.sparse.csr_matrix.multiply(self.A, self.beta_local).tocsr()
# Pre-multiply beta_A values by the adjacency matrix ("transmission weight connections")
if(self.beta_A_local.ndim == 1):
self.A_beta_A = scipy.sparse.csr_matrix.multiply(self.A, numpy.tile(self.beta_A_local, (1,self.numNodes))).tocsr()
elif(self.beta_A_local.ndim == 2):
self.A_beta_A = scipy.sparse.csr_matrix.multiply(self.A, self.beta_A_local).tocsr()
# Pre-multiply beta_D values by the quarantine adjacency matrix ("transmission weight connections")
if(self.beta_D_local.ndim == 1):
self.A_Q_beta_D = scipy.sparse.csr_matrix.multiply(self.A_Q, numpy.tile(self.beta_D_local, (1,self.numNodes))).tocsr()
elif(self.beta_D_local.ndim == 2):
self.A_Q_beta_D = scipy.sparse.csr_matrix.multiply(self.A_Q, self.beta_D_local).tocsr()
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Update scenario flags:
#~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.update_scenario_flags()
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def node_degrees(self, Amat):
return Amat.sum(axis=0).reshape(self.numNodes,1) # sums of adj matrix cols
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_G(self, new_G):
self.G = new_G
# Adjacency matrix:
if type(new_G)==numpy.ndarray:
self.A = scipy.sparse.csr_matrix(new_G)
elif type(new_G)==networkx.classes.graph.Graph:
self.A = networkx.adj_matrix(new_G) # adj_matrix gives scipy.sparse csr_matrix
else:
raise BaseException("Input an adjacency matrix or networkx object only.")
self.numNodes = int(self.A.shape[1])
self.degree = numpy.asarray(self.node_degrees(self.A)).astype(float)
return
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_Q(self, new_Q):
self.Q = new_Q
# Quarantine Adjacency matrix:
if type(new_Q)==numpy.ndarray:
self.A_Q = scipy.sparse.csr_matrix(new_Q)
elif type(new_Q)==networkx.classes.graph.Graph:
self.A_Q = networkx.adj_matrix(new_Q) # adj_matrix gives scipy.sparse csr_matrix
else:
raise BaseException("Input an adjacency matrix or networkx object only.")
self.numNodes_Q = int(self.A_Q.shape[1])
self.degree_Q = numpy.asarray(self.node_degrees(self.A_Q)).astype(float)
assert(self.numNodes == self.numNodes_Q), "The normal and quarantine adjacency graphs must be of the same size."
return
#^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_scenario_flags(self):
self.testing_scenario = ( (numpy.any(self.d_E) and (numpy.any(self.theta_E) or numpy.any(self.phi_E)))
or (numpy.any(self.d_pre) and (numpy.any(self.theta_pre) or numpy.any(self.phi_pre)))
or (numpy.any(self.d_S) and (numpy.any(self.theta_S) or numpy.any(self.phi_S)))
or (numpy.any(self.d_A) and (numpy.any(self.theta_A) or numpy.any(self.phi_A))) )
self.tracing_scenario = ( (numpy.any(self.d_E) and numpy.any(self.phi_E))
or (numpy.any(self.d_pre) and numpy.any(self.phi_pre))
or (numpy.any(self.d_S) and numpy.any(self.phi_S))
or (numpy.any(self.d_A) and numpy.any(self.phi_A)) )
self.vitality_scenario = (numpy.any(self.mu_0) and numpy.any(self.nu)) # nonzero baseline mortality (mu_0) and birth (nu) rates
import csv
import cv2
import os
import numpy as np
def read_data_nn():
attribute_list = []
label_list = []
nn_outputs_list = []
global training_attributes
global training_class_labels
global testing_attributes
global testing_class_labels
global training_nn_outputs
global testing_nn_outputs ##Potentially not needed
reader=csv.reader(open(os.path.join(path_to_data, "Train/x_train.txt"),"rt", encoding='ascii'),delimiter=' ')
for row in reader:
# attributes in columns 0-561 of this attributes only file
attribute_list.append(list(row[i] for i in (range(0,561))))
# attribute_list.append(list(row[i] for i in (list(range(0,57)))))
reader=csv.reader(open(os.path.join(path_to_data, "Train/y_train.txt"),"rt", encoding='ascii'),delimiter=' ')
for row in reader:
# class label in column 1 of this labels only file
label_list.append(row[0])
nn_outputs_list.append(class_label_to_nn_output(row[0], len(classes), True, 1))
training_attributes=np.array(attribute_list).astype(np.float32)
training_class_labels=np.array(label_list).astype(np.float32)
training_nn_outputs=np.array(nn_outputs_list).astype(np.float32)
# Testing data - as currently split
attribute_list = []
label_list = []
nn_outputs_list = []
reader=csv.reader(open(os.path.join(path_to_data, "Test/x_test.txt"),"rt", encoding='ascii'),delimiter=' ')
for row in reader:
# attributes in columns 0-561 of this attributes only file
attribute_list.append(list(row[i] for i in (range(0,561))))
reader=csv.reader(open(os.path.join(path_to_data, "Test/y_test.txt"),"rt", encoding='ascii'),delimiter=' ')
for row in reader:
# class label in column 1 of this labels only file
label_list.append(row[0])
nn_outputs_list.append(class_label_to_nn_output(row[0], len(classes), True, 1)) ##Potentially not needed
testing_attributes=np.array(attribute_list).astype(np.float32)
testing_class_labels=np.array(label_list).astype(np.float32)
testing_nn_outputs=np.array(nn_outputs_list).astype(np.float32) ##Potentially not needed
########### test output for sanity
print(training_attributes)
print(len(training_attributes))
print(training_class_labels)
print(len(training_class_labels))
print(testing_attributes)
print(len(testing_attributes))
print(testing_class_labels)
print(len(testing_class_labels))
##this is the code for the original nn class
if(False):
########### Load Training and Testing Data Sets
# load training data set
reader=csv.reader(open("spambase.train","rt", encoding='ascii'),delimiter=',')
attribute_list = []
label_list = []
nn_outputs_list = []
#### N.B there is a change in the loader here (compared to other examples)
for row in reader:
# attributes in columns 0-56, class label in last column,
attribute_list.append(list(row[i] for i in (list(range(0,57)))))
label_list.append(row[57])
nn_outputs_list.append(class_label_to_nn_output(row[57], len(classes), True, 1))
training_attributes=np.array(attribute_list).astype(np.float32)
training_class_labels=np.array(label_list).astype(np.float32)
training_nn_outputs=np.array(nn_outputs_list).astype(np.float32)
# load testing data set
reader=csv.reader(open("spambase.test","rt", encoding='ascii'),delimiter=',')
attribute_list = []
label_list = []
nn_outputs_list = []
for row in reader:
# attributes in columns 0-56, class label in last column,
attribute_list.append(list(row[i] for i in (list(range(0,57)))))
label_list.append(row[57])
testing_attributes=np.array(attribute_list).astype(np.float32)
testing_class_labels=np.array(label_list).astype(np.float32)
#####################################################################
def neuralnetworks():
############ Perform Training -- Neural Network
# create the network object
nnetwork = cv2.ml.ANN_MLP_create();
# define number of layers, sizes of layers and train neural network
# neural networks only support numerical inputs (convert any categorical inputs)
# set the network topology to 561 -> num_hidden_nodes -> len(classes)
# - one input node per attribute in a sample (561 attributes)
# - num_hidden_nodes nodes in the single hidden layer
# - one output node per class
# defined by the column vector layer_sizes
num_hidden_nodes = 5
layer_sizes = np.int32([561, num_hidden_nodes, len(classes)]); # format = [inputs, hidden layer n ..., output]
nnetwork.setLayerSizes(layer_sizes);
# create the network using a sigmoid function with alpha and beta
# parameters = 1 specified respectively (standard sigmoid)
nnetwork.setActivationFunction(cv2.ml.ANN_MLP_SIGMOID_SYM, 1, 1);
# available activation functions = (cv2.ml.ANN_MLP_SIGMOID_SYM or cv2.ml.ANN_MLP_IDENTITY, cv2.ml.ANN_MLP_GAUSSIAN)
# specify stopping criteria and backpropogation for training
nnetwork.setTrainMethod(cv2.ml.ANN_MLP_BACKPROP);
nnetwork.setBackpropMomentumScale(0.1);
nnetwork.setBackpropWeightScale(0.1);
nnetwork.setTermCriteria((cv2.TERM_CRITERIA_COUNT + cv2.TERM_CRITERIA_EPS, 1000, 0.001))
## N.B. The OpenCV neural network (MLP) implementation does not
## support categorical variable output explicitly unlike the
## other OpenCV ML classes.
## Instead, following the traditional approach for neural networks,
## the output class label is encoded as a binary vector that
## corresponds to the desired output layer result for a given class,
## e.g. {0, 0 ... 1, 0, 0} (one element per class) where
## an entry "1" in the i-th vector position corresponds to a class
## label for class i.
## For optimal performance with the OpenCV interpretation of the sigmoid
## we use {-1, -1 ... 1, -1, -1}
## prior to training we must construct these output layer responses
## from our conventional class labels (carried out by class_label_to_nn_output()
# train the neural network (using training data)
nnetwork.train(training_attributes, cv2.ml.ROW_SAMPLE, training_nn_outputs);
############ Perform Testing -- Neural Network
# for each testing example
predicted_class_labels = np.empty_like(testing_attributes[:,0])
for i in range(0, len(testing_attributes[:,0])) :
# perform neural network prediction (i.e. classification)
# (to get around some kind of OpenCV python interface bug, vertically stack the
# example with a second row of zeros of the same size and type which is ignored).
sample = np.vstack((testing_attributes[i,:],
np.zeros(len(testing_attributes[i,:])).astype(np.float32)));
retrval,output_layer_responses = nnetwork.predict(sample);
# the class label c (result) is the index of the most
# +ve of the output layer responses (from the first of the two examples in the stack)
result = np.argmax(output_layer_responses[0]) +1; ####Corrected with +1 for the -1 in class to nn output method
predicted_class_labels[i] = result
return (predicted_class_labels, testing_class_labels)
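# The returned pair (predicted labels, ground-truth labels) can be compared element-wise
# downstream, e.g. to compute an accuracy score or build a confusion matrix.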
def class_label_to_nn_output(label, max_classes, is_sigmoid, value):
########### construct output layer
# expand training responses defined as class labels {0,1...,N} to output layer
# responses for the OpenCV MLP (Neural Network) implementation such that class
# label c becomes {0,0,0, ... 1, ...0} where the c-th entry is the only non-zero
# entry (equal to "value", conventionally = 1) in the N-length vector
# labels : a row vector of class label transformed to {0,0,0, ... 1, ...0}
# max_classes : maximum class label
# value: value use to label the class response in the output layer vector
# sigmoid : {true | false} - return {-value,....value,....-value} instead for
# optimal use with OpenCV sigmoid function
if (is_sigmoid):
output = np.ones(max_classes).astype(np.float32) * (-1 * value)
output[int(label)-1] = value
else:
output = np.zeros(max_classes).astype(np.float32)
output[int(label)-1] = value
return output
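# Worked example (illustrative): with 6 classes and the sigmoid encoding used above,
# class label "3" becomes the target vector [-1, -1, 1, -1, -1, -1] (value = 1), i.e.
# only the 3rd output node is driven to +value and every other node to -value.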
# BSD 3-Clause License; see https://github.com/scikit-hep/awkward-1.0/blob/main/LICENSE
import pytest # noqa: F401
import numpy as np # noqa: F401
import awkward as ak # noqa: F401
def test_empty_listarray():
a = ak.Array(
ak.layout.ListArray64(
ak.layout.Index64(np.array([], dtype=np.int64)),
ak.layout.Index64(np.array([], dtype=np.int64)),
import gc
gc.collect()
import numpy as np
import pandas as pd
import scipy as sp
import sklearn as skl
from sklearn.model_selection import cross_val_score
from math import *
import random as rd
import cvxpy as cp
import multiprocessing as mp
import matplotlib.pyplot as plt
import gc
import statsmodels.api as sm
from sklearn.model_selection import KFold
from sklearn.model_selection import train_test_split
import time
def maternkernel(x,y,gamma):
x=np.array(x)
y=np.array(y)
return (1+sqrt(3)*sp.linalg.norm(x-y)/gamma)*exp(-sqrt(3)*sp.linalg.norm(x-y)/gamma)
def minmaxkernel(x,y,gamma):
aux=x
auy=y
x=np.array(x)
y=np.array(y)
if len(x.shape)==0:
x=[aux]
y=[auy]
d=len(x)
res=0
for i in range(d):
res=res+min(x[i],y[i])
return res
def pinball(z,t):
if t>1 or t<0:
print("tau must be in [0,1] \n")
t=float(input("try an other tau"))
return(0.5*cp.abs(z)+(t-0.5)*z)
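# Note: as written, pinball(z, t) = 0.5*|z| + (t - 0.5)*z, which is equivalent to the
# usual quantile ("pinball") loss max(t*z, (t-1)*z); for t = 0.5 it reduces to 0.5*|z|.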
#testing the pinball loss function output
out=[]
for i in np.arange(-5,5,0.1):
out.append(pinball(i,0.5))
#linear kernel
def linearkernel(x,y,gamma):
x=np.array(x)
y=np.array(y)
return np.dot(x, y) + gamma # inner product plus offset, so the kernel returns a scalar
#laplacian kernel
def LaplaceKernel(x,y,gamma):
x=np.array(x)
y=np.array(y)
return exp(-sp.linalg.norm(x-y)/gamma)
def SigmoidKernel(x,y,gamma):
x=np.array(x)
y=np.array(y)
# Copyright (c) 2017, CNRS-LAAS
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import logging
import types
from itertools import cycle
from typing import List, Optional, Sequence, Tuple, Union
import matplotlib.cm
import matplotlib.colors
import matplotlib.pyplot
import numpy as np
import fire_rs.geodata.display as gdd
from fire_rs.geodata.geo_data import GeoData
from fire_rs.planning.new_planning import Plan
logger = logging.getLogger(__name__)
TRAJECTORY_COLORS = ["darkgreen", "darkblue", "darkorange", "darkmagenta", "darkred"]
class TrajectoryDisplayExtension(gdd.DisplayExtension):
"""Extension to GeoDataDisplay that an observation trajectories."""
def __init__(self, base_display: 'gdd.GeoDataDisplayBase', plan_trajectory):
super().__init__(base_display, self.__class__.__name__)
self.plan_trajectory = plan_trajectory
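# Sketch of intended use (assumed from the constructor signature above, not part of the
# original module):
#   ext = TrajectoryDisplayExtension(base_display, plan_trajectory)
#   ext.draw_trajectory_solid(color=TRAJECTORY_COLORS[0])
#   ext.draw_bases()
# where `base_display` is a gdd.GeoDataDisplayBase instance and `plan_trajectory` is a
# trajectory taken from a fire_rs Plan object.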
def draw_utility_shade(self, geodata: 'Optional[GeoData]' = None, layer: 'str' = 'utility',
with_colorbar: 'bool' = True, label: 'str' = "Utility", **kwargs):
util_arr = np.array(self._base_display.geodata[layer]) if geodata is None else np.array(
geodata[layer])
if 'vmin' not in kwargs:
kwargs['vmin'] = np.nanmin(util_arr)
if 'vmax' not in kwargs:
kwargs['vmax'] = np.nanmax(util_arr)
if 'cmap' not in kwargs:
kwargs['cmap'] = matplotlib.cm.Purples_r
if 'interpolation' not in kwargs:
kwargs['interpolation'] = 'none'
if 'extent' not in kwargs:
kwargs['extent'] = self._base_display.image_scale
shade = self._base_display.axes.imshow(util_arr.T[::-1, ...], **kwargs)
self._base_display.drawings.append(shade)
if with_colorbar:
self._add_utility_shade_colorbar(shade, label)
def _add_utility_shade_colorbar(self, shade, label: 'str' = "Utility"):
cb = self._base_display.figure.colorbar(shade, ax=self._base_display.axes, shrink=0.65,
aspect=20, format="%.1f")
cb.set_label(label)
self._base_display.colorbars.append(cb)
def draw_waypoints(self, *args, **kwargs):
"""Draw path waypoints in a GeoDataDisplay figure."""
color = kwargs.get('color', 'C0')
waypoints = self.plan_trajectory.as_waypoints()
x = [wp.x for wp in waypoints]
y = [wp.y for wp in waypoints]
self._base_display.drawings.append(
self._base_display.axes.scatter(x[::2], y[::2], s=7, c=color, marker='D'))
self._base_display.drawings.append(
self._base_display.axes.scatter(x[1::2], y[1::2], s=7, c=color, marker='>'))
def draw_flighttime_path(self, *args,
colorbar_time_range: 'Optional[Tuple[float, float]]' = None,
**kwargs):
"""Draw trajectory in a GeoDataDisplay figure.
The color of the trajectory will reflect the time taken by the trajectory.
Optional argument colorbar_time_range may be a tuple of start and end times in seconds.
If the Optional argument with_colorbar is set to True, a color bar will be displayed of the
trajectory.
"""
if len(self.plan_trajectory) < 2:
return
label = kwargs.get('label', None)
sampled_waypoints = self.plan_trajectory.sampled(step_size=5)
x = [wp.x for wp in sampled_waypoints]
y = [wp.y for wp in sampled_waypoints]
color_range = np.linspace(self.plan_trajectory.start_time() / 60,
self.plan_trajectory.end_time() / 60, len(x))
color_norm = matplotlib.colors.Normalize(vmin=color_range[0], vmax=color_range[-1])
if colorbar_time_range is not None:
color_norm = matplotlib.colors.Normalize(vmin=colorbar_time_range[0] / 60,
vmax=colorbar_time_range[1] / 60)
self._base_display.drawings.append(
self._base_display.axes.scatter(x, y, s=1, edgecolors='none', c=color_range,
label=label, norm=color_norm,
cmap=matplotlib.cm.plasma,
zorder=self._base_display.FOREGROUND_LAYER))
if kwargs.get('with_colorbar', False):
cb = self._base_display.figure.colorbar(self._base_display.drawings[-1],
ax=self._base_display.axes,
shrink=0.65, aspect=20, format="%d min")
cb.set_label("Flight time")
self._base_display.colorbars.append(cb)
def draw_trajectory_solid(self, trajectory: 'Optional[up.Trajectory]' = None, **kwargs):
"""Draw trajectory in a GeoDataDisplay figure with solid color
kwargs:
color: desired color. Default: C0.
"""
traj = trajectory if trajectory is not None else self.plan_trajectory
if len(traj) < 2:
return
color = kwargs.get('color', 'C0')
size = kwargs.get('size', 1)
label = kwargs.get('label', None)
linestyle = kwargs.get('linestyle', '-')
time_range = kwargs.get('time_range', (-np.inf, np.inf))
sampled_waypoints = traj.sampled_with_time(time_range, step_size=5)
x = [wp.x for i, wp in enumerate(sampled_waypoints[0]) if
time_range[0] < sampled_waypoints[1][i] < time_range[1]]
y = [wp.y for i, wp in enumerate(sampled_waypoints[0]) if
time_range[0] < sampled_waypoints[1][i] < time_range[1]]
self._base_display.drawings.append(
self._base_display.axes.plot(x, y, linewidth=size, linestyle=linestyle, c=color,
label=label, zorder=self._base_display.FOREGROUND_LAYER))
# TODO: implement legend
def draw_waypoint_trail(self, wp_trail, **kwargs):
"""Draw a waypoint trail in a GeoDataDisplay figure with solid color
kwargs:
color: desired color. Default: C0.
"""
color = kwargs.get('color', 'C0')
size = kwargs.get('size', 1)
label = kwargs.get('label', None)
linestyle = kwargs.get('linestyle', '--')
self._base_display.drawings.append(
self._base_display.axes.plot(*(zip(*wp_trail)), linewidth=size, linestyle=linestyle,
c=color, label=label,
zorder=self._base_display.FOREGROUND_LAYER))
def draw_segments(self, *args, **kwargs):
"""Draw observation segments with start and end points in a GeoDataDisplay figure."""
if len(self.plan_trajectory) < 2:
return
color = kwargs.get('color', 'C0')
time_range = kwargs.get('time_range', (-np.inf, np.inf))
py_segments = [s for s in self.plan_trajectory.segments]
py_modifi_segments = [s for i, s in enumerate(py_segments) if
self.plan_trajectory.can_modify(i) and
time_range[0] <= self.plan_trajectory.start_time(i) <= time_range[1]]
py_frozen_segments = [s for i, s in enumerate(py_segments) if
not self.plan_trajectory.can_modify(i) and
time_range[0] <= self.plan_trajectory.start_time(i) <= time_range[1]]
# Plot modifiable segments
start_x_m = [s.start.x for s in py_modifi_segments]
start_y_m = [s.start.y for s in py_modifi_segments]
end_x_m = [s.end.x for s in py_modifi_segments]
end_y_m = [s.end.y for s in py_modifi_segments]
self._base_display.drawings.append(
self._base_display.axes.scatter(start_x_m, start_y_m, s=10, edgecolor='black', c=color,
marker='o',
zorder=self._base_display.FOREGROUND_OVERLAY_LAYER))
self._base_display.drawings.append(
self._base_display.axes.scatter(end_x_m, end_y_m, s=10, edgecolor='black', c=color,
marker='>',
zorder=self._base_display.FOREGROUND_OVERLAY_LAYER))
# Plot frozen segments (all but the bases)
start_x_f = [s.start.x for s in py_frozen_segments if
not (s == py_segments[0] or s == py_segments[-1])]
start_y_f = [s.start.y for s in py_frozen_segments if
not (s == py_segments[0] or s == py_segments[-1])]
end_x_f = [s.end.x for s in py_frozen_segments if
not (s == py_segments[0] or s == py_segments[-1])]
end_y_f = [s.end.y for s in py_frozen_segments if
not (s == py_segments[0] or s == py_segments[-1])]
self._base_display.drawings.append(
self._base_display.axes.scatter(start_x_f, start_y_f, s=10, edgecolor='black',
c='black', marker='o',
zorder=self._base_display.FOREGROUND_OVERLAY_LAYER))
self._base_display.drawings.append(
self._base_display.axes.scatter(end_x_f, end_y_f, s=10, edgecolor='black', c='black',
marker='>',
zorder=self._base_display.FOREGROUND_OVERLAY_LAYER))
if time_range[0] <= self.plan_trajectory.start_time(0) <= time_range[1]:
start_base = py_segments[0]
self._base_display.drawings.append(self._base_display.axes.scatter(
start_base.start.x, start_base.start.y, s=10, edgecolor=color, c=color, marker='D',
zorder=self._base_display.FOREGROUND_OVERLAY_LAYER))
if time_range[0] <= self.plan_trajectory.start_time(len(self.plan_trajectory) - 1) <= \
time_range[1]:
finish_base = py_segments[-1]
self._base_display.drawings.append(
self._base_display.axes.scatter(finish_base.start.x, finish_base.start.y, s=10,
edgecolor=color, c=color, marker='D',
zorder=self._base_display.FOREGROUND_OVERLAY_LAYER))
start_x = [s.start.x for i, s in enumerate(py_segments) if
time_range[0] <= self.plan_trajectory.start_time(i) <= time_range[1]]
start_y = [s.start.y for i, s in enumerate(py_segments) if
time_range[0] <= self.plan_trajectory.start_time(i) <= time_range[1]]
end_x = [s.end.x for i, s in enumerate(py_segments) if
time_range[0] <= self.plan_trajectory.start_time(i) <= time_range[1]]
end_y = [s.end.y for i, s in enumerate(py_segments) if
time_range[0] <= self.plan_trajectory.start_time(i) <= time_range[1]]
# Draw lines between segment bounds
for i in range(len(start_x)):
self._base_display.drawings.append(
self._base_display.axes.plot([start_x[i], end_x[i]], [start_y[i], end_y[i]],
c=color, linewidth=2,
zorder=self._base_display.FOREGROUND_LAYER))
def draw_bases(self, trajectory: 'Optional[up.Trajectory]' = None, **kwargs):
traj = trajectory if trajectory is not None else self.plan_trajectory
if 's' not in kwargs:
kwargs['s'] = 10
if 'color' not in kwargs:
kwargs['color'] = 'black'
if 'edgecolor' not in kwargs:
kwargs['edgecolor'] = 'face'
if 'marker' not in kwargs:
kwargs['marker'] = 'D'
if 'zorder' not in kwargs:
kwargs['zorder'] = self._base_display.FOREGROUND_OVERLAY_LAYER
base_s = traj.segments[0]
base_e = traj.segments[-1]
if base_s:
self._base_display.drawings.append(
self._base_display.axes.scatter(base_s.start.x, base_s.start.y,
**kwargs))
if base_e:
self._base_display.drawings.append(
self._base_display.axes.scatter(base_e.end.x, base_e.end.y, **kwargs))
def draw_arrows(self, trajectory: 'Optional[up.Trajectory]' = None, **kwargs):
"""Draw trajectory waypoints in a GeoDataDisplay figure."""
traj = trajectory if trajectory is not None else self.plan_trajectory
if len(traj) < 2:
return
# if 'size' not in kwargs:
# kwargs['size'] = 10
if 'color' not in kwargs:
kwargs['color'] = 'black'
if 'edgecolor' not in kwargs:
kwargs['edgecolor'] = 'face'
if 'units' not in kwargs:
kwargs['units'] = 'xy'
if 'angles' not in kwargs:
kwargs['angles'] = 'xy'
if 'scale_units' not in kwargs:
kwargs['scale_units'] = 'xy'
if 'scale' not in kwargs:
kwargs['scale'] = 1
if 'headaxislength' not in kwargs:
kwargs['headaxislength'] = 4
if 'headlength' not in kwargs:
kwargs['headlength'] = 5
if 'headwidth' not in kwargs:
kwargs['headwidth'] = 5
if 'width' not in kwargs:
kwargs['width'] = 15
if 'pivot' not in kwargs:
kwargs['pivot'] = 'middle'
if 'zorder' not in kwargs:
kwargs['zorder'] = self._base_display.FOREGROUND_OVERLAY_LAYER
py_segments = traj.segments[:]
py_modifi_segments = [s for i, s in enumerate(py_segments) if
traj.can_modify(i)]
py_frozen_segments = [s for i, s in enumerate(py_segments) if
not traj.can_modify(i)]
if py_modifi_segments:
# Plot modifiable segments
start_x_m = np.array([s.start.x for s in py_modifi_segments])
start_y_m = np.array([s.start.y for s in py_modifi_segments])
start_dir_m = np.array([s.start.dir for s in py_modifi_segments])
self._base_display.drawings.append(
self._base_display.axes.quiver(start_x_m, start_y_m,
100 * np.cos(start_dir_m),
100 * np.sin(start_dir_m),
**kwargs))
# Plot frozen segments (all but the bases)
# start_x_f = [s.start.x for s in py_frozen_segments[1:len(py_frozen_segments) - 1]]
# start_y_f = [s.start.y for s in py_frozen_segments[1:len(py_frozen_segments) - 1]]
# start_dir_f = [s.start.dir for s in py_frozen_segments[1:len(py_frozen_segments) - 1]]
#
# for i in range(len(start_x_f)):
# self._base_display.drawings.append(
# self._base_display.axis.quiver(start_x_f[i], start_y_f[i],
# 100 * np.cos(start_dir_f[i]),
# 100 * np.sin(start_dir_f[i]), units='xy',
# angles='xy',
# scale_units='xy', scale=1,
# facecolor=color, edgecolor='gray',
# headaxislength=4, headlength=5, headwidth=5,
# width=15, pivot='middle',
# zorder=self._base_display.FOREGROUND_OVERLAY_LAYER))
def draw_observedcells(self, observations, **kwargs):
"""Plot observed cells as points"""
for ptt in observations:
self._base_display.axes.scatter(ptt[0][0], ptt[0][1], s=4, c=(0., 1., 0., .5),
zorder=self._base_display.BACKGROUND_OVERLAY_LAYER,
edgecolors='none', marker='s')
def draw_observation_map(self, obs_map: 'Optional[GeoData]' = None, layer='observed',
color='green', **kwargs):
o_map = np.array(obs_map[layer]) if obs_map is not None else np.array(
self._base_display.geodata[layer])
o_map[~np.isnan(o_map)] = 1
# define the colors
cmap = matplotlib.colors.ListedColormap([color])
o_map = np.around(o_map.T[::-1, ...] / 60., 1)
if 'vmin' not in kwargs:
kwargs['vmin'] = np.nanmin(o_map)
if 'vmax' not in kwargs:
kwargs['vmax'] = np.nanmax(o_map)
import pandas as pd
import numpy as np
import matplotlib
matplotlib.use('agg')
import matplotlib.pyplot as plt
from matplotlib import cm, colors
from astropy.modeling import models, fitting
# Reading in all data files at once
import glob
path_normal ='/projects/p30137/ageller/testing/EBLSST/add_m5/output_files'
allFiles_normal = glob.glob(path_normal + "/*.csv")
path_fast = '/projects/p30137/ageller/testing/EBLSST/add_m5/fast/old/output_files'
allFiles_fast = glob.glob(path_fast + "/*.csv")
path_obsDist = '/projects/p30137/ageller/testing/EBLSST/add_m5/fast/old/obsDist/output_files'
allFiles_obsDist = glob.glob(path_obsDist + "/*.csv")
N_totalnormal_array = []
N_totalobservablenormal_array = []
N_totalrecoverablenormal_array = []
N_totalnormal_array_03 = []
N_totalobservablenormal_array_03 = []
N_totalrecoverablenormal_array_03 = []
N_totalnormal_array_1 = []
N_totalobservablenormal_array_1 = []
N_totalrecoverablenormal_array_1 = []
N_totalnormal_array_10 = []
N_totalobservablenormal_array_10 = []
N_totalrecoverablenormal_array_10 = []
N_totalnormal_array_30 = []
N_totalobservablenormal_array_30 = []
N_totalrecoverablenormal_array_30 = []
N_totalnormal_array_100 = []
N_totalobservablenormal_array_100 = []
N_totalrecoverablenormal_array_100 = []
N_totalnormal_array_1000 = []
N_totalobservablenormal_array_1000 = []
N_totalrecoverablenormal_array_1000 = []
N_totalnormal22_array = []
N_totalobservablenormal22_array = []
N_totalrecoverablenormal22_array = []
N_totalnormal22_array_03 = []
N_totalobservablenormal22_array_03 = []
N_totalrecoverablenormal22_array_03 = []
N_totalnormal22_array_1 = []
N_totalobservablenormal22_array_1 = []
N_totalrecoverablenormal22_array_1 = []
N_totalnormal22_array_10 = []
N_totalobservablenormal22_array_10 = []
N_totalrecoverablenormal22_array_10 = []
N_totalnormal22_array_30 = []
N_totalobservablenormal22_array_30 = []
N_totalrecoverablenormal22_array_30 = []
N_totalnormal22_array_100 = []
N_totalobservablenormal22_array_100 = []
N_totalrecoverablenormal22_array_100 = []
N_totalnormal22_array_1000 = []
N_totalobservablenormal22_array_1000 = []
N_totalrecoverablenormal22_array_1000 = []
N_totalnormal195_array = []
N_totalobservablenormal195_array = []
N_totalrecoverablenormal195_array = []
N_totalnormal195_array_03 = []
N_totalobservablenormal195_array_03 = []
N_totalrecoverablenormal195_array_03 = []
N_totalnormal195_array_1 = []
N_totalobservablenormal195_array_1 = []
N_totalrecoverablenormal195_array_1 = []
N_totalnormal195_array_10 = []
N_totalobservablenormal195_array_10 = []
N_totalrecoverablenormal195_array_10 = []
N_totalnormal195_array_30 = []
N_totalobservablenormal195_array_30 = []
N_totalrecoverablenormal195_array_30 = []
N_totalnormal195_array_100 = []
N_totalobservablenormal195_array_100 = []
N_totalrecoverablenormal195_array_100 = []
N_totalnormal195_array_1000 = []
N_totalobservablenormal195_array_1000 = []
N_totalrecoverablenormal195_array_1000 = []
N_totalfast_array = []
N_totalobservablefast_array = []
N_totalrecoverablefast_array = []
N_totalfast_array_03 = []
N_totalobservablefast_array_03 = []
N_totalrecoverablefast_array_03 = []
N_totalfast_array_1 = []
N_totalobservablefast_array_1 = []
N_totalrecoverablefast_array_1 = []
N_totalfast_array_10 = []
N_totalobservablefast_array_10 = []
N_totalrecoverablefast_array_10 = []
N_totalfast_array_30 = []
N_totalobservablefast_array_30 = []
N_totalrecoverablefast_array_30 = []
N_totalfast_array_100 = []
N_totalobservablefast_array_100 = []
N_totalrecoverablefast_array_100 = []
N_totalfast_array_1000 = []
N_totalobservablefast_array_1000 = []
N_totalrecoverablefast_array_1000 = []
N_totalfast22_array = []
N_totalobservablefast22_array = []
N_totalrecoverablefast22_array = []
N_totalfast22_array_03 = []
N_totalobservablefast22_array_03 = []
N_totalrecoverablefast22_array_03 = []
N_totalfast22_array_1 = []
N_totalobservablefast22_array_1 = []
N_totalrecoverablefast22_array_1 = []
N_totalfast22_array_10 = []
N_totalobservablefast22_array_10 = []
N_totalrecoverablefast22_array_10 = []
N_totalfast22_array_30 = []
N_totalobservablefast22_array_30 = []
N_totalrecoverablefast22_array_30 = []
N_totalfast22_array_100 = []
N_totalobservablefast22_array_100 = []
N_totalrecoverablefast22_array_100 = []
N_totalfast22_array_1000 = []
N_totalobservablefast22_array_1000 = []
N_totalrecoverablefast22_array_1000 = []
N_totalfast195_array = []
N_totalobservablefast195_array = []
N_totalrecoverablefast195_array = []
N_totalfast195_array_03 = []
N_totalobservablefast195_array_03 = []
N_totalrecoverablefast195_array_03 = []
N_totalfast195_array_1 = []
N_totalobservablefast195_array_1 = []
N_totalrecoverablefast195_array_1 = []
N_totalfast195_array_10 = []
N_totalobservablefast195_array_10 = []
N_totalrecoverablefast195_array_10 = []
N_totalfast195_array_30 = []
N_totalobservablefast195_array_30 = []
N_totalrecoverablefast195_array_30 = []
N_totalfast195_array_100 = []
N_totalobservablefast195_array_100 = []
N_totalrecoverablefast195_array_100 = []
N_totalfast195_array_1000 = []
N_totalobservablefast195_array_1000 = []
N_totalrecoverablefast195_array_1000 = []
N_totalobsDist_array = []
N_totalobservableobsDist_array = []
N_totalrecoverableobsDist_array = []
N_totalobsDist_array_03 = []
N_totalobservableobsDist_array_03 = []
N_totalrecoverableobsDist_array_03 = []
N_totalobsDist_array_1 = []
N_totalobservableobsDist_array_1 = []
N_totalrecoverableobsDist_array_1 = []
N_totalobsDist_array_10 = []
N_totalobservableobsDist_array_10 = []
N_totalrecoverableobsDist_array_10 = []
N_totalobsDist_array_30 = []
N_totalobservableobsDist_array_30 = []
N_totalrecoverableobsDist_array_30 = []
N_totalobsDist_array_100 = []
N_totalobservableobsDist_array_100 = []
N_totalrecoverableobsDist_array_100 = []
N_totalobsDist_array_1000 = []
N_totalobservableobsDist_array_1000 = []
N_totalrecoverableobsDist_array_1000 = []
N_totalobsDist22_array = []
N_totalobservableobsDist22_array = []
N_totalrecoverableobsDist22_array = []
N_totalobsDist22_array_03 = []
N_totalobservableobsDist22_array_03 = []
N_totalrecoverableobsDist22_array_03 = []
N_totalobsDist22_array_1 = []
N_totalobservableobsDist22_array_1 = []
N_totalrecoverableobsDist22_array_1 = []
N_totalobsDist22_array_10 = []
N_totalobservableobsDist22_array_10 = []
N_totalrecoverableobsDist22_array_10 = []
N_totalobsDist22_array_30 = []
N_totalobservableobsDist22_array_30 = []
N_totalrecoverableobsDist22_array_30 = []
N_totalobsDist22_array_100 = []
N_totalobservableobsDist22_array_100 = []
N_totalrecoverableobsDist22_array_100 = []
N_totalobsDist22_array_1000 = []
N_totalobservableobsDist22_array_1000 = []
N_totalrecoverableobsDist22_array_1000 = []
N_totalobsDist195_array = []
N_totalobservableobsDist195_array = []
N_totalrecoverableobsDist195_array = []
N_totalobsDist195_array_03 = []
N_totalobservableobsDist195_array_03 = []
N_totalrecoverableobsDist195_array_03 = []
N_totalobsDist195_array_1 = []
N_totalobservableobsDist195_array_1 = []
N_totalrecoverableobsDist195_array_1 = []
N_totalobsDist195_array_10 = []
N_totalobservableobsDist195_array_10 = []
N_totalrecoverableobsDist195_array_10 = []
N_totalobsDist195_array_30 = []
N_totalobservableobsDist195_array_30 = []
N_totalrecoverableobsDist195_array_30 = []
N_totalobsDist195_array_100 = []
N_totalobservableobsDist195_array_100 = []
N_totalrecoverableobsDist195_array_100 = []
N_totalobsDist195_array_1000 = []
N_totalobservableobsDist195_array_1000 = []
N_totalrecoverableobsDist195_array_1000 = []
def fitRagfb():
x = [0.05, 0.1, 1, 8, 15] #estimates of midpoints in bins, and using this: https://sites.uni.edu/morgans/astro/course/Notes/section2/spectralmasses.html
y = [0.20, 0.35, 0.50, 0.70, 0.75]
init = models.PowerLaw1D(amplitude=0.5, x_0=1, alpha=-1.)
fitter = fitting.LevMarLSQFitter()
fit = fitter(init, x, y)
return fit
fbFit= fitRagfb()
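# fbFit is an astropy PowerLaw1D model for the binary fraction as a function of primary
# mass; e.g. fbFit(1.0) should come out close to the 0.50 input point above (illustrative,
# the exact value depends on the least-squares fit). It is evaluated on the mass-histogram
# midpoints (m1val) inside the file loop below.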
mbins = np.arange(0,10, 0.1, dtype='float')
cutP = 0.10 #condition on recoverability/tolerance
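# cutP = 0.10: a system counts as "recoverable" if the recovered LSM_PERIOD matches the
# input period -- or half or twice it (common aliases for eclipsing binaries) -- to within
# 10%, as implemented by the fullP/halfP/twiceP cuts inside the loop below.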
for filenormal_ in sorted(allFiles_normal):
filename = filenormal_[60:]
fileid = filename.replace('output_file.csv', '') # str.strip would remove characters, not the suffix
print ("I'm starting " + fileid)
datnormal = pd.read_csv(filenormal_, sep = ',', header=2)
PeriodIn = datnormal['p'] # input period -- 'p' in data file
##########################################################
datnormal1 = pd.read_csv(filenormal_, sep = ',', header=0, nrows=1)
N_tri = datnormal1["NstarsTRILEGAL"][0]
#print("N_tri = ", N_tri)
Nall = len(PeriodIn)
m1hAll0, m1b = np.histogram(datnormal["m1"], bins=mbins)
dm1 = np.diff(m1b)
m1val = m1b[:-1] + dm1/2.
fb = np.sum(m1hAll0/Nall*fbFit(m1val))
N_mult = N_tri*fb
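# fb is the mass-weighted mean binary fraction for this field (m1 histogram weighted by the
# fitted fraction), and N_mult = N_tri * fb scales the TRILEGAL star count for the field up
# to the expected number of binaries; the simulated index fractions below are multiplied by
# N_mult to convert them into absolute predicted numbers.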
##########################################################
if len(PeriodIn) == 0.:
continue
if N_tri == 0:
continue
else:
PeriodOut = datnormal['LSM_PERIOD'] #LSM_PERIOD in data file
appMagMean = datnormal['appMagMean'] #apparent magnitude, will use to make cuts for 24 (default), 22, and then Kepler's range (?? -- brighter than LSST can manage-- to 19) OR 19.5 (SNR = 10)
observable = datnormal.loc[PeriodOut != -999].index
observable_03 = datnormal.loc[(PeriodIn <= 0.3) & (PeriodOut != -999)].index
observable_1 = datnormal.loc[(PeriodIn <= 1) & (PeriodOut != -999)].index
observable_10 = datnormal.loc[(PeriodIn <= 10) & (PeriodOut != -999)].index
observable_30 = datnormal.loc[(PeriodIn <= 30) & (PeriodOut != -999)].index
observable_100 = datnormal.loc[(PeriodIn <= 100) & (PeriodOut != -999)].index
observable_1000 = datnormal.loc[(PeriodIn <= 1000) & (PeriodOut != -999)].index
observable_22 = datnormal.loc[(PeriodOut != -999) & (appMagMean <= 22.)].index
observable_03_22 = datnormal.loc[(PeriodIn <= 0.3) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_1_22 = datnormal.loc[(PeriodIn <= 1) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_10_22 = datnormal.loc[(PeriodIn <= 10) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_30_22 = datnormal.loc[(PeriodIn <= 30) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_100_22 = datnormal.loc[(PeriodIn <= 100) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_1000_22 = datnormal.loc[(PeriodIn <= 1000) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_195 = datnormal.loc[(PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_03_195 = datnormal.loc[(PeriodIn <= 0.3) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_1_195 = datnormal.loc[(PeriodIn <= 1) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_10_195 = datnormal.loc[(PeriodIn <= 10) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_30_195 = datnormal.loc[(PeriodIn <= 30) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_100_195 = datnormal.loc[(PeriodIn <= 100) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_1000_195 = datnormal.loc[(PeriodIn <= 1000) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
fullP = abs(PeriodOut - PeriodIn)/PeriodIn
halfP = abs(PeriodOut - 0.5*PeriodIn)/(0.5*PeriodIn)
twiceP = abs(PeriodOut - 2*PeriodIn)/(2*PeriodIn)
recoverable = datnormal.loc[(PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_03 = datnormal.loc[(PeriodIn <= 0.3) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_1 = datnormal.loc[(PeriodIn <= 1) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_10 = datnormal.loc[(PeriodIn <= 10) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_30 = datnormal.loc[(PeriodIn <= 30) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_100 = datnormal.loc[(PeriodIn <= 100) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_1000 = datnormal.loc[(PeriodIn <= 1000) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_22 = datnormal.loc[(PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_03_22 = datnormal.loc[(PeriodIn <= 0.3) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_1_22 = datnormal.loc[(PeriodIn <= 1) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_10_22 = datnormal.loc[(PeriodIn <= 10) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_30_22 = datnormal.loc[(PeriodIn <= 30) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_100_22 = datnormal.loc[(PeriodIn <= 100) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_1000_22 = datnormal.loc[(PeriodIn <= 1000) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_195 = datnormal.loc[(PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_03_195 = datnormal.loc[(PeriodIn <= 0.3) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_1_195 = datnormal.loc[(PeriodIn <= 1) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_10_195 = datnormal.loc[(PeriodIn <= 10) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_30_195 = datnormal.loc[(PeriodIn <= 30) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_100_195 = datnormal.loc[(PeriodIn <= 100) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_1000_195 = datnormal.loc[(PeriodIn <= 1000) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
P03 = datnormal.loc[PeriodIn <= 0.3].index
P1 = datnormal.loc[PeriodIn <= 1].index
P10 = datnormal.loc[PeriodIn <= 10].index
P30 = datnormal.loc[PeriodIn <= 30].index
P100 = datnormal.loc[PeriodIn <= 100].index
P1000 = datnormal.loc[PeriodIn <= 1000].index
P_22 = datnormal.loc[appMagMean <= 22.].index
P03_22 = datnormal.loc[(PeriodIn <= 0.3) & (appMagMean <= 22.)].index
P1_22 = datnormal.loc[(PeriodIn <= 1) & (appMagMean <= 22.)].index
P10_22 = datnormal.loc[(PeriodIn <= 10) & (appMagMean <= 22.)].index
P30_22 = datnormal.loc[(PeriodIn <= 30) & (appMagMean <= 22.)].index
P100_22 = datnormal.loc[(PeriodIn <= 100) & (appMagMean <= 22.)].index
P1000_22 = datnormal.loc[(PeriodIn <= 1000) & (appMagMean <= 22.)].index
P_195 = datnormal.loc[appMagMean <= 19.5].index
P03_195 = datnormal.loc[(PeriodIn <= 0.3) & (appMagMean <= 19.5)].index
P1_195 = datnormal.loc[(PeriodIn <= 1) & (appMagMean <= 19.5)].index
P10_195 = datnormal.loc[(PeriodIn <= 10) & (appMagMean <= 19.5)].index
P30_195 = datnormal.loc[(PeriodIn <= 30) & (appMagMean <= 19.5)].index
P100_195 = datnormal.loc[(PeriodIn <= 100) & (appMagMean <= 19.5)].index
P1000_195 = datnormal.loc[(PeriodIn <= 1000) & (appMagMean <= 19.5)].index
N_all = (len(PeriodIn)/len(PeriodIn))*N_mult
N_all03 = (len(P03)/len(PeriodIn))*N_mult
N_all1 = (len(P1)/len(PeriodIn))*N_mult
N_all10 = (len(P10)/len(PeriodIn))*N_mult
N_all30 = (len(P30)/len(PeriodIn))*N_mult
N_all100 = (len(P100)/len(PeriodIn))*N_mult
N_all1000 = (len(P1000)/len(PeriodIn))*N_mult
N_all_22 = (len(P_22)/len(PeriodIn))*N_mult
N_all03_22 = (len(P03_22)/len(PeriodIn))*N_mult
N_all1_22 = (len(P1_22)/len(PeriodIn))*N_mult
N_all10_22 = (len(P10_22)/len(PeriodIn))*N_mult
N_all30_22 = (len(P30_22)/len(PeriodIn))*N_mult
N_all100_22 = (len(P100_22)/len(PeriodIn))*N_mult
N_all1000_22 = (len(P1000_22)/len(PeriodIn))*N_mult
N_all_195 = (len(P_195)/len(PeriodIn))*N_mult
N_all03_195 = (len(P03_195)/len(PeriodIn))*N_mult
N_all1_195 = (len(P1_195)/len(PeriodIn))*N_mult
N_all10_195 = (len(P10_195)/len(PeriodIn))*N_mult
N_all30_195 = (len(P30_195)/len(PeriodIn))*N_mult
N_all100_195 = (len(P100_195)/len(PeriodIn))*N_mult
N_all1000_195 = (len(P1000_195)/len(PeriodIn))*N_mult
N_obs = (len(observable)/len(PeriodIn))*N_mult
N_obs03 = (len(observable_03)/len(PeriodIn))*N_mult
N_obs1 = (len(observable_1)/len(PeriodIn))*N_mult
N_obs10 = (len(observable_10)/len(PeriodIn))*N_mult
N_obs30 = (len(observable_30)/len(PeriodIn))*N_mult
N_obs100 = (len(observable_100)/len(PeriodIn))*N_mult
N_obs1000 = (len(observable_1000)/len(PeriodIn))*N_mult
N_obs_22 = (len(observable_22)/len(PeriodIn))*N_mult
N_obs03_22 = (len(observable_03_22)/len(PeriodIn))*N_mult
N_obs1_22 = (len(observable_1_22)/len(PeriodIn))*N_mult
N_obs10_22 = (len(observable_10_22)/len(PeriodIn))*N_mult
N_obs30_22 = (len(observable_30_22)/len(PeriodIn))*N_mult
N_obs100_22 = (len(observable_100_22)/len(PeriodIn))*N_mult
N_obs1000_22 = (len(observable_1000_22)/len(PeriodIn))*N_mult
N_obs_195 = (len(observable_195)/len(PeriodIn))*N_mult
N_obs03_195 = (len(observable_03_195)/len(PeriodIn))*N_mult
N_obs1_195 = (len(observable_1_195)/len(PeriodIn))*N_mult
N_obs10_195 = (len(observable_10_195)/len(PeriodIn))*N_mult
N_obs30_195 = (len(observable_30_195)/len(PeriodIn))*N_mult
N_obs100_195 = (len(observable_100_195)/len(PeriodIn))*N_mult
N_obs1000_195 = (len(observable_1000_195)/len(PeriodIn))*N_mult
N_rec = (len(recoverable)/len(PeriodIn))*N_mult
N_rec03 = (len(recoverable_03)/len(PeriodIn))*N_mult
N_rec1 = (len(recoverable_1)/len(PeriodIn))*N_mult
N_rec10 = (len(recoverable_10)/len(PeriodIn))*N_mult
N_rec30 = (len(recoverable_30)/len(PeriodIn))*N_mult
N_rec100 = (len(recoverable_100)/len(PeriodIn))*N_mult
N_rec1000 = (len(recoverable_1000)/len(PeriodIn))*N_mult
N_rec_22 = (len(recoverable_22)/len(PeriodIn))*N_mult
N_rec03_22 = (len(recoverable_03_22)/len(PeriodIn))*N_mult
N_rec1_22 = (len(recoverable_1_22)/len(PeriodIn))*N_mult
N_rec10_22 = (len(recoverable_10_22)/len(PeriodIn))*N_mult
N_rec30_22 = (len(recoverable_30_22)/len(PeriodIn))*N_mult
N_rec100_22 = (len(recoverable_100_22)/len(PeriodIn))*N_mult
N_rec1000_22 = (len(recoverable_1000_22)/len(PeriodIn))*N_mult
N_rec_195 = (len(recoverable_195)/len(PeriodIn))*N_mult
N_rec03_195 = (len(recoverable_03_195)/len(PeriodIn))*N_mult
N_rec1_195 = (len(recoverable_1_195)/len(PeriodIn))*N_mult
N_rec10_195 = (len(recoverable_10_195)/len(PeriodIn))*N_mult
N_rec30_195 = (len(recoverable_30_195)/len(PeriodIn))*N_mult
N_rec100_195 = (len(recoverable_100_195)/len(PeriodIn))*N_mult
N_rec1000_195 = (len(recoverable_1000_195)/len(PeriodIn))*N_mult
N_totalnormal_array.append(float(N_all))
N_totalobservablenormal_array.append(float(N_obs))
N_totalrecoverablenormal_array.append(float(N_rec))
N_totalnormal_array_03.append(float(N_all03))
N_totalobservablenormal_array_03.append(float(N_obs03))
N_totalrecoverablenormal_array_03.append(float(N_rec03))
N_totalnormal_array_1.append(float(N_all1))
N_totalobservablenormal_array_1.append(float(N_obs1))
N_totalrecoverablenormal_array_1.append(float(N_rec1))
N_totalnormal_array_10.append(float(N_all10))
N_totalobservablenormal_array_10.append(float(N_obs10))
N_totalrecoverablenormal_array_10.append(float(N_rec10))
N_totalnormal_array_30.append(float(N_all30))
N_totalobservablenormal_array_30.append(float(N_obs30))
N_totalrecoverablenormal_array_30.append(float(N_rec30))
N_totalnormal_array_100.append(float(N_all100))
N_totalobservablenormal_array_100.append(float(N_obs100))
N_totalrecoverablenormal_array_100.append(float(N_rec100))
N_totalnormal_array_1000.append(float(N_all1000))
N_totalobservablenormal_array_1000.append(float(N_obs1000))
N_totalrecoverablenormal_array_1000.append(float(N_rec1000))
N_totalnormal22_array.append(float(N_all_22))
N_totalobservablenormal22_array.append(float(N_obs_22))
N_totalrecoverablenormal22_array.append(float(N_rec_22))
N_totalnormal22_array_03.append(float(N_all03_22))
N_totalobservablenormal22_array_03.append(float(N_obs03_22))
N_totalrecoverablenormal22_array_03.append(float(N_rec03_22))
N_totalnormal22_array_1.append(float(N_all1_22))
N_totalobservablenormal22_array_1.append(float(N_obs1_22))
N_totalrecoverablenormal22_array_1.append(float(N_rec1_22))
N_totalnormal22_array_10.append(float(N_all10_22))
N_totalobservablenormal22_array_10.append(float(N_obs10_22))
N_totalrecoverablenormal22_array_10.append(float(N_rec10_22))
N_totalnormal22_array_30.append(float(N_all30_22))
N_totalobservablenormal22_array_30.append(float(N_obs30_22))
N_totalrecoverablenormal22_array_30.append(float(N_rec30_22))
N_totalnormal22_array_100.append(float(N_all100_22))
N_totalobservablenormal22_array_100.append(float(N_obs100_22))
N_totalrecoverablenormal22_array_100.append(float(N_rec100_22))
N_totalnormal22_array_1000.append(float(N_all1000_22))
N_totalobservablenormal22_array_1000.append(float(N_obs1000_22))
N_totalrecoverablenormal22_array_1000.append(float(N_rec1000_22))
N_totalnormal195_array.append(float(N_all_195))
N_totalobservablenormal195_array.append(float(N_obs_195))
N_totalrecoverablenormal195_array.append(float(N_rec_195))
N_totalnormal195_array_03.append(float(N_all03_195))
N_totalobservablenormal195_array_03.append(float(N_obs03_195))
N_totalrecoverablenormal195_array_03.append(float(N_rec03_195))
N_totalnormal195_array_1.append(float(N_all1_195))
N_totalobservablenormal195_array_1.append(float(N_obs1_195))
N_totalrecoverablenormal195_array_1.append(float(N_rec1_195))
N_totalnormal195_array_10.append(float(N_all10_195))
N_totalobservablenormal195_array_10.append(float(N_obs10_195))
N_totalrecoverablenormal195_array_10.append(float(N_rec10_195))
N_totalnormal195_array_30.append(float(N_all30_195))
N_totalobservablenormal195_array_30.append(float(N_obs30_195))
N_totalrecoverablenormal195_array_30.append(float(N_rec30_195))
N_totalnormal195_array_100.append(float(N_all100_195))
N_totalobservablenormal195_array_100.append(float(N_obs100_195))
N_totalrecoverablenormal195_array_100.append(float(N_rec100_195))
N_totalnormal195_array_1000.append(float(N_all1000_195))
N_totalobservablenormal195_array_1000.append(float(N_obs1000_195))
N_totalrecoverablenormal195_array_1000.append(float(N_rec1000_195))
N_totalnormal = np.sum(N_totalnormal_array)
N_totalnormal_03 = np.sum(N_totalnormal_array_03)
N_totalnormal_1 = np.sum(N_totalnormal_array_1)
N_totalnormal_10 = np.sum(N_totalnormal_array_10)
N_totalnormal_30 = np.sum(N_totalnormal_array_30)
N_totalnormal_100 = np.sum(N_totalnormal_array_100)
N_totalnormal_1000 = np.sum(N_totalnormal_array_1000)
N_totalobservablenormal = np.sum(N_totalobservablenormal_array)
N_totalobservablenormal_03 = np.sum(N_totalobservablenormal_array_03)
N_totalobservablenormal_1 = np.sum(N_totalobservablenormal_array_1)
N_totalobservablenormal_10 = np.sum(N_totalobservablenormal_array_10)
N_totalobservablenormal_30 = np.sum(N_totalobservablenormal_array_30)
N_totalobservablenormal_100 = np.sum(N_totalobservablenormal_array_100)
N_totalobservablenormal_1000 = np.sum(N_totalobservablenormal_array_1000)
N_totalrecoverablenormal = np.sum(N_totalrecoverablenormal_array)
N_totalrecoverablenormal_03 = np.sum(N_totalrecoverablenormal_array_03)
N_totalrecoverablenormal_1 = np.sum(N_totalrecoverablenormal_array_1)
N_totalrecoverablenormal_10 = np.sum(N_totalrecoverablenormal_array_10)
N_totalrecoverablenormal_30 = np.sum(N_totalrecoverablenormal_array_30)
N_totalrecoverablenormal_100 = np.sum(N_totalrecoverablenormal_array_100)
N_totalrecoverablenormal_1000 = np.sum(N_totalrecoverablenormal_array_1000)
N_totalnormal22 = np.sum(N_totalnormal22_array)
N_totalnormal22_03 = np.sum(N_totalnormal22_array_03)
N_totalnormal22_1 = np.sum(N_totalnormal22_array_1)
N_totalnormal22_10 = np.sum(N_totalnormal22_array_10)
N_totalnormal22_30 = np.sum(N_totalnormal22_array_30)
N_totalnormal22_100 = np.sum(N_totalnormal22_array_100)
N_totalnormal22_1000 = np.sum(N_totalnormal22_array_1000)
N_totalobservablenormal22 = np.sum(N_totalobservablenormal22_array)
N_totalobservablenormal22_03 = np.sum(N_totalobservablenormal22_array_03)
N_totalobservablenormal22_1 = np.sum(N_totalobservablenormal22_array_1)
N_totalobservablenormal22_10 = np.sum(N_totalobservablenormal22_array_10)
N_totalobservablenormal22_30 = np.sum(N_totalobservablenormal22_array_30)
N_totalobservablenormal22_100 = np.sum(N_totalobservablenormal22_array_100)
N_totalobservablenormal22_1000 = np.sum(N_totalobservablenormal22_array_1000)
N_totalrecoverablenormal22 = np.sum(N_totalrecoverablenormal22_array)
N_totalrecoverablenormal22_03 = np.sum(N_totalrecoverablenormal22_array_03)
N_totalrecoverablenormal22_1 = np.sum(N_totalrecoverablenormal22_array_1)
N_totalrecoverablenormal22_10 = np.sum(N_totalrecoverablenormal22_array_10)
N_totalrecoverablenormal22_30 = np.sum(N_totalrecoverablenormal22_array_30)
N_totalrecoverablenormal22_100 = np.sum(N_totalrecoverablenormal22_array_100)
N_totalrecoverablenormal22_1000 = np.sum(N_totalrecoverablenormal22_array_1000)
N_totalnormal195 = np.sum(N_totalnormal195_array)
N_totalnormal195_03 = np.sum(N_totalnormal195_array_03)
N_totalnormal195_1 = np.sum(N_totalnormal195_array_1)
N_totalnormal195_10 = np.sum(N_totalnormal195_array_10)
N_totalnormal195_30 = np.sum(N_totalnormal195_array_30)
N_totalnormal195_100 = np.sum(N_totalnormal195_array_100)
N_totalnormal195_1000 = np.sum(N_totalnormal195_array_1000)
N_totalobservablenormal195 = np.sum(N_totalobservablenormal195_array)
N_totalobservablenormal195_03 = np.sum(N_totalobservablenormal195_array_03)
N_totalobservablenormal195_1 = np.sum(N_totalobservablenormal195_array_1)
N_totalobservablenormal195_10 = np.sum(N_totalobservablenormal195_array_10)
N_totalobservablenormal195_30 = np.sum(N_totalobservablenormal195_array_30)
N_totalobservablenormal195_100 = np.sum(N_totalobservablenormal195_array_100)
N_totalobservablenormal195_1000 = np.sum(N_totalobservablenormal195_array_1000)
N_totalrecoverablenormal195 = np.sum(N_totalrecoverablenormal195_array)
N_totalrecoverablenormal195_03 = np.sum(N_totalrecoverablenormal195_array_03)
N_totalrecoverablenormal195_1 = np.sum(N_totalrecoverablenormal195_array_1)
N_totalrecoverablenormal195_10 = np.sum(N_totalrecoverablenormal195_array_10)
N_totalrecoverablenormal195_30 = np.sum(N_totalrecoverablenormal195_array_30)
N_totalrecoverablenormal195_100 = np.sum(N_totalrecoverablenormal195_array_100)
N_totalrecoverablenormal195_1000 = np.sum(N_totalrecoverablenormal195_array_1000)
wholerecoverypercent_normal = (N_totalrecoverablenormal/N_totalobservablenormal)*100
wholerecoverypercent_normal_03 = (N_totalrecoverablenormal_03/N_totalobservablenormal_03)*100
wholerecoverypercent_normal_1 = (N_totalrecoverablenormal_1/N_totalobservablenormal_1)*100
wholerecoverypercent_normal_10 = (N_totalrecoverablenormal_10/N_totalobservablenormal_10)*100
wholerecoverypercent_normal_30 = (N_totalrecoverablenormal_30/N_totalobservablenormal_30)*100
wholerecoverypercent_normal_100 = (N_totalrecoverablenormal_100/N_totalobservablenormal_100)*100
wholerecoverypercent_normal_1000 = (N_totalrecoverablenormal_1000/N_totalobservablenormal_1000)*100
sigmanormal = ((N_totalrecoverablenormal**(1/2))/N_totalobservablenormal)*100
sigmanormal_03 = ((N_totalrecoverablenormal_03**(1/2))/N_totalobservablenormal_03)*100
sigmanormal_1 = ((N_totalrecoverablenormal_1**(1/2))/N_totalobservablenormal_1)*100
sigmanormal_10 = ((N_totalrecoverablenormal_10**(1/2))/N_totalobservablenormal_10)*100
sigmanormal_30 = ((N_totalrecoverablenormal_30**(1/2))/N_totalobservablenormal_30)*100
sigmanormal_100 = ((N_totalrecoverablenormal_100**(1/2))/N_totalobservablenormal_100)*100
sigmanormal_1000 = ((N_totalrecoverablenormal_1000**(1/2))/N_totalobservablenormal_1000)*100
overallrecoverypercent_normal = (N_totalrecoverablenormal/N_totalnormal)*100
overallrecoverypercent_normal_03 = (N_totalrecoverablenormal_03/N_totalnormal_03)*100
overallrecoverypercent_normal_1 = (N_totalrecoverablenormal_1/N_totalnormal_1)*100
overallrecoverypercent_normal_10 = (N_totalrecoverablenormal_10/N_totalnormal_10)*100
overallrecoverypercent_normal_30 = (N_totalrecoverablenormal_30/N_totalnormal_30)*100
overallrecoverypercent_normal_100 = (N_totalrecoverablenormal_100/N_totalnormal_100)*100
overallrecoverypercent_normal_1000 = (N_totalrecoverablenormal_1000/N_totalnormal_1000)*100
overallsigmanormal = ((N_totalrecoverablenormal**(1/2))/N_totalnormal)*100
overallsigmanormal_03 = ((N_totalrecoverablenormal_03**(1/2))/N_totalnormal_03)*100
overallsigmanormal_1 = ((N_totalrecoverablenormal_1**(1/2))/N_totalnormal_1)*100
overallsigmanormal_10 = ((N_totalrecoverablenormal_10**(1/2))/N_totalnormal_10)*100
overallsigmanormal_30 = ((N_totalrecoverablenormal_30**(1/2))/N_totalnormal_30)*100
overallsigmanormal_100 = ((N_totalrecoverablenormal_100**(1/2))/N_totalnormal_100)*100
overallsigmanormal_1000 = ((N_totalrecoverablenormal_1000**(1/2))/N_totalnormal_1000)*100
wholerecoverypercent_normal22 = (N_totalrecoverablenormal22/N_totalobservablenormal22)*100
wholerecoverypercent_normal22_03 = (N_totalrecoverablenormal22_03/N_totalobservablenormal22_03)*100
wholerecoverypercent_normal22_1 = (N_totalrecoverablenormal22_1/N_totalobservablenormal22_1)*100
wholerecoverypercent_normal22_10 = (N_totalrecoverablenormal22_10/N_totalobservablenormal22_10)*100
wholerecoverypercent_normal22_30 = (N_totalrecoverablenormal22_30/N_totalobservablenormal22_30)*100
wholerecoverypercent_normal22_100 = (N_totalrecoverablenormal22_100/N_totalobservablenormal22_100)*100
wholerecoverypercent_normal22_1000 = (N_totalrecoverablenormal22_1000/N_totalobservablenormal22_1000)*100
sigmanormal22 = ((N_totalrecoverablenormal22**(1/2))/N_totalobservablenormal22)*100
sigmanormal22_03 = ((N_totalrecoverablenormal22_03**(1/2))/N_totalobservablenormal22_03)*100
sigmanormal22_1 = ((N_totalrecoverablenormal22_1**(1/2))/N_totalobservablenormal22_1)*100
sigmanormal22_10 = ((N_totalrecoverablenormal22_10**(1/2))/N_totalobservablenormal22_10)*100
sigmanormal22_30 = ((N_totalrecoverablenormal22_30**(1/2))/N_totalobservablenormal22_30)*100
sigmanormal22_100 = ((N_totalrecoverablenormal22_100**(1/2))/N_totalobservablenormal22_100)*100
sigmanormal22_1000 = ((N_totalrecoverablenormal22_1000**(1/2))/N_totalobservablenormal22_1000)*100
overallrecoverypercent_normal22 = (N_totalrecoverablenormal22/N_totalnormal22)*100
overallrecoverypercent_normal22_03 = (N_totalrecoverablenormal22_03/N_totalnormal22_03)*100
overallrecoverypercent_normal22_1 = (N_totalrecoverablenormal22_1/N_totalnormal22_1)*100
overallrecoverypercent_normal22_10 = (N_totalrecoverablenormal22_10/N_totalnormal22_10)*100
overallrecoverypercent_normal22_30 = (N_totalrecoverablenormal22_30/N_totalnormal22_30)*100
overallrecoverypercent_normal22_100 = (N_totalrecoverablenormal22_100/N_totalnormal22_100)*100
overallrecoverypercent_normal22_1000 = (N_totalrecoverablenormal22_1000/N_totalnormal22_1000)*100
overallsigmanormal22 = ((N_totalrecoverablenormal22**(1/2))/N_totalnormal22)*100
overallsigmanormal22_03 = ((N_totalrecoverablenormal22_03**(1/2))/N_totalnormal22_03)*100
overallsigmanormal22_1 = ((N_totalrecoverablenormal22_1**(1/2))/N_totalnormal22_1)*100
overallsigmanormal22_10 = ((N_totalrecoverablenormal22_10**(1/2))/N_totalnormal22_10)*100
overallsigmanormal22_30 = ((N_totalrecoverablenormal22_30**(1/2))/N_totalnormal22_30)*100
overallsigmanormal22_100 = ((N_totalrecoverablenormal22_100**(1/2))/N_totalnormal22_100)*100
overallsigmanormal22_1000 = ((N_totalrecoverablenormal22_1000**(1/2))/N_totalnormal22_1000)*100
wholerecoverypercent_normal195 = (N_totalrecoverablenormal195/N_totalobservablenormal195)*100
wholerecoverypercent_normal195_03 = (N_totalrecoverablenormal195_03/N_totalobservablenormal195_03)*100
wholerecoverypercent_normal195_1 = (N_totalrecoverablenormal195_1/N_totalobservablenormal195_1)*100
wholerecoverypercent_normal195_10 = (N_totalrecoverablenormal195_10/N_totalobservablenormal195_10)*100
wholerecoverypercent_normal195_30 = (N_totalrecoverablenormal195_30/N_totalobservablenormal195_30)*100
wholerecoverypercent_normal195_100 = (N_totalrecoverablenormal195_100/N_totalobservablenormal195_100)*100
wholerecoverypercent_normal195_1000 = (N_totalrecoverablenormal195_1000/N_totalobservablenormal195_1000)*100
sigmanormal195 = ((N_totalrecoverablenormal195**(1/2))/N_totalobservablenormal195)*100
sigmanormal195_03 = ((N_totalrecoverablenormal195_03**(1/2))/N_totalobservablenormal195_03)*100
sigmanormal195_1 = ((N_totalrecoverablenormal195_1**(1/2))/N_totalobservablenormal195_1)*100
sigmanormal195_10 = ((N_totalrecoverablenormal195_10**(1/2))/N_totalobservablenormal195_10)*100
sigmanormal195_30 = ((N_totalrecoverablenormal195_30**(1/2))/N_totalobservablenormal195_30)*100
sigmanormal195_100 = ((N_totalrecoverablenormal195_100**(1/2))/N_totalobservablenormal195_100)*100
sigmanormal195_1000 = ((N_totalrecoverablenormal195_1000**(1/2))/N_totalobservablenormal195_1000)*100
overallrecoverypercent_normal195 = (N_totalrecoverablenormal195/N_totalnormal195)*100
overallrecoverypercent_normal195_03 = (N_totalrecoverablenormal195_03/N_totalnormal195_03)*100
overallrecoverypercent_normal195_1 = (N_totalrecoverablenormal195_1/N_totalnormal195_1)*100
overallrecoverypercent_normal195_10 = (N_totalrecoverablenormal195_10/N_totalnormal195_10)*100
overallrecoverypercent_normal195_30 = (N_totalrecoverablenormal195_30/N_totalnormal195_30)*100
overallrecoverypercent_normal195_100 = (N_totalrecoverablenormal195_100/N_totalnormal195_100)*100
overallrecoverypercent_normal195_1000 = (N_totalrecoverablenormal195_1000/N_totalnormal195_1000)*100
overallsigmanormal195 = ((N_totalrecoverablenormal195**(1/2))/N_totalnormal195)*100
overallsigmanormal195_03 = ((N_totalrecoverablenormal195_03**(1/2))/N_totalnormal195_03)*100
overallsigmanormal195_1 = ((N_totalrecoverablenormal195_1**(1/2))/N_totalnormal195_1)*100
overallsigmanormal195_10 = ((N_totalrecoverablenormal195_10**(1/2))/N_totalnormal195_10)*100
overallsigmanormal195_30 = ((N_totalrecoverablenormal195_30**(1/2))/N_totalnormal195_30)*100
overallsigmanormal195_100 = ((N_totalrecoverablenormal195_100**(1/2))/N_totalnormal195_100)*100
overallsigmanormal195_1000 = ((N_totalrecoverablenormal195_1000**(1/2))/N_totalnormal195_1000)*100
print("N_totalnormal = ", N_totalnormal, "and in log = ", np.log10(N_totalnormal), "**** N_totalobservablenormal = ", N_totalobservablenormal, "and in log = ", np.log10(N_totalobservablenormal), "**** N_totalrecoverablenormal = ", N_totalrecoverablenormal, "and in log = ", np.log10(N_totalrecoverablenormal))
print("N_totalnormal_03 = ", N_totalnormal_03, "and in log = ", np.log10(N_totalnormal_03), "**** N_totalobservablenormal_03 = ", N_totalobservablenormal_03, "and in log = ", np.log10(N_totalobservablenormal_03), "**** N_totalrecoverablenormal_03 = ", N_totalrecoverablenormal_03, "and in log = ", np.log10(N_totalrecoverablenormal_03))
print("N_totalnormal_1 = ", N_totalnormal_1, "and in log = ", np.log10(N_totalnormal_1), "**** N_totalobservablenormal_1 = ", N_totalobservablenormal_1, "and in log = ", np.log10(N_totalobservablenormal_1), "**** N_totalrecoverablenormal_1 = ", N_totalrecoverablenormal_1, "and in log = ", np.log10(N_totalrecoverablenormal_1))
print("N_totalnormal_10 = ", N_totalnormal_10, "and in log = ", np.log10(N_totalnormal_10), "**** N_totalobservablenormal_10 = ", N_totalobservablenormal_10, "and in log = ", np.log10(N_totalobservablenormal_10), "**** N_totalrecoverablenormal_10 = ", N_totalrecoverablenormal_10, "and in log = ", np.log10(N_totalrecoverablenormal_10))
print("N_totalnormal_30 = ", N_totalnormal_30, "and in log = ", np.log10(N_totalnormal_30), "**** N_totalobservablenormal_30 = ", N_totalobservablenormal_30, "and in log = ", np.log10(N_totalobservablenormal_30), "**** N_totalrecoverablenormal_30 = ", N_totalrecoverablenormal_30, "and in log = ", np.log10(N_totalrecoverablenormal_30))
print("N_totalnormal_100 = ", N_totalnormal_100, "and in log = ", np.log10(N_totalnormal_100), "**** N_totalobservablenormal_100 = ", N_totalobservablenormal_100, "and in log = ", np.log10(N_totalobservablenormal_100), "**** N_totalrecoverablenormal_100 = ", N_totalrecoverablenormal_100, "and in log = ", np.log10(N_totalrecoverablenormal_100))
print("N_totalnormal_1000 = ", N_totalnormal_1000, "and in log = ", np.log10(N_totalnormal_1000), "**** N_totalobservablenormal_1000 = ", N_totalobservablenormal_1000, "and in log = ", np.log10(N_totalobservablenormal_1000), "**** N_totalrecoverablenormal_1000 = ", N_totalrecoverablenormal_1000, "and in log = ", np.log10(N_totalrecoverablenormal_1000))
print("********************************")
print("wholerecoverypercent_normal = $", wholerecoverypercent_normal, "/pm", sigmanormal, "$")
print("wholerecoverypercent_normal_03 = $", wholerecoverypercent_normal_03, "/pm", sigmanormal_03, "$")
print("wholerecoverypercent_normal_1 = $", wholerecoverypercent_normal_1, "/pm", sigmanormal_1, "$")
print("wholerecoverypercent_normal_10 = $", wholerecoverypercent_normal_10, "/pm", sigmanormal_10, "$")
print("wholerecoverypercent_normal_30 = $", wholerecoverypercent_normal_30, "/pm", sigmanormal_30, "$")
print("wholerecoverypercent_normal_100 = $", wholerecoverypercent_normal_100, "/pm", sigmanormal_100, "$")
print("wholerecoverypercent_normal_1000 = $", wholerecoverypercent_normal_1000, "/pm", sigmanormal_1000, "$")
print("********************************")
print("overallrecoverypercent_normal = $", overallrecoverypercent_normal, "/pm", overallsigmanormal)
print("overallrecoverypercent_normal_03 = $", overallrecoverypercent_normal_03, "/pm", overallsigmanormal_03)
print("overallrecoverypercent_normal_1 = $", overallrecoverypercent_normal_1, "/pm", overallsigmanormal_1)
print("overallrecoverypercent_normal_10 = $", overallrecoverypercent_normal_10, "/pm", overallsigmanormal_10)
print("overallrecoverypercent_normal_30 = $", overallrecoverypercent_normal_30, "/pm", overallsigmanormal_30)
print("overallrecoverypercent_normal_100 = $", overallrecoverypercent_normal_100, "/pm", overallsigmanormal_100)
print("overallrecoverypercent_normal_1000 = $", overallrecoverypercent_normal_1000, "/pm", overallsigmanormal_1000)
print("################################")
print("N_totalnormal22 = ", N_totalnormal22, "and in log = ", np.log10(N_totalnormal22), "**** N_totalobservablenormal22 = ", N_totalobservablenormal22, "and in log = ", np.log10(N_totalobservablenormal22), "**** N_totalrecoverablenormal22 = ", N_totalrecoverablenormal22, "and in log = ", np.log10(N_totalrecoverablenormal22))
print("N_totalnormal22_03 = ", N_totalnormal22_03, "and in log = ", np.log10(N_totalnormal22_03), "**** N_totalobservablenormal22_03 = ", N_totalobservablenormal22_03, "and in log = ", np.log10(N_totalobservablenormal22_03), "**** N_totalrecoverablenormal22_03 = ", N_totalrecoverablenormal22_03, "and in log = ", np.log10(N_totalrecoverablenormal22_03))
print("N_totalnormal22_1 = ", N_totalnormal22_1, "and in log = ", np.log10(N_totalnormal22_1), "**** N_totalobservablenormal22_1 = ", N_totalobservablenormal22_1, "and in log = ", np.log10(N_totalobservablenormal22_1), "**** N_totalrecoverablenormal22_1 = ", N_totalrecoverablenormal22_1, "and in log = ", np.log10(N_totalrecoverablenormal22_1))
print("N_totalnormal22_10 = ", N_totalnormal22_10, "and in log = ", np.log10(N_totalnormal22_10), "**** N_totalobservablenormal22_10 = ", N_totalobservablenormal22_10, "and in log = ", np.log10(N_totalobservablenormal22_10), "**** N_totalrecoverablenormal22_10 = ", N_totalrecoverablenormal22_10, "and in log = ", np.log10(N_totalrecoverablenormal22_10))
print("N_totalnormal22_30 = ", N_totalnormal22_30, "and in log = ", np.log10(N_totalnormal22_30), "**** N_totalobservablenormal22_30 = ", N_totalobservablenormal22_30, "and in log = ", np.log10(N_totalobservablenormal22_30), "**** N_totalrecoverablenormal22_30 = ", N_totalrecoverablenormal22_30, "and in log = ", np.log10(N_totalrecoverablenormal22_30))
print("N_totalnormal22_100 = ", N_totalnormal22_100, "and in log = ", np.log10(N_totalnormal22_100), "**** N_totalobservablenormal22_100 = ", N_totalobservablenormal22_100, "and in log = ", np.log10(N_totalobservablenormal22_100), "**** N_totalrecoverablenormal22_100 = ", N_totalrecoverablenormal22_100, "and in log = ", np.log10(N_totalrecoverablenormal22_100))
print("N_totalnormal22_1000 = ", N_totalnormal22_1000, "and in log = ", np.log10(N_totalnormal22_1000), "**** N_totalobservablenormal22_1000 = ", N_totalobservablenormal22_1000, "and in log = ", np.log10(N_totalobservablenormal22_1000), "**** N_totalrecoverablenormal22_1000 = ", N_totalrecoverablenormal22_1000, "and in log = ", np.log10(N_totalrecoverablenormal22_1000))
print("********************************")
print("wholerecoverypercent_normal22 = $", wholerecoverypercent_normal22, "/pm", sigmanormal22, "$")
print("wholerecoverypercent_normal22_03 = $", wholerecoverypercent_normal22_03, "/pm", sigmanormal22_03, "$")
print("wholerecoverypercent_normal22_1 = $", wholerecoverypercent_normal22_1, "/pm", sigmanormal22_1, "$")
print("wholerecoverypercent_normal22_10 = $", wholerecoverypercent_normal22_10, "/pm", sigmanormal22_10, "$")
print("wholerecoverypercent_normal22_30 = $", wholerecoverypercent_normal22_30, "/pm", sigmanormal22_30, "$")
print("wholerecoverypercent_normal22_100 = $", wholerecoverypercent_normal22_100, "/pm", sigmanormal22_100, "$")
print("wholerecoverypercent_normal22_1000 = $", wholerecoverypercent_normal22_1000, "/pm", sigmanormal22_1000, "$")
print("********************************")
print("overallrecoverypercent_normal22 = $", overallrecoverypercent_normal22, "/pm", overallsigmanormal22, "$")
print("overallrecoverypercent_normal22_03 = $", overallrecoverypercent_normal22_03, "/pm", overallsigmanormal22_03, "$")
print("overallrecoverypercent_normal22_1 = $", overallrecoverypercent_normal22_1, "/pm", overallsigmanormal22_1, "$")
print("overallrecoverypercent_normal22_10 = $", overallrecoverypercent_normal22_10, "/pm", overallsigmanormal22_10, "$")
print("overallrecoverypercent_normal22_30 = $", overallrecoverypercent_normal22_30, "/pm", overallsigmanormal22_30, "$")
print("overallrecoverypercent_normal22_100 = $", overallrecoverypercent_normal22_100, "/pm", overallsigmanormal22_100, "$")
print("overallrecoverypercent_normal22_1000 = $", overallrecoverypercent_normal22_1000, "/pm", overallsigmanormal22_1000, "$")
print("###############################")
print("N_totalnormal195 = ", N_totalnormal195, "and in log = ", np.log10(N_totalnormal195), "**** N_totalobservablenormal195 = ", N_totalobservablenormal195, "and in log = ", np.log10(N_totalobservablenormal195), "**** N_totalrecoverablenormal195 = ", N_totalrecoverablenormal195, "and in log = ", np.log10(N_totalrecoverablenormal195))
print("N_totalnormal195_03 = ", N_totalnormal195_03, "and in log = ", np.log10(N_totalnormal195_03), "**** N_totalobservablenormal195_03 = ", N_totalobservablenormal195_03, "and in log = ", np.log10(N_totalobservablenormal195_03), "**** N_totalrecoverablenormal195_03 = ", N_totalrecoverablenormal195_03, "and in log = ", np.log10(N_totalrecoverablenormal195_03))
print("N_totalnormal195_1 = ", N_totalnormal195_1, "and in log = ", np.log10(N_totalnormal195_1), "**** N_totalobservablenormal195_1 = ", N_totalobservablenormal195_1, "and in log = ", np.log10(N_totalobservablenormal195_1), "**** N_totalrecoverablenormal195_1 = ", N_totalrecoverablenormal195_1, "and in log = ", np.log10(N_totalrecoverablenormal195_1))
print("N_totalnormal195_10 = ", N_totalnormal195_10, "and in log = ", np.log10(N_totalnormal195_10), "**** N_totalobservablenormal195_10 = ", N_totalobservablenormal195_10, "and in log = ", np.log10(N_totalobservablenormal195_10), "**** N_totalrecoverablenormal195_10 = ", N_totalrecoverablenormal195_10, "and in log = ", np.log10(N_totalrecoverablenormal195_10))
print("N_totalnormal195_30 = ", N_totalnormal195_30, "and in log = ", np.log10(N_totalnormal195_30), "**** N_totalobservablenormal195_30 = ", N_totalobservablenormal195_30, "and in log = ", np.log10(N_totalobservablenormal195_30), "**** N_totalrecoverablenormal195_30 = ", N_totalrecoverablenormal195_30, "and in log = ", np.log10(N_totalrecoverablenormal195_30))
print("N_totalnormal195_100 = ", N_totalnormal195_100, "and in log = ", np.log10(N_totalnormal195_100), "**** N_totalobservablenormal195_100 = ", N_totalobservablenormal195_100, "and in log = ", np.log10(N_totalobservablenormal195_100), "**** N_totalrecoverablenormal195_100 = ", N_totalrecoverablenormal195_100, "and in log = ", np.log10(N_totalrecoverablenormal195_100))
print("N_totalnormal195_1000 = ", N_totalnormal195_1000, "and in log = ", np.log10(N_totalnormal195_1000), "**** N_totalobservablenormal195_1000 = ", N_totalobservablenormal195_1000, "and in log = ", np.log10(N_totalobservablenormal195_1000), "**** N_totalrecoverablenormal195_1000 = ", N_totalrecoverablenormal195_1000, "and in log = ", np.log10(N_totalrecoverablenormal195_1000))
print("********************************")
print("wholerecoverypercent_normal195 = $", wholerecoverypercent_normal195, "/pm", sigmanormal195, "$")
print("wholerecoverypercent_normal195_03 = $", wholerecoverypercent_normal195_03, "/pm", sigmanormal195_03, "$")
print("wholerecoverypercent_normal195_1 = $", wholerecoverypercent_normal195_1, "/pm", sigmanormal195_1, "$")
print("wholerecoverypercent_normal195_10 = $", wholerecoverypercent_normal195_10, "/pm", sigmanormal195_10, "$")
print("wholerecoverypercent_normal195_30 = $", wholerecoverypercent_normal195_30, "/pm", sigmanormal195_30, "$")
print("wholerecoverypercent_normal195_100 = $", wholerecoverypercent_normal195_100, "/pm", sigmanormal195_100, "$")
print("wholerecoverypercent_normal195_1000 = $", wholerecoverypercent_normal195_1000, "/pm", sigmanormal195_1000, "$")
print("********************************")
print("overallrecoverypercent_normal195 = $", overallrecoverypercent_normal195, "/pm", overallsigmanormal195, "$")
print("overallrecoverypercent_normal195_03 = $", overallrecoverypercent_normal195_03, "/pm", overallsigmanormal195_03, "$")
print("overallrecoverypercent_normal195_1 = $", overallrecoverypercent_normal195_1, "/pm", overallsigmanormal195_1, "$")
print("overallrecoverypercent_normal195_10 = $", overallrecoverypercent_normal195_10, "/pm", overallsigmanormal195_10, "$")
print("overallrecoverypercent_normal195_30 = $", overallrecoverypercent_normal195_30, "/pm", overallsigmanormal195_30, "$")
print("overallrecoverypercent_normal195_100 = $", overallrecoverypercent_normal195_100, "/pm", overallsigmanormal195_100, "$")
print("overallrecoverypercent_normal195_1000 = $", overallrecoverypercent_normal195_1000, "/pm", overallsigmanormal195_1000, "$")
print("#############################")
print("binarypercent_22 = $", (N_totalnormal22/N_totalnormal)*100, "/pm", ((N_totalnormal22**(1/2))/N_totalnormal)*100, "$")
print("binarypercent_195 = $", (N_totalnormal195/N_totalnormal)*100, "/pm", ((N_totalnormal195**(1/2))/N_totalnormal)*100, "$")
print("binarypercent_03 = $", (N_totalnormal_03/N_totalnormal)*100, "/pm", ((N_totalnormal_03**(1/2))/N_totalnormal)*100, "$")
print("binarypercent_1 = $", (N_totalnormal_1/N_totalnormal)*100, "/pm", ((N_totalnormal_1**(1/2))/N_totalnormal)*100, "$")
print("binarypercent_10 = $", (N_totalnormal_10/N_totalnormal)*100, "/pm", ((N_totalnormal_10**(1/2))/N_totalnormal)*100, "$")
print("binarypercent_30 = $", (N_totalnormal_30/N_totalnormal)*100, "/pm", ((N_totalnormal_30**(1/2))/N_totalnormal)*100, "$")
print("binarypercent_100 = $", (N_totalnormal_100/N_totalnormal)*100, "/pm", ((N_totalnormal_100**(1/2))/N_totalnormal)*100, "$")
print("binarypercent_1000 = $", (N_totalnormal_1000/N_totalnormal)*100, "/pm", ((N_totalnormal_1000**(1/2))/N_totalnormal)*100, "$")
print("observablepercent_03 = $", (N_totalobservablenormal_03/N_totalnormal_03)*100, "/pm", ((N_totalobservablenormal_03**(1/2))/N_totalnormal_03)*100, "$")
print("observablepercent_1 = $", (N_totalobservablenormal_1/N_totalnormal_1)*100, "/pm", ((N_totalobservablenormal_1**(1/2))/N_totalnormal_1)*100, "$")
print("observablepercent_10 = $", (N_totalobservablenormal_10/N_totalnormal_10)*100, "/pm", ((N_totalobservablenormal_10**(1/2))/N_totalnormal_10)*100, "$")
print("observablepercent_30 = $", (N_totalobservablenormal_30/N_totalnormal_30)*100, "/pm", ((N_totalobservablenormal_30**(1/2))/N_totalnormal_30)*100, "$")
print("observablepercent_100 = $", (N_totalobservablenormal_100/N_totalnormal_100)*100, "/pm", ((N_totalobservablenormal_100**(1/2))/N_totalnormal_100)*100, "$")
print("observablepercent_1000 = $", (N_totalobservablenormal_1000/N_totalnormal_1000)*100, "/pm", ((N_totalobservablenormal_1000**(1/2))/N_totalnormal_1000)*100, "$")
print("observablepercent = $", (N_totalobservablenormal/N_totalnormal)*100, "/pm", ((N_totalobservablenormal**(1/2))/N_totalnormal)*100, "$")
print("observablepercent22 = $", (N_totalobservablenormal22/N_totalnormal22)*100, "/pm", ((N_totalobservablenormal22**(1/2))/N_totalnormal22)*100, "$")
print("observablepercent195 = $", (N_totalobservablenormal195/N_totalnormal195)*100, "/pm", ((N_totalobservablenormal195**(1/2))/N_totalnormal195)*100, "$")
for filefast_ in sorted(allFiles_fast):
filename = filefast_[69:] #when file path no longer has /old in it, will be filefast_[65:]
fileid = filename.replace('output_file.csv', '')  # str.strip would drop individual characters, not the suffix
print ("I'm starting " + fileid)
datfast = pd.read_csv(filefast_, sep = ',', header=2)
PeriodIn = datfast['p'] # input period -- 'p' in data file
##########################################################
datfast1 = pd.read_csv(filefast_, sep = ',', header=0, nrows=1)
N_tri = datfast1["NstarsTRILEGAL"][0]
Nall = len(PeriodIn)
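# Estimate how many binaries this field represents: histogram the primary
# masses (bins `mbins`, presumably defined earlier in the script), weight each
# bin by the mass-dependent binary-fraction fit `fbFit`, and scale the
# TRILEGAL star count N_tri by the resulting binary fraction fb.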
m1hAll0, m1b = np.histogram(datfast["m1"], bins=mbins)
dm1 = np.diff(m1b)
m1val = m1b[:-1] + dm1/2.
fb = np.sum(m1hAll0/Nall*fbFit(m1val))
N_mult = N_tri*fb
##########################################################
if len(PeriodIn) == 0.:
continue
if N_tri == 0:
continue
else:
PeriodOut = datfast['LSM_PERIOD'] #LSM_PERIOD in data file
appMagMean = datfast['appMagMean'] #apparent magnitude, will use to make cuts for 24 (default), 22, and then Kepler's range (?? -- brighter than LSST can manage-- to 19) OR 19.5 (SNR = 10)
observable = datfast.loc[PeriodOut != -999].index
observable_03 = datfast.loc[(PeriodIn <= 0.3) & (PeriodOut != -999)].index
observable_1 = datfast.loc[(PeriodIn <= 1) & (PeriodOut != -999)].index
observable_10 = datfast.loc[(PeriodIn <= 10) & (PeriodOut != -999)].index
observable_30 = datfast.loc[(PeriodIn <= 30) & (PeriodOut != -999)].index
observable_100 = datfast.loc[(PeriodIn <= 100) & (PeriodOut != -999)].index
observable_1000 = datfast.loc[(PeriodIn <= 1000) & (PeriodOut != -999)].index
observable_22 = datfast.loc[(PeriodOut != -999) & (appMagMean <= 22.)].index
observable_03_22 = datfast.loc[(PeriodIn <= 0.3) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_1_22 = datfast.loc[(PeriodIn <= 1) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_10_22 = datfast.loc[(PeriodIn <= 10) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_30_22 = datfast.loc[(PeriodIn <= 30) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_100_22 = datfast.loc[(PeriodIn <= 100) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_1000_22 = datfast.loc[(PeriodIn <= 1000) & (PeriodOut != -999) & (appMagMean <= 22.)].index
observable_195 = datfast.loc[(PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_03_195 = datfast.loc[(PeriodIn <= 0.3) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_1_195 = datfast.loc[(PeriodIn <= 1) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_10_195 = datfast.loc[(PeriodIn <= 10) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_30_195 = datfast.loc[(PeriodIn <= 30) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_100_195 = datfast.loc[(PeriodIn <= 100) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
observable_1000_195 = datfast.loc[(PeriodIn <= 1000) & (PeriodOut != -999) & (appMagMean <= 19.5)].index
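# Period-recovery criterion: the LS period counts as recovered if it matches
# the input period, half of it, or twice it (common period aliases) to within
# the fractional tolerance `cutP` defined earlier in the script.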
fullP = abs(PeriodOut - PeriodIn)/PeriodIn
halfP = abs(PeriodOut - 0.5*PeriodIn)/(0.5*PeriodIn)
twiceP = abs(PeriodOut - 2*PeriodIn)/(2*PeriodIn)
recoverable = datfast.loc[(PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_03 = datfast.loc[(PeriodIn <= 0.3) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_1 = datfast.loc[(PeriodIn <= 1) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_10 = datfast.loc[(PeriodIn <= 10) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_30 = datfast.loc[(PeriodIn <= 30) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_100 = datfast.loc[(PeriodIn <= 100) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_1000 = datfast.loc[(PeriodIn <= 1000) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP))].index
recoverable_22 = datfast.loc[(PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_03_22 = datfast.loc[(PeriodIn <= 0.3) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_1_22 = datfast.loc[(PeriodIn <= 1) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_10_22 = datfast.loc[(PeriodIn <= 10) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_30_22 = datfast.loc[(PeriodIn <= 30) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_100_22 = datfast.loc[(PeriodIn <= 100) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_1000_22 = datfast.loc[(PeriodIn <= 1000) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 22.)].index
recoverable_195 = datfast.loc[(PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_03_195 = datfast.loc[(PeriodIn <= 0.3) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_1_195 = datfast.loc[(PeriodIn <= 1) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_10_195 = datfast.loc[(PeriodIn <= 10) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_30_195 = datfast.loc[(PeriodIn <= 30) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_100_195 = datfast.loc[(PeriodIn <= 100) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
recoverable_1000_195 = datfast.loc[(PeriodIn <= 1000) & (PeriodOut != -999) & ((fullP < cutP) | (halfP < cutP) | (twiceP < cutP)) & (appMagMean <= 19.5)].index
P03 = datfast.loc[PeriodIn <= 0.3].index
P1 = datfast.loc[PeriodIn <= 1].index
P10 = datfast.loc[PeriodIn <= 10].index
P30 = datfast.loc[PeriodIn <= 30].index
P100 = datfast.loc[PeriodIn <= 100].index
P1000 = datfast.loc[PeriodIn <= 1000].index
P_22 = datfast.loc[appMagMean <= 22.].index
P03_22 = datfast.loc[(PeriodIn <= 0.3) & (appMagMean <= 22.)].index
P1_22 = datfast.loc[(PeriodIn <= 1) & (appMagMean <= 22.)].index
P10_22 = datfast.loc[(PeriodIn <= 10) & (appMagMean <= 22.)].index
P30_22 = datfast.loc[(PeriodIn <= 30) & (appMagMean <= 22.)].index
P100_22 = datfast.loc[(PeriodIn <= 100) & (appMagMean <= 22.)].index
P1000_22 = datfast.loc[(PeriodIn <= 1000) & (appMagMean <= 22.)].index
P_195 = datfast.loc[appMagMean <= 19.5].index
P03_195 = datfast.loc[(PeriodIn <= 0.3) & (appMagMean <= 19.5)].index
P1_195 = datfast.loc[(PeriodIn <= 1) & (appMagMean <= 19.5)].index
P10_195 = datfast.loc[(PeriodIn <= 10) & (appMagMean <= 19.5)].index
P30_195 = datfast.loc[(PeriodIn <= 30) & (appMagMean <= 19.5)].index
P100_195 = datfast.loc[(PeriodIn <= 100) & (appMagMean <= 19.5)].index
P1000_195 = datfast.loc[(PeriodIn <= 1000) & (appMagMean <= 19.5)].index
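# Convert raw counts of simulated systems to population estimates: the
# fraction of systems passing each period/magnitude cut times N_mult, the
# expected number of binaries in this field.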
N_all = (len(PeriodIn)/len(PeriodIn))*N_mult
N_all03 = (len(P03)/len(PeriodIn))*N_mult
N_all1 = (len(P1)/len(PeriodIn))*N_mult
N_all10 = (len(P10)/len(PeriodIn))*N_mult
N_all30 = (len(P30)/len(PeriodIn))*N_mult
N_all100 = (len(P100)/len(PeriodIn))*N_mult
N_all1000 = (len(P1000)/len(PeriodIn))*N_mult
N_all_22 = (len(P_22)/len(PeriodIn))*N_mult
N_all03_22 = (len(P03_22)/len(PeriodIn))*N_mult
N_all1_22 = (len(P1_22)/len(PeriodIn))*N_mult
N_all10_22 = (len(P10_22)/len(PeriodIn))*N_mult
N_all30_22 = (len(P30_22)/len(PeriodIn))*N_mult
N_all100_22 = (len(P100_22)/len(PeriodIn))*N_mult
N_all1000_22 = (len(P1000_22)/len(PeriodIn))*N_mult
N_all_195 = (len(P_195)/len(PeriodIn))*N_mult
N_all03_195 = (len(P03_195)/len(PeriodIn))*N_mult
N_all1_195 = (len(P1_195)/len(PeriodIn))*N_mult
N_all10_195 = (len(P10_195)/len(PeriodIn))*N_mult
N_all30_195 = (len(P30_195)/len(PeriodIn))*N_mult
N_all100_195 = (len(P100_195)/len(PeriodIn))*N_mult
N_all1000_195 = (len(P1000_195)/len(PeriodIn))*N_mult
N_obs = (len(observable)/len(PeriodIn))*N_mult
N_obs03 = (len(observable_03)/len(PeriodIn))*N_mult
N_obs1 = (len(observable_1)/len(PeriodIn))*N_mult
N_obs10 = (len(observable_10)/len(PeriodIn))*N_mult
N_obs30 = (len(observable_30)/len(PeriodIn))*N_mult
N_obs100 = (len(observable_100)/len(PeriodIn))*N_mult
N_obs1000 = (len(observable_1000)/len(PeriodIn))*N_mult
N_obs_22 = (len(observable_22)/len(PeriodIn))*N_mult
N_obs03_22 = (len(observable_03_22)/len(PeriodIn))*N_mult
N_obs1_22 = (len(observable_1_22)/len(PeriodIn))*N_mult
N_obs10_22 = (len(observable_10_22)/len(PeriodIn))*N_mult
N_obs30_22 = (len(observable_30_22)/len(PeriodIn))*N_mult
N_obs100_22 = (len(observable_100_22)/len(PeriodIn))*N_mult
N_obs1000_22 = (len(observable_1000_22)/len(PeriodIn))*N_mult
N_obs_195 = (len(observable_195)/len(PeriodIn))*N_mult
N_obs03_195 = (len(observable_03_195)/len(PeriodIn))*N_mult
N_obs1_195 = (len(observable_1_195)/len(PeriodIn))*N_mult
N_obs10_195 = (len(observable_10_195)/len(PeriodIn))*N_mult
N_obs30_195 = (len(observable_30_195)/len(PeriodIn))*N_mult
N_obs100_195 = (len(observable_100_195)/len(PeriodIn))*N_mult
N_obs1000_195 = (len(observable_1000_195)/len(PeriodIn))*N_mult
N_rec = (len(recoverable)/len(PeriodIn))*N_mult
N_rec03 = (len(recoverable_03)/len(PeriodIn))*N_mult
N_rec1 = (len(recoverable_1)/len(PeriodIn))*N_mult
N_rec10 = (len(recoverable_10)/len(PeriodIn))*N_mult
N_rec30 = (len(recoverable_30)/len(PeriodIn))*N_mult
N_rec100 = (len(recoverable_100)/len(PeriodIn))*N_mult
N_rec1000 = (len(recoverable_1000)/len(PeriodIn))*N_mult
N_rec_22 = (len(recoverable_22)/len(PeriodIn))*N_mult
N_rec03_22 = (len(recoverable_03_22)/len(PeriodIn))*N_mult
N_rec1_22 = (len(recoverable_1_22)/len(PeriodIn))*N_mult
N_rec10_22 = (len(recoverable_10_22)/len(PeriodIn))*N_mult
N_rec30_22 = (len(recoverable_30_22)/len(PeriodIn))*N_mult
N_rec100_22 = (len(recoverable_100_22)/len(PeriodIn))*N_mult
N_rec1000_22 = (len(recoverable_1000_22)/len(PeriodIn))*N_mult
N_rec_195 = (len(recoverable_195)/len(PeriodIn))*N_mult
N_rec03_195 = (len(recoverable_03_195)/len(PeriodIn))*N_mult
N_rec1_195 = (len(recoverable_1_195)/len(PeriodIn))*N_mult
N_rec10_195 = (len(recoverable_10_195)/len(PeriodIn))*N_mult
N_rec30_195 = (len(recoverable_30_195)/len(PeriodIn))*N_mult
N_rec100_195 = (len(recoverable_100_195)/len(PeriodIn))*N_mult
N_rec1000_195 = (len(recoverable_1000_195)/len(PeriodIn))*N_mult
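# Accumulate this file's population-weighted totals into the running
# per-field arrays.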
N_totalfast_array.append(float(N_all))
N_totalobservablefast_array.append(float(N_obs))
N_totalrecoverablefast_array.append(float(N_rec))
N_totalfast_array_03.append(float(N_all03))
N_totalobservablefast_array_03.append(float(N_obs03))
N_totalrecoverablefast_array_03.append(float(N_rec03))
N_totalfast_array_1.append(float(N_all1))
N_totalobservablefast_array_1.append(float(N_obs1))
N_totalrecoverablefast_array_1.append(float(N_rec1))
N_totalfast_array_10.append(float(N_all10))
N_totalobservablefast_array_10.append(float(N_obs10))
N_totalrecoverablefast_array_10.append(float(N_rec10))
N_totalfast_array_30.append(float(N_all30))
N_totalobservablefast_array_30.append(float(N_obs30))
N_totalrecoverablefast_array_30.append(float(N_rec30))
N_totalfast_array_100.append(float(N_all100))
N_totalobservablefast_array_100.append(float(N_obs100))
N_totalrecoverablefast_array_100.append(float(N_rec100))
N_totalfast_array_1000.append(float(N_all1000))
N_totalobservablefast_array_1000.append(float(N_obs1000))
N_totalrecoverablefast_array_1000.append(float(N_rec1000))
N_totalfast22_array.append(float(N_all_22))
N_totalobservablefast22_array.append(float(N_obs_22))
N_totalrecoverablefast22_array.append(float(N_rec_22))
N_totalfast22_array_03.append(float(N_all03_22))
N_totalobservablefast22_array_03.append(float(N_obs03_22))
N_totalrecoverablefast22_array_03.append(float(N_rec03_22))
N_totalfast22_array_1.append(float(N_all1_22))
N_totalobservablefast22_array_1.append(float(N_obs1_22))
N_totalrecoverablefast22_array_1.append(float(N_rec1_22))
N_totalfast22_array_10.append(float(N_all10_22))
N_totalobservablefast22_array_10.append(float(N_obs10_22))
N_totalrecoverablefast22_array_10.append(float(N_rec10_22))
N_totalfast22_array_30.append(float(N_all30_22))
N_totalobservablefast22_array_30.append(float(N_obs30_22))
N_totalrecoverablefast22_array_30.append(float(N_rec30_22))
N_totalfast22_array_100.append(float(N_all100_22))
N_totalobservablefast22_array_100.append(float(N_obs100_22))
N_totalrecoverablefast22_array_100.append(float(N_rec100_22))
N_totalfast22_array_1000.append(float(N_all1000_22))
N_totalobservablefast22_array_1000.append(float(N_obs1000_22))
N_totalrecoverablefast22_array_1000.append(float(N_rec1000_22))
N_totalfast195_array.append(float(N_all_195))
N_totalobservablefast195_array.append(float(N_obs_195))
N_totalrecoverablefast195_array.append(float(N_rec_195))
N_totalfast195_array_03.append(float(N_all03_195))
N_totalobservablefast195_array_03.append(float(N_obs03_195))
N_totalrecoverablefast195_array_03.append(float(N_rec03_195))
N_totalfast195_array_1.append(float(N_all1_195))
N_totalobservablefast195_array_1.append(float(N_obs1_195))
N_totalrecoverablefast195_array_1.append(float(N_rec1_195))
N_totalfast195_array_10.append(float(N_all10_195))
N_totalobservablefast195_array_10.append(float(N_obs10_195))
N_totalrecoverablefast195_array_10.append(float(N_rec10_195))
N_totalfast195_array_30.append(float(N_all30_195))
N_totalobservablefast195_array_30.append(float(N_obs30_195))
N_totalrecoverablefast195_array_30.append(float(N_rec30_195))
N_totalfast195_array_100.append(float(N_all100_195))
N_totalobservablefast195_array_100.append(float(N_obs100_195))
N_totalrecoverablefast195_array_100.append(float(N_rec100_195))
N_totalfast195_array_1000.append(float(N_all1000_195))
N_totalobservablefast195_array_1000.append(float(N_obs1000_195))
N_totalrecoverablefast195_array_1000.append(float(N_rec1000_195))
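# Sum the per-field estimates to get totals for the whole fast sample.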
N_totalfast = np.sum(N_totalfast_array)
N_totalfast_03 = np.sum(N_totalfast_array_03)
N_totalfast_1 = np.sum(N_totalfast_array_1)
N_totalfast_10 = np.sum(N_totalfast_array_10)
N_totalfast_30 = np.sum(N_totalfast_array_30)
N_totalfast_100 = np.sum(N_totalfast_array_100)
N_totalfast_1000 = np.sum(N_totalfast_array_1000)
N_totalobservablefast = np.sum(N_totalobservablefast_array)
N_totalobservablefast_03 = np.sum(N_totalobservablefast_array_03)
N_totalobservablefast_1 = np.sum(N_totalobservablefast_array_1)
N_totalobservablefast_10 = np.sum(N_totalobservablefast_array_10)
N_totalobservablefast_30 = np.sum(N_totalobservablefast_array_30)
N_totalobservablefast_100 = np.sum(N_totalobservablefast_array_100)
N_totalobservablefast_1000 = np.sum(N_totalobservablefast_array_1000)
N_totalrecoverablefast = np.sum(N_totalrecoverablefast_array)
N_totalrecoverablefast_03 = np.sum(N_totalrecoverablefast_array_03)
N_totalrecoverablefast_1 = np.sum(N_totalrecoverablefast_array_1)
N_totalrecoverablefast_10 = np.sum(N_totalrecoverablefast_array_10)
N_totalrecoverablefast_30 = np.sum(N_totalrecoverablefast_array_30)
N_totalrecoverablefast_100 = np.sum(N_totalrecoverablefast_array_100)
N_totalrecoverablefast_1000 = np.sum(N_totalrecoverablefast_array_1000)
N_totalfast22 = np.sum(N_totalfast22_array)
N_totalfast22_03 = np.sum(N_totalfast22_array_03)
N_totalfast22_1 = np.sum(N_totalfast22_array_1)
N_totalfast22_10 = np.sum(N_totalfast22_array_10)
N_totalfast22_30 = np.sum(N_totalfast22_array_30)
N_totalfast22_100 = np.sum(N_totalfast22_array_100)
N_totalfast22_1000 = np.sum(N_totalfast22_array_1000)
N_totalobservablefast22 = np.sum(N_totalobservablefast22_array)
N_totalobservablefast22_03 = np.sum(N_totalobservablefast22_array_03)
N_totalobservablefast22_1 = np.sum(N_totalobservablefast22_array_1)
N_totalobservablefast22_10 = np.sum(N_totalobservablefast22_array_10)
N_totalobservablefast22_30 = np.sum(N_totalobservablefast22_array_30)
N_totalobservablefast22_100 = np.sum(N_totalobservablefast22_array_100)
N_totalobservablefast22_1000 = np.sum(N_totalobservablefast22_array_1000)
N_totalrecoverablefast22 = np.sum(N_totalrecoverablefast22_array)
N_totalrecoverablefast22_03 = np.sum(N_totalrecoverablefast22_array_03)
N_totalrecoverablefast22_1 = np.sum(N_totalrecoverablefast22_array_1)
N_totalrecoverablefast22_10 = np.sum(N_totalrecoverablefast22_array_10)
N_totalrecoverablefast22_30 = np.sum(N_totalrecoverablefast22_array_30)
N_totalrecoverablefast22_100 = np.sum(N_totalrecoverablefast22_array_100)
N_totalrecoverablefast22_1000 = np.sum(N_totalrecoverablefast22_array_1000)
N_totalfast195 = np.sum(N_totalfast195_array)
N_totalfast195_03 = np.sum(N_totalfast195_array_03)
N_totalfast195_1 = np.sum(N_totalfast195_array_1)
N_totalfast195_10 = np.sum(N_totalfast195_array_10)
N_totalfast195_30 = np.sum(N_totalfast195_array_30)
N_totalfast195_100 = np.sum(N_totalfast195_array_100)
N_totalfast195_1000 = np.sum(N_totalfast195_array_1000)
N_totalobservablefast195 = np.sum(N_totalobservablefast195_array)
N_totalobservablefast195_03 = np.sum(N_totalobservablefast195_array_03)
N_totalobservablefast195_1 = np.sum(N_totalobservablefast195_array_1)
N_totalobservablefast195_10 = np.sum(N_totalobservablefast195_array_10)
N_totalobservablefast195_30 = np.sum(N_totalobservablefast195_array_30)
N_totalobservablefast195_100 = np.sum(N_totalobservablefast195_array_100)
N_totalobservablefast195_1000 = np.sum(N_totalobservablefast195_array_1000)
N_totalrecoverablefast195 = np.sum(N_totalrecoverablefast195_array)
N_totalrecoverablefast195_03 = np.sum(N_totalrecoverablefast195_array_03)
N_totalrecoverablefast195_1 = np.sum(N_totalrecoverablefast195_array_1)
N_totalrecoverablefast195_10 = np.sum(N_totalrecoverablefast195_array_10)
N_totalrecoverablefast195_30 = np.sum(N_totalrecoverablefast195_array_30)
N_totalrecoverablefast195_100 = np.sum(N_totalrecoverablefast195_array_100)
N_totalrecoverablefast195_1000 = np.sum(N_totalrecoverablefast195_array_1000)
wholerecoverypercent_fast = (N_totalrecoverablefast/N_totalobservablefast)*100
wholerecoverypercent_fast_03 = (N_totalrecoverablefast_03/N_totalobservablefast_03)*100
wholerecoverypercent_fast_1 = (N_totalrecoverablefast_1/N_totalobservablefast_1)*100
wholerecoverypercent_fast_10 = (N_totalrecoverablefast_10/N_totalobservablefast_10)*100
wholerecoverypercent_fast_30 = (N_totalrecoverablefast_30/N_totalobservablefast_30)*100
wholerecoverypercent_fast_100 = (N_totalrecoverablefast_100/N_totalobservablefast_100)*100
wholerecoverypercent_fast_1000 = (N_totalrecoverablefast_1000/N_totalobservablefast_1000)*100
sigmafast = ((N_totalrecoverablefast**(1/2))/N_totalobservablefast)*100
sigmafast_03 = ((N_totalrecoverablefast_03**(1/2))/N_totalobservablefast_03)*100
sigmafast_1 = ((N_totalrecoverablefast_1**(1/2))/N_totalobservablefast_1)*100
sigmafast_10 = ((N_totalrecoverablefast_10**(1/2))/N_totalobservablefast_10)*100
sigmafast_30 = ((N_totalrecoverablefast_30**(1/2))/N_totalobservablefast_30)*100
sigmafast_100 = ((N_totalrecoverablefast_100**(1/2))/N_totalobservablefast_100)*100
sigmafast_1000 = ((N_totalrecoverablefast_1000**(1/2))/N_totalobservablefast_1000)*100
overallrecoverypercent_fast = (N_totalrecoverablefast/N_totalfast)*100
overallrecoverypercent_fast_03 = (N_totalrecoverablefast_03/N_totalfast_03)*100
overallrecoverypercent_fast_1 = (N_totalrecoverablefast_1/N_totalfast_1)*100
overallrecoverypercent_fast_10 = (N_totalrecoverablefast_10/N_totalfast_10)*100
overallrecoverypercent_fast_30 = (N_totalrecoverablefast_30/N_totalfast_30)*100
overallrecoverypercent_fast_100 = (N_totalrecoverablefast_100/N_totalfast_100)*100
overallrecoverypercent_fast_1000 = (N_totalrecoverablefast_1000/N_totalfast_1000)*100
overallsigmafast = ((N_totalrecoverablefast**(1/2))/N_totalfast)*100
overallsigmafast_03 = ((N_totalrecoverablefast_03**(1/2))/N_totalfast_03)*100
overallsigmafast_1 = ((N_totalrecoverablefast_1**(1/2))/N_totalfast_1)*100
overallsigmafast_10 = ((N_totalrecoverablefast_10**(1/2))/N_totalfast_10)*100
overallsigmafast_30 = ((N_totalrecoverablefast_30**(1/2))/N_totalfast_30)*100
overallsigmafast_100 = ((N_totalrecoverablefast_100**(1/2))/N_totalfast_100)*100
overallsigmafast_1000 = ((N_totalrecoverablefast_1000**(1/2))/N_totalfast_1000)*100
wholerecoverypercent_fast22 = (N_totalrecoverablefast22/N_totalobservablefast22)*100
wholerecoverypercent_fast22_03 = (N_totalrecoverablefast22_03/N_totalobservablefast22_03)*100
wholerecoverypercent_fast22_1 = (N_totalrecoverablefast22_1/N_totalobservablefast22_1)*100
wholerecoverypercent_fast22_10 = (N_totalrecoverablefast22_10/N_totalobservablefast22_10)*100
wholerecoverypercent_fast22_30 = (N_totalrecoverablefast22_30/N_totalobservablefast22_30)*100
wholerecoverypercent_fast22_100 = (N_totalrecoverablefast22_100/N_totalobservablefast22_100)*100
wholerecoverypercent_fast22_1000 = (N_totalrecoverablefast22_1000/N_totalobservablefast22_1000)*100
sigmafast22 = ((N_totalrecoverablefast22**(1/2))/N_totalobservablefast22)*100
sigmafast22_03 = ((N_totalrecoverablefast22_03**(1/2))/N_totalobservablefast22_03)*100
sigmafast22_1 = ((N_totalrecoverablefast22_1**(1/2))/N_totalobservablefast22_1)*100
sigmafast22_10 = ((N_totalrecoverablefast22_10**(1/2))/N_totalobservablefast22_10)*100
sigmafast22_30 = ((N_totalrecoverablefast22_30**(1/2))/N_totalobservablefast22_30)*100
sigmafast22_100 = ((N_totalrecoverablefast22_100**(1/2))/N_totalobservablefast22_100)*100
sigmafast22_1000 = ((N_totalrecoverablefast22_1000**(1/2))/N_totalobservablefast22_1000)*100
overallrecoverypercent_fast22 = (N_totalrecoverablefast22/N_totalfast22)*100
overallrecoverypercent_fast22_03 = (N_totalrecoverablefast22_03/N_totalfast22_03)*100
overallrecoverypercent_fast22_1 = (N_totalrecoverablefast22_1/N_totalfast22_1)*100
overallrecoverypercent_fast22_10 = (N_totalrecoverablefast22_10/N_totalfast22_10)*100
overallrecoverypercent_fast22_30 = (N_totalrecoverablefast22_30/N_totalfast22_30)*100
overallrecoverypercent_fast22_100 = (N_totalrecoverablefast22_100/N_totalfast22_100)*100
overallrecoverypercent_fast22_1000 = (N_totalrecoverablefast22_1000/N_totalfast22_1000)*100
overallsigmafast22 = ((N_totalrecoverablefast22**(1/2))/N_totalfast22)*100
overallsigmafast22_03 = ((N_totalrecoverablefast22_03**(1/2))/N_totalfast22_03)*100
overallsigmafast22_1 = ((N_totalrecoverablefast22_1**(1/2))/N_totalfast22_1)*100
overallsigmafast22_10 = ((N_totalrecoverablefast22_10**(1/2))/N_totalfast22_10)*100
overallsigmafast22_30 = ((N_totalrecoverablefast22_30**(1/2))/N_totalfast22_30)*100
overallsigmafast22_100 = ((N_totalrecoverablefast22_100**(1/2))/N_totalfast22_100)*100
overallsigmafast22_1000 = ((N_totalrecoverablefast22_1000**(1/2))/N_totalfast22_1000)*100
wholerecoverypercent_fast195 = (N_totalrecoverablefast195/N_totalobservablefast195)*100
wholerecoverypercent_fast195_03 = (N_totalrecoverablefast195_03/N_totalobservablefast195_03)*100
wholerecoverypercent_fast195_1 = (N_totalrecoverablefast195_1/N_totalobservablefast195_1)*100
wholerecoverypercent_fast195_10 = (N_totalrecoverablefast195_10/N_totalobservablefast195_10)*100
wholerecoverypercent_fast195_30 = (N_totalrecoverablefast195_30/N_totalobservablefast195_30)*100
wholerecoverypercent_fast195_100 = (N_totalrecoverablefast195_100/N_totalobservablefast195_100)*100
wholerecoverypercent_fast195_1000 = (N_totalrecoverablefast195_1000/N_totalobservablefast195_1000)*100
sigmafast195 = ((N_totalrecoverablefast195**(1/2))/N_totalobservablefast195)*100
sigmafast195_03 = ((N_totalrecoverablefast195_03**(1/2))/N_totalobservablefast195_03)*100
sigmafast195_1 = ((N_totalrecoverablefast195_1**(1/2))/N_totalobservablefast195_1)*100
sigmafast195_10 = ((N_totalrecoverablefast195_10**(1/2))/N_totalobservablefast195_10)*100
sigmafast195_30 = ((N_totalrecoverablefast195_30**(1/2))/N_totalobservablefast195_30)*100
sigmafast195_100 = ((N_totalrecoverablefast195_100**(1/2))/N_totalobservablefast195_100)*100
sigmafast195_1000 = ((N_totalrecoverablefast195_1000**(1/2))/N_totalobservablefast195_1000)*100
overallrecoverypercent_fast195 = (N_totalrecoverablefast195/N_totalfast195)*100
overallrecoverypercent_fast195_03 = (N_totalrecoverablefast195_03/N_totalfast195_03)*100
overallrecoverypercent_fast195_1 = (N_totalrecoverablefast195_1/N_totalfast195_1)*100
overallrecoverypercent_fast195_10 = (N_totalrecoverablefast195_10/N_totalfast195_10)*100
overallrecoverypercent_fast195_30 = (N_totalrecoverablefast195_30/N_totalfast195_30)*100
overallrecoverypercent_fast195_100 = (N_totalrecoverablefast195_100/N_totalfast195_100)*100
overallrecoverypercent_fast195_1000 = (N_totalrecoverablefast195_1000/N_totalfast195_1000)*100
overallsigmafast195 = ((N_totalrecoverablefast195**(1/2))/N_totalfast195)*100
overallsigmafast195_03 = ((N_totalrecoverablefast195_03**(1/2))/N_totalfast195_03)*100
overallsigmafast195_1 = ((N_totalrecoverablefast195_1**(1/2))/N_totalfast195_1)*100
overallsigmafast195_10 = ((N_totalrecoverablefast195_10**(1/2))/N_totalfast195_10)*100
overallsigmafast195_30 = ((N_totalrecoverablefast195_30**(1/2))/N_totalfast195_30)*100
overallsigmafast195_100 = ((N_totalrecoverablefast195_100**(1/2))/N_totalfast195_100)*100
overallsigmafast195_1000 = ((N_totalrecoverablefast195_1000**(1/2))/N_totalfast195_1000)*100
print("N_totalfast = ", N_totalfast, "and in log = ", np.log10(N_totalfast), "**** N_totalobservablefast = ", N_totalobservablefast, "and in log = ", np.log10(N_totalobservablefast), "**** N_totalrecoverablefast = ", N_totalrecoverablefast, "and in log = ", np.log10(N_totalrecoverablefast))
print("N_totalfast_03 = ", N_totalfast_03, "and in log = ", np.log10(N_totalfast_03), "**** N_totalobservablefast_03 = ", N_totalobservablefast_03, "and in log = ", np.log10(N_totalobservablefast_03), "**** N_totalrecoverablefast_03 = ", N_totalrecoverablefast_03, "and in log = ", np.log10(N_totalrecoverablefast_03))
print("N_totalfast_1 = ", N_totalfast_1, "and in log = ", np.log10(N_totalfast_1), "**** N_totalobservablefast_1 = ", N_totalobservablefast_1, "and in log = ", np.log10(N_totalobservablefast_1), "**** N_totalrecoverablefast_1 = ", N_totalrecoverablefast_1, "and in log = ", np.log10(N_totalrecoverablefast_1))
print("N_totalfast_10 = ", N_totalfast_10, "and in log = ", np.log10(N_totalfast_10), "**** N_totalobservablefast_10 = ", N_totalobservablefast_10, "and in log = ", np.log10(N_totalobservablefast_10), "**** N_totalrecoverablefast_10 = ", N_totalrecoverablefast_10, "and in log = ", np.log10(N_totalrecoverablefast_10))
print("N_totalfast_30 = ", N_totalfast_30, "and in log = ", np.log10(N_totalfast_30), "**** N_totalobservablefast_30 = ", N_totalobservablefast_30, "and in log = ", np.log10(N_totalobservablefast_30), "**** N_totalrecoverablefast_30 = ", N_totalrecoverablefast_30, "and in log = ", np.log10(N_totalrecoverablefast_30))
print("N_totalfast_100 = ", N_totalfast_100, "and in log = ", np.log10(N_totalfast_100), "**** N_totalobservablefast_100 = ", N_totalobservablefast_100, "and in log = ", np.log10(N_totalobservablefast_100), "**** N_totalrecoverablefast_100 = ", N_totalrecoverablefast_100, "and in log = ", np.log10(N_totalrecoverablefast_100))
print("N_totalfast_1000 = ", N_totalfast_1000, "and in log = ", np.log10(N_totalfast_1000), "**** N_totalobservablefast_1000 = ", N_totalobservablefast_1000, "and in log = ", np.log10(N_totalobservablefast_1000), "**** N_totalrecoverablefast_1000 = ", N_totalrecoverablefast_1000, "and in log = ", np.log10(N_totalrecoverablefast_1000))
print("********************************")
print("wholerecoverypercent_fast = $", wholerecoverypercent_fast, "/pm", sigmafast, "$")
print("wholerecoverypercent_fast_03 = $", wholerecoverypercent_fast_03, "/pm", sigmafast_03, "$")
print("wholerecoverypercent_fast_1 = $", wholerecoverypercent_fast_1, "/pm", sigmafast_1, "$")
print("wholerecoverypercent_fast_10 = $", wholerecoverypercent_fast_10, "/pm", sigmafast_10, "$")
print("wholerecoverypercent_fast_30 = $", wholerecoverypercent_fast_30, "/pm", sigmafast_30, "$")
print("wholerecoverypercent_fast_100 = $", wholerecoverypercent_fast_100, "/pm", sigmafast_100, "$")
print("wholerecoverypercent_fast_1000 = $", wholerecoverypercent_fast_1000, "/pm", sigmafast_1000, "$")
print("********************************")
print("overallrecoverypercent_fast = $", overallrecoverypercent_fast, "/pm", overallsigmafast, "$")
print("overallrecoverypercent_fast_03 = $", overallrecoverypercent_fast_03, "/pm", overallsigmafast_03, "$")
print("overallrecoverypercent_fast_1 = $", overallrecoverypercent_fast_1, "/pm", overallsigmafast_1, "$")
print("overallrecoverypercent_fast_10 = $", overallrecoverypercent_fast_10, "/pm", overallsigmafast_10, "$")
print("overallrecoverypercent_fast_30 = $", overallrecoverypercent_fast_30, "/pm", overallsigmafast_30, "$")
print("overallrecoverypercent_fast_100 = $", overallrecoverypercent_fast_100, "/pm", overallsigmafast_100, "$")
print("overallrecoverypercent_fast_1000 = $", overallrecoverypercent_fast_1000, "/pm", overallsigmafast_1000, "$")
print("################################")
print("N_totalfast22 = ", N_totalfast22, "and in log = ", np.log10(N_totalfast22), "**** N_totalobservablefast22 = ", N_totalobservablefast22, "and in log = ", np.log10(N_totalobservablefast22), "**** N_totalrecoverablefast22 = ", N_totalrecoverablefast22, "and in log = ", np.log10(N_totalrecoverablefast22))
print("N_totalfast22_03 = ", N_totalfast22_03, "and in log = ", np.log10(N_totalfast22_03), "**** N_totalobservablefast22_03 = ", N_totalobservablefast22_03, "and in log = ", np.log10(N_totalobservablefast22_03), "**** N_totalrecoverablefast22_03 = ", N_totalrecoverablefast22_03, "and in log = ", | np.log10(N_totalrecoverablefast22_03) | numpy.log10 |
# -*- coding: utf-8 -*-
from __future__ import division, print_function, absolute_import
import numpy as np
import threading
try:
# Python 2
import Queue as queue
except Exception:
# Python 3
import queue
import tensorflow as tf
from . import utils
class DataFlow(object):
""" Data Flow.
Base class for using real time pre-processing and controlling data flow.
Supports pipelining for faster computation.
Arguments:
coord: `Coordinator`. A Tensorflow coordinator.
num_threads: `int`. Total number of simultaneous threads to process data.
max_queue: `int`. Maximum number of data stored in a queue.
shuffle: `bool`. If True, data will be shuffled.
continuous: `bool`. If True, when an epoch is over, the same data will be
fed again.
ensure_data_order: `bool`. Ensure that data order is kept when using
'next' to retrieve data (processing will be slower).
dprep_dict: dict. Optional data pre-processing parameter for performing
real time data pre-processing. Keys must be placeholders and values
`DataPreprocessing` subclass object.
daug_dict: dict. Optional data augmentation parameter for performing
real time data augmentation. Keys must be placeholders and values
`DataAugmentation` subclass object.
"""
def __init__(self, coord, num_threads=8, max_queue=32, shuffle=False,
continuous=False, ensure_data_order=False,
dprep_dict=None, daug_dict=None):
self.coord = coord
self.num_threads = num_threads
self.max_queue = max_queue
self.shuffle = shuffle
self.continuous = continuous
if ensure_data_order:
self.num_threads = 1
self.max_queue = 1
self.dprep_dict = dprep_dict
self.daug_dict = daug_dict
self.interrupted = False
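# `DataFlow` only stores the configuration above; concrete subclasses such as
# `FeedDictFlow` below implement the queue-filling worker threads. Minimal
# usage sketch (placeholder names X, Y, trainX, trainY are assumptions):
#   coord = tf.train.Coordinator()
#   flow = FeedDictFlow({X: trainX, Y: trainY}, coord, batch_size=128)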
class FeedDictFlow(DataFlow):
""" FeedDictFlow.
Generate a stream of batches from a dataset. It uses two queues: one for
generating batches of data ids, and the other to load data and apply
pre-processing. If continuous is `True`, the data flow never ends until `stop`
is invoked, or `coord` interrupts the threads.
Arguments:
feed_dict: `dict`. A TensorFlow formatted feed dict (with placeholders
as keys and data as values).
coord: `Coordinator`. A Tensorflow coordinator.
batch_size: `int`. The number of samples per batch.
num_threads: `int`. Total number of simultaneous threads to process data.
max_queue: `int`. Maximum number of data stored in a queue.
shuffle: `bool`. If True, data will be shuffled.
continuous: `bool`. If True, when an epoch is over, the same data will be
fed again.
ensure_data_order: `bool`. Ensure that data order is kept when using
'next' to retrieve data (processing will be slower).
dprep_dict: dict. Optional data pre-processing parameter for performing
real time data pre-processing. Keys must be placeholders and values
`DataPreprocessing` subclass object.
daug_dict: dict. Optional data augmentation parameter for performing
real time data augmentation. Keys must be placeholders and values
`DataAugmentation` subclass object.
index_array: `list`. An optional list of index to be used instead of
using the whole dataset indexes (Useful for validation split).
"""
def __init__(self, feed_dict, coord, batch_size=128, num_threads=8,
max_queue=32, shuffle=False, continuous=False,
ensure_data_order=False, dprep_dict=None, daug_dict=None,
index_array=None):
super(FeedDictFlow, self).__init__(coord, num_threads, max_queue,
shuffle, continuous,
ensure_data_order,
dprep_dict,
daug_dict)
self.feed_dict = feed_dict
self.batch_size = batch_size
self.n_samples = len(utils.get_dict_first_element(feed_dict))
# Queue holding batch ids
self.batch_ids_queue = queue.Queue(self.max_queue)
# Queue holding data ready feed dicts
self.feed_dict_queue = queue.Queue(self.max_queue)
# Create samples index array
self.index_array = np.arange(self.n_samples)
# -*- coding: utf-8 -*-
# Copyright (C) 2020. Huawei Technologies Co., Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base Tuner."""
import logging
import numpy as np
from .tuner_model import TunerModel
from .acquire_function import expected_improvement, thompson_sampling
LOG = logging.getLogger("vega.hpo")
class TunerBuilder(object):
"""A Base class for Tuner."""
def __init__(self, search_space, gridding=False, tuner='GP'):
"""Init TunerBuilder.
:param search_space: [SearchSpace]
:param gridding:
"""
self.min_count_score = 1
self.search_space = search_space
self.params = search_space.params()
self.tuner = tuner
self._init_model(tuner)
self._best_score = -1 * float('inf')
self._best_params = None
self.grid = gridding
self.feature_raw = None
self.label_raw = np.array([])
self.feature = np.array([])
self.label = np.array([])
self.fited = False
def _init_model(self, tuner_model):
"""Init model by tuner_model.
:param tuner_model:
:return:
"""
self.model = TunerModel(tuner_model, self.min_count_score, self.params)
if self.model is None:
LOG.error('Tuner model does not exist, model=%s', tuner_model)
def add(self, feature, label):
"""Add feature and label to train model.
:param feature:
:param label:
:return:
"""
if ('RandSearch' in self.tuner) | ('GridSearch' in self.tuner):
LOG.info('%s not need to use add()', self.tuner)
return
if isinstance(feature, dict):
feature = [feature]
label = [label]
if len(feature) < 1:
LOG.warning('Function add() failed, len(feature)<1')
return
if len(feature) != len(label):
raise ValueError("The input hyperparameter list length is not "
"equal to the input score list length!")
self._add_feature_and_label(feature, label)
self.label_raw = np.append(self.label_raw, label)
# transform hyperparameter based on its dtype
feature_trans = np.array([], dtype=np.float64)
from abstract import AbstractRobotInterface
import gym
from gym import spaces
import numpy as np
import os
import pybullet as pb
import rospkg
import subprocess
from costar_task_plan.simulation.world import *
class TurtlebotInterface(AbstractRobotInterface):
'''
Defines the action space for the Turtlebot mobile robot base.
'''
xacro_filename = 'robot/create_circles_kinect.urdf.xacro'
urdf_filename = 'create_circles_kinect.urdf'
arm_name = "None"
gripper_name = "None"
base_name = "turtlebot"
left_wheel_index = 6
right_wheel_index = 7
def __init__(self, *args, **kwargs):
super(TurtlebotInterface, self).__init__(*args, **kwargs)
def load(self):
'''
This is an example of a function that allows you to load a robot from
file based on command line arguments. It just needs to find the
appropriate directory, use xacro to create a temporary robot urdf,
and then load that urdf with PyBullet.
'''
rospack = rospkg.RosPack()
path = rospack.get_path('costar_simulation')
filename = os.path.join(path, self.xacro_filename)
urdf_filename = os.path.join(path, 'robot', self.urdf_filename)
urdf = open(urdf_filename, "w")
# Recompile the URDF to make sure it's up to date
subprocess.call(['rosrun', 'xacro', 'xacro.py', filename], stdout=urdf)
self.handle = pb.loadURDF(urdf_filename)
self.grasp_idx = self.findGraspFrame()
#self.loadKinematicsFromURDF(urdf_filename, "base_link")
return self.handle
def mobile(self):
return True
def getState(self):
(pos, rot) = pb.getBasePositionAndOrientation(self.handle)
return SimulationRobotState(robot=self,
base_pos=pos,
base_rot=rot)
def place(self, pos, rot, joints):
pass
def arm(self, cmd, mode=pb.POSITION_CONTROL):
pass
def gripperCloseCommand(cls):
'''
Return the closed position for this gripper.
'''
return np.array([0.0])
import numpy as np
import pandas as pd
from numpy.testing import assert_, assert_equal, assert_allclose, assert_raises
from statsmodels.tsa.arima import specification, params
def test_init():
# Test initialization of the params
# Basic test, with 1 of each parameter
exog = pd.DataFrame([[0]], columns=['a'])
spec = specification.SARIMAXSpecification(
exog=exog, order=(1, 1, 1), seasonal_order=(1, 1, 1, 4))
p = params.SARIMAXParams(spec=spec)
# Test things copied over from spec
assert_equal(p.spec, spec)
assert_equal(p.exog_names, ['a'])
assert_equal(p.ar_names, ['ar.L1'])
assert_equal(p.ma_names, ['ma.L1'])
assert_equal(p.seasonal_ar_names, ['ar.S.L4'])
assert_equal(p.seasonal_ma_names, ['ma.S.L4'])
assert_equal(p.param_names, ['a', 'ar.L1', 'ma.L1', 'ar.S.L4', 'ma.S.L4',
'sigma2'])
assert_equal(p.k_exog_params, 1)
assert_equal(p.k_ar_params, 1)
assert_equal(p.k_ma_params, 1)
assert_equal(p.k_seasonal_ar_params, 1)
assert_equal(p.k_seasonal_ma_params, 1)
assert_equal(p.k_params, 6)
# Initial parameters should all be NaN
assert_equal(p.params, np.nan)
assert_equal(p.ar_params, [np.nan])
assert_equal(p.ma_params, [np.nan])
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Tests for algorithms related to association rules.
"""
from paretoset.algorithms_numpy import paretoset_naive, paretoset_efficient, pareto_rank_naive, crowding_distance
from paretoset.algorithms_numba import paretoset_jit, BNL
import pytest
import numpy as np
import itertools
seeds = list(range(99))
dtypes = [np.float64, np.int64]  # the np.float / np.int aliases were removed in NumPy 1.24
bools = [True, False]
paretoset_algorithms = [paretoset_naive, paretoset_efficient, paretoset_jit, BNL]
paretorank_algorithms = [pareto_rank_naive]
def generate_problem_simplex(n, d):
"""Generate D dimensional data on the D-1 dimensional simplex."""
# https://cs.stackexchange.com/questions/3227/uniform-sampling-from-a-simplex
# Sorted uniform draws so the successive differences below are non-negative
# and sum to one, i.e. each row lies on the (d-1)-simplex.
data = np.sort(np.random.uniform(size=(n, d - 1)), axis=1)
data = np.hstack((np.zeros(n).reshape(-1, 1), data, np.ones(n).reshape(-1, 1)))
diffs = data[:, 1:] - data[:, :-1]
assert diffs.shape == (n, d)
return diffs
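# Example (sketch): generate_problem_simplex(1000, 3) returns a (1000, 3)
# array whose rows are non-negative and sum to one.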
class TestParetoSetImplementations:
@pytest.mark.parametrize("seed, dtype, distinct", itertools.product(seeds, dtypes, bools))
def test_on_random_instances(self, seed, dtype, distinct):
"""Test that the algorithms all return the same answer."""
# Generate a random instance
np.random.seed(seed)
n_costs = np.random.randint(1, 99)
n_objectives = np.random.randint(1, 4)
costs = np.random.randn(n_costs, n_objectives)
# Convert to dtype, this creates some duplicates when `dtype` is integer
costs = np.array(costs, dtype=dtype)
# Compute the answers
masks = [algo(costs, distinct=distinct) for algo in paretoset_algorithms]
# At least one element must be in the mask
assert all(np.sum(m) > 0 for m in masks)
# Check pairwise that the answers are identical
for m1, m2 in zip(masks[:-1], masks[1:]):
assert np.all(m1 == m2)
@pytest.mark.parametrize("algorithm", paretoset_algorithms)
def test_case_distinct_1(self, algorithm):
"""Test the `distinct` parameter on a simple example"""
costs = np.array([[1, 1], [0, 1], [0, 1]])
ranks_distinct = algorithm(costs, distinct=True)
assert np.all(ranks_distinct == np.array([False, True, False]))
ranks_non_distinct = algorithm(costs, distinct=False)
assert np.all(ranks_non_distinct == np.array([False, True, True]))
@pytest.mark.parametrize("algorithm", paretoset_algorithms)
def test_case_distinct_2(self, algorithm):
"""Test the `distinct` parameter on a simple example"""
costs = np.array([[1, 0], [1, 0], [0, 1], [0, 1]])
ranks_distinct = algorithm(costs, distinct=True)
assert np.all(ranks_distinct == np.array([True, False, True, False]))
ranks_non_distinct = algorithm(costs, distinct=False)
assert np.all(ranks_non_distinct)
@pytest.mark.parametrize("algorithm", paretoset_algorithms)
def test_case_distinct_3(self, algorithm):
"""Test the `distinct` parameter on a simple example"""
costs = np.array([[1, 0], [1, 0], [0, 1], [0, 1], [0.5, 0.5], [0.5, 0.5]])
ranks_distinct = algorithm(costs, distinct=True)
assert np.all(ranks_distinct == np.array([True, False, True, False, True, False]))
ranks_non_distinct = algorithm(costs, distinct=False)
assert np.all(ranks_non_distinct)
@pytest.mark.parametrize("algorithm", paretoset_algorithms)
def test_case_distinct_4(self, algorithm):
"""Test the `distinct` parameter on a simple example"""
costs = np.array([[0], [0]])
ranks_distinct = algorithm(costs, distinct=True)
assert np.all(ranks_distinct == np.array([True, False]))
ranks_non_distinct = algorithm(costs, distinct=False)
assert np.all(ranks_non_distinct)
@pytest.mark.parametrize("seed, algorithm", itertools.product(seeds, paretoset_algorithms))
def test_invariance_under_permutations(self, seed, algorithm):
"""Test that the algorithm in invariant under random permutations of data."""
# Create some random data
np.random.seed(seed)
n_costs = np.random.randint(1, 9)
n_objectives = np.random.randint(1, 4)
costs = np.random.randint(low=-1, high=1, size=(n_costs, n_objectives))
# Get masks
mask_distinct = algorithm(costs, distinct=True)
ranks_non_distinct = algorithm(costs, distinct=False)
# Permute the data
permutation = np.random.permutation(np.arange(n_costs))
assert np.sum(mask_distinct) > 0
assert np.sum(ranks_non_distinct) > 0
# When `distinct` is set to `False`, permutation invariance should hold
assert np.all(ranks_non_distinct[permutation] == algorithm(costs[permutation], distinct=False))
# Equally many should be marked in the mask, regardless of `distinct` or not
assert np.sum(mask_distinct[permutation]) == np.sum(algorithm(costs[permutation], distinct=True))
assert np.sum(ranks_non_distinct[permutation]) == np.sum(algorithm(costs[permutation], distinct=False))
@pytest.mark.parametrize("seed, algorithm", itertools.product(seeds, paretoset_algorithms))
def test_equal_values(self, seed, algorithm):
"""For each group of identical data in the Pareto set: if `distinct`,
the first index should be True and everything else False.
If not `distinct`, the group should all be True.
"""
# Generate random data
np.random.seed(seed)
n_costs = np.random.randint(1, 99)
n_objectives = np.random.randint(1, 4)
costs = np.random.randint(low=-1, high=1, size=(n_costs, n_objectives))
# Tools for model testing and evaluation
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from copy import deepcopy
from sklearn.pipeline import Pipeline
from sklearn.preprocessing import StandardScaler
from sklearn.model_selection import KFold
from sklearn.metrics import r2_score, mean_absolute_error
def multi_min(arrays):
"""
Return the minimum scalar value of multiple arrays
Args:
arrays: list of numpy arrays
"""
mins = []
for arr in arrays:
mins.append(np.min(arr))
return min(mins)
def multi_max(arrays):
"""
Return the maximum scalar value of multiple arrays
Args:
arrays: list of numpy arrays
"""
maxs = []
for arr in arrays:
maxs.append(np.max(arr))
return max(maxs)
class repeating_KFold():
"""
KFold splitter that performs multiple independent splits of the dataset. For use with sklearn and mlxtend functions/classes that take a splitter object
Intended for use with shuffle=True to reduce bias for one particular train-test split
Args:
repeat: int, number of times to repeat
n_splits: number of splits
shuffle: if True, shuffle dataset before splitting
random_state: specify a random state for shuffle
"""
def __init__(self,repeat,n_splits,shuffle=True,random_state=None):
self.repeat = repeat
self.n_splits = n_splits
self.shuffle = shuffle
self.random_state = random_state
self.kf = KFold(n_splits, shuffle=shuffle)
# set seeds for consistency if random state specified
if self.random_state is not None:
r = np.random.RandomState(self.random_state)
self.seeds = r.choice(np.arange(0,repeat*10,1),self.repeat,replace=False)
else:
self.seeds = [None]*self.repeat
def split(self,X,y=None,groups=None):
for n,seed in zip(range(self.repeat),self.seeds):
self.kf.random_state = seed
for train,test in self.kf.split(X,y,groups):
yield train,test
def get_n_splits(self,X=None,y=None,groups=None):
return self.n_splits*self.repeat
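# Hedged usage sketch (not part of the original module): `repeating_KFold` can be
# passed anywhere scikit-learn expects a CV splitter, e.g. `cross_val_score`; the
# estimator and synthetic data below are placeholders chosen only for illustration.
def _demo_repeating_kfold():
    from sklearn.linear_model import LinearRegression
    from sklearn.model_selection import cross_val_score
    X = np.arange(40, dtype=float).reshape(20, 2)
    y = 2.0 * X[:, 0] + 1.0
    cv = repeating_KFold(repeat=3, n_splits=5, shuffle=True, random_state=0)
    # 3 repeats x 5 folds = 15 scores
    return cross_val_score(LinearRegression(), X, y, cv=cv)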
def KFold_cv(estimator,X,y,sample_weight=None,n_splits=5,pipeline_learner_step='auto',pred_int=0,random_state=None):
"""
Perform k-fold cross-validation
Args:
estimator: sklearn estimator instance
X: data matrix (nxm)
y: response (n-vector)
sample_weight: weights for fitting data. If None, defaults to equal weights
n_splits: number of folds. Default 5
pipeline_learner_step: if estimator is a Pipeline instance, index of the learner step
pred_int: prediction interval to calculate (i.e., 0.5 indicates 50% interval). If 0, do not calculate prediction interval
random_state: random state for KFold shuffle
Returns:
actual: acutal y values for test folds
pred: predicted y values for test folds
train_scores: list of training r2 scores
test_scores: list of test r2 scores
pred_lb: lower bound of prediction interval. Vector of zeros if pred_int==0
pred_ub: upper bound of prediction interval. Vector of zeros if pred_int==0
"""
if random_state is not None:
kf = KFold(n_splits,shuffle=True,random_state=random_state)
else:
kf = KFold(n_splits,shuffle=True)
if len(X)!=len(y):
raise ValueError('X and y must have same first dimension')
# if y is pandas series, convert to array. No info required from Series object
if type(y)==pd.core.series.Series:
y = y.values
train_scores = np.empty(n_splits)
test_scores = np.empty(n_splits)
actual = np.zeros_like(y)
pred = np.zeros_like(y)
pred_lb = np.zeros_like(y)
pred_ub = np.zeros_like(y)
for i, (train_index,test_index) in enumerate(kf.split(X)):
if type(X)==pd.core.frame.DataFrame:
X_train, X_test = X.iloc[train_index,:], X.iloc[test_index,:]
else:
X_train, X_test = X[train_index], X[test_index]
y_train, y_test = y[train_index], y[test_index]
if sample_weight is not None:
w_train = sample_weight[train_index]
w_test = sample_weight[test_index]
if sample_weight is not None:
if type(estimator)==Pipeline:
#if estimator is a Pipeline, need to specify name of learning step in fit_params for sample_weight
if pipeline_learner_step=='auto':
# determine which step is the learner based on existence of _estimator_type attribute
step_objects = [step[1] for step in estimator.steps]
objdirs = [dir(obj) for obj in step_objects]
learner_idx = np.where(['_estimator_type' in d for d in objdirs])[0]
if len(learner_idx)==1:
pipeline_learner_step = learner_idx[0]
else:
raise Exception("Can''t determine pipeline_learner_step. Must specify manually")
est_name = estimator.steps[pipeline_learner_step][0]
estimator.fit(X_train,y_train,**{f'{est_name}__sample_weight':w_train})
else:
estimator.fit(X_train,y_train,sample_weight=w_train)
train_scores[i] = estimator.score(X_train,y_train,sample_weight=w_train)
test_scores[i] = estimator.score(X_test,y_test,sample_weight=w_test)
else:
# not all estimators' fit() methods accept sample_weight arg - can't just pass None
estimator.fit(X_train,y_train)
train_scores[i] = estimator.score(X_train,y_train)
test_scores[i] = estimator.score(X_test,y_test)
actual[test_index] = y_test
pred[test_index] = estimator.predict(X_test)
if pred_int > 0:
pred_lb[test_index],pred_ub[test_index] = predict_interval(estimator,X_test,pred_int)
return actual, pred, train_scores, test_scores, pred_lb, pred_ub
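# Hedged usage sketch (not part of the original module): a basic call of KFold_cv
# with a plain linear model and synthetic placeholder data.
def _demo_kfold_cv():
    from sklearn.linear_model import Ridge
    rng = np.random.RandomState(0)
    X = rng.rand(50, 3)
    y = X[:, 0] + 0.05 * rng.randn(50)
    actual, pred, train_scores, test_scores, _, _ = KFold_cv(
        Ridge(alpha=1.0), X, y, n_splits=5, random_state=0)
    return r2_score(actual, pred)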
def repeated_KFold_cv(estimator,X,y,repeat,sample_weight=None,n_splits=5,pipeline_learner_step='auto',pred_int=0,random_state=None):
"""
Perform k-fold cross-validation with multiple random splits
Args:
estimator: sklearn estimator instance
X: data matrix (nxm)
y: response (n-vector)
repeat: number of times to repeat KFold CV
sample_weight: weights for fitting data. If None, defaults to equal weights
n_splits: number of folds. Default 5
pipeline_learner_step: if estimator is a Pipeline instance, index of the learner step
pred_int: prediction interval to calculate (i.e., 0.5 indicates 50% interval). If 0, do not calculate prediction interval
random_state: random state for KFold shuffle
Returns:
actuals: list of actual y vectors for all CV repetitions
preds: list of predicted y vectors for all CV repetitions
agg_test_scores: list of r2 scores for all CV repetitions
agg_test_maes: list of MAEs for all CV repetitions
pred_lbs: list of prediction interval lower bounds for all CV repetitions. All zeros if pred_int==0
pred_ubs: list of prediction interval upper bounds for all CV repetitions. All zeros if pred_int==0
"""
actuals = np.empty((repeat,len(y)))
preds = np.empty_like(actuals)
agg_test_scores = np.empty(repeat)
agg_test_maes = np.empty(repeat)
pred_lbs = np.empty_like(actuals)
pred_ubs = np.empty_like(actuals)
# set seeds for consistency if specified
if random_state is not None:
r = np.random.RandomState(random_state)
seeds = r.choice(np.arange(0,repeat*10,1),repeat,replace=False)
else:
seeds = [None]*repeat
for n in range(repeat):
act,pred,train,test,lb,ub = KFold_cv(estimator,X,y,sample_weight,n_splits,pipeline_learner_step,pred_int,random_state=seeds[n])
agg_test_score = r2_score(act,pred,sample_weight=sample_weight)
agg_mae = mean_absolute_error(act,pred,sample_weight=sample_weight)
actuals[n] = act
preds[n] = pred
agg_test_scores[n] = agg_test_score
agg_test_maes[n] = agg_mae
pred_lbs[n] = lb
pred_ubs[n] = ub
return actuals, preds, agg_test_scores, agg_test_maes, pred_lbs, pred_ubs
def KFold_pva(estimator,X,y,sample_weight=None,n_splits=5,pipeline_learner_step='auto',random_state=None,ax=None,xerr=None,pred_int=0,show_metrics=['r2','mae'],text_kw={},s=10,line_kw={'zorder':0,'c':'#1f77b4'},**scatter_kw):
"""
Perform k-fold cross-validation and plot predicted vs. actual for test set
Args:
estimator: sklearn estimator instance
X: data matrix (nxm)
y: response (n-vector)
sample_weight: vector of sample weights. If None, equal weights assigned
n_splits: number of folds. Default 5
random_state: random state for KFold shuffle
ax: axis on which to plot
show_metrics: list of metrics to calculate and annotate on plot. Options: 'r2', 'mae'
text_kw: kwargs for metric text; passed to plt.text()
s: marker size
line_kw: kwargs for ideal x=y line
scatter_kw: kwargs to pass to plt.scatter()
Returns:
train_scores: k-array of train scores
test_scores: k-array of test scores
agg_test_score: overall test score (r2) considering all test folds together
"""
y, y_pred, train_scores, test_scores, pred_lb, pred_ub = KFold_cv(estimator,X,y,sample_weight,n_splits,pipeline_learner_step,pred_int,random_state)
agg_test_score = r2_score(y,y_pred,sample_weight=sample_weight)
if pred_int > 0:
yerr = np.array([y_pred-pred_lb,pred_ub-y_pred])
else:
yerr = None
ax = pred_v_act_plot(y,y_pred,sample_weight,ax,xerr,yerr,show_metrics,text_kw,s,line_kw,**scatter_kw)
return train_scores, test_scores, agg_test_score
def repeated_KFold_pva(estimator,X,y,repeat,plot_type='series',sample_weight=None,n_splits=5,pipeline_learner_step=1,random_state=None,
ax=None,xerr=None,pred_int=0,show_metrics=['r2','mae'],text_kw={},s=10,line_kw={'zorder':0,'c':'#1f77b4'},**scatter_kw):
"""
Perform k-fold cross-validation and plot predicted vs. actual for test set
Args:
estimator: sklearn estimator instance
X: data matrix (nxm)
y: response (n-vector)
repeat: number of times to repeat KFold CV
sample_weight: weights for fitting data. If None, defaults to equal weights
n_splits: number of folds. Default 5
pipeline_learner_step: if estimator is a Pipeline instance, index of the learner step
random_state: random state to determine random seeds for KFold shuffles
ax: axis on which to plot
show_metrics: list of metrics to calculate and annotate on plot. Options: 'r2', 'mae'
text_kw: kwargs for metric text; passed to plt.text()
s: marker size
scatter_kw: kwargs to pass to plt.scatter()
Returns:
train_scores: k-array of train scores
test_scores: k-array of test scores
tot_test_score: overall test score (r2) considering all test folds together
"""
actuals, preds, agg_test_scores, agg_test_maes, pred_lbs, pred_ubs = repeated_KFold_cv(estimator,X,y,repeat,sample_weight,n_splits,pipeline_learner_step,pred_int,random_state)
if plot_type=='series':
# plot each repetition as a separate series
for y,y_pred,lb,ub in zip(actuals, preds,pred_lbs,pred_ubs):
if pred_int > 0:
yerr = np.array([y_pred-lb,ub-y_pred])
else:
yerr = None
ax = pred_v_act_plot(y,y_pred,sample_weight,ax,xerr,yerr,show_metrics=None,text_kw=text_kw,s=s,line_kw=line_kw,**scatter_kw)
elif plot_type=='mean':
# average predicted values for each point across repetitions
y = np.mean(actuals,axis=0)
y_pred = np.mean(preds,axis=0)
if pred_int > 0:
pred_std = np.std(preds,axis=0)
lerr = np.mean(preds-pred_lbs,axis=0)
uerr = np.mean(pred_ubs-preds,axis=0)
# add the variance between CV repetitions to the prediction interval
yerr = np.array([(pred_std**2 + lerr**2)**0.5,(pred_std**2 + uerr**2)**0.5])
else:
yerr = None
ax = pred_v_act_plot(y,y_pred,sample_weight,ax,xerr,yerr,show_metrics=None,text_kw=text_kw,s=s,line_kw=line_kw,**scatter_kw)
# metrics need to be aggregated across repetitions
metric_txt = ''
for metric in show_metrics:
if metric=='r2':
metric_txt += '$r^2: \ {}$\n'.format(round(np.mean(agg_test_scores),3))
elif metric=='mae':
mae_scale = int(np.ceil(np.log10(np.mean(agg_test_maes))))
if mae_scale < 3:
mae_round = 3 - mae_scale
else:
mae_round = 0
metric_txt += 'MAE: {}\n'.format(round(np.mean(agg_test_maes),mae_round))
if len(metric_txt) > 0:
x = text_kw.pop('x',0.05)
y = text_kw.pop('y',0.95)
ax.text(x,y,metric_txt,transform=ax.transAxes,va='top',**text_kw)
return actuals, preds, agg_test_scores, agg_test_maes, pred_lbs, pred_ubs
def plot_pva(estimator,X,y,sample_weight=None,ax=None,xerr=None,pred_int=0,show_metrics=['r2','mae'],text_kw={},s=10,line_kw={'zorder':0,'c':'#1f77b4'},**scatter_kw):
"""
Plot predicted vs. actual for fitted estimator
Args:
estimator: fitted sklearn estimator instance
X: data matrix (nxm)
y: response (n-vector)
sample_weight: sample weights. Only used to calculate metrics (r2, mae)
ax: axis on which to plot
xerr: scalar or array of x (actual) errors/uncertainties
pred_int: if True, estimate and plot prediction intervals
show_metrics: list of metrics to calculate and annotate on plot. Options: 'r2', 'mae'
text_kw: kwargs for metric text; passed to plt.text()
s: marker size
"""
y_pred = estimator.predict(X)
if pred_int > 0:
lb,ub = predict_interval(estimator,X,pred_int)
yerr = np.array([y_pred-lb,ub-y_pred])
else:
yerr = None
ax = pred_v_act_plot(y,y_pred,sample_weight,ax,xerr,yerr,show_metrics,text_kw,s,line_kw,**scatter_kw)
def pred_v_act_plot(y,y_pred,sample_weight=None,ax=None,xerr=None,yerr=None,show_metrics=['r2','mae'],text_kw={},s=10,line_kw={'zorder':0,'c':'#1f77b4'},legend=True,**scatter_kw):
"""
Plot predicted vs. actual
Args:
y: actual values
y_pred: predictions
sample_weight: sample weights. Only used to calculate metrics (r2, mae)
ax: axis on which to plot
xerr: scalar or array of x (actual) errors/uncertainties
yerr: scalar or array of y (prediction) uncertainties
show_metrics: list of metrics to calculate and annotate on plot. Options: 'r2', 'mae'
text_kw: kwargs for metric text; passed to plt.text()
s: marker size
"""
if ax is None:
fig, ax = plt.subplots()
if xerr is None and yerr is None:
ax.scatter(y,y_pred,s=s,**scatter_kw)
else:
ax.errorbar(y,y_pred,xerr=xerr,yerr=yerr,ms=s,**scatter_kw)
axmin = multi_min([y,y_pred])
axmax = multi_max([y,y_pred])
ax.plot([axmin,axmax],[axmin,axmax],**line_kw,label='Ideal')
metric_txt = ''
if show_metrics is not None:
for metric in show_metrics:
if metric=='r2':
r2 = r2_score(y,y_pred,sample_weight=sample_weight)
metric_txt += '$r^2: \ {}$\n'.format(round(r2,3))
elif metric=='mae':
test_mae = mean_absolute_error(y,y_pred,sample_weight=sample_weight)
mae_scale = int(np.ceil(np.log10(test_mae)))
if mae_scale < 3:
mae_round = 3 - mae_scale
else:
mae_round = 0
metric_txt += 'MAE: {}\n'.format(round(test_mae,mae_round))
if len(metric_txt) > 0:
x = text_kw.pop('x',0.05)
y = text_kw.pop('y',0.95)
ax.text(x,y,metric_txt,transform=ax.transAxes,va='top',**text_kw)
ax.set_xlabel('Actual')
ax.set_ylabel('Predicted')
if legend:
ax.legend(loc='lower right')
return ax
def predict_interval(model, X, percent=0.682):
if type(X)==pd.core.frame.DataFrame:
X = X.values
y_pred = np.array([tree.predict(X) for tree in model.estimators_])
lper = 100*(0.5 - percent/2)
uper = 100*(0.5 + percent/2)
lb = np.percentile(y_pred,lper,axis=0)
ub = np.percentile(y_pred,uper,axis=0)
# y_pred = model.predict(X)
# lb = y_pred - 10
# ub = y_pred + 10
return lb,ub
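# Hedged note and usage sketch (not part of the original module): predict_interval
# assumes a fitted ensemble whose members are exposed via `model.estimators_`
# (e.g. RandomForestRegressor); the interval is simply the percentile spread of the
# per-member predictions. The data below are synthetic placeholders.
def _demo_predict_interval():
    from sklearn.ensemble import RandomForestRegressor
    rng = np.random.RandomState(0)
    X = rng.rand(80, 3)
    y = X[:, 0] + 0.1 * rng.randn(80)
    model = RandomForestRegressor(n_estimators=50, random_state=0).fit(X, y)
    lb, ub = predict_interval(model, X, percent=0.682)
    return float(np.mean(ub - lb))  # average width of the ~68% interval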
class GridSearchRepeatedCV():
def __init__(self,estimator,param_grid):
self.estimator = deepcopy(estimator)
self.param_grid = param_grid
def fit(self,X,y,repeat,sample_weight=None,n_splits=5,pipeline_learner_step='auto',random_state=None):
meshgrid = np.meshgrid(*self.param_grid.values())
self.param_meshgrid_ = dict(zip(self.param_grid.keys(),meshgrid))
self.grid_scores_ = np.zeros_like(meshgrid[0],dtype='float')
self.grid_params_ = np.empty_like(meshgrid[0],dtype='object')
# iterate over parameter combinations
for idx, tmpvalue in np.ndenumerate(meshgrid[0]):
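# Hedged completion (the original method body is truncated at this point): a
# plausible body for the loop above would set each parameter combination on a
# copy of the estimator, run repeated K-fold CV, and store the mean test r2.
# The exact behaviour of the original implementation is unknown.
params = {name: grid[idx] for name, grid in self.param_meshgrid_.items()}
est = deepcopy(self.estimator)
est.set_params(**params)
_, _, scores, _, _, _ = repeated_KFold_cv(
    est, X, y, repeat, sample_weight, n_splits,
    pipeline_learner_step, pred_int=0, random_state=random_state)
self.grid_params_[idx] = params
self.grid_scores_[idx] = np.mean(scores)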
# -*- coding: utf-8 -*-
"""Routines for multiple scattering. The first half of the module contains functions to explicitly compute the
coupling matrix entries. The second half of the module contains functions for the preparation of lookup tables that
are used to approximate the coupling matrices by interpolation."""
from numba import complex128,int64,jit
from scipy.signal.filter_design import bessel
from tqdm import tqdm
import matplotlib.pyplot as plt
import numpy as np
import scipy.interpolate
import scipy.special
import smuthi.coordinates as coord
import smuthi.cuda_sources as cu
import smuthi.field_expansion as fldex
import smuthi.layers as lay
import smuthi.spherical_functions as sf
import smuthi.vector_wave_functions as vwf
import sys
try:
import pycuda.autoinit
import pycuda.driver as drv
from pycuda import gpuarray
from pycuda.compiler import SourceModule
import pycuda.cumath
except:
pass
@jit(complex128(complex128[:], complex128[:]),
nopython=True, cache=True, nogil=True)
def numba_trapz(y, x):
out = 0.0 + 0.0j
#TODO implement some (optional) advanced summation?
#e.g. https://github.com/nschloe/accupy/blob/master/accupy/sums.py
#or better Sum2 from https://doi.org/10.1137/030601818 (Algorithm 4.4)
#Note, that this may need to have exact summation for x and y, and exact product.
for i in range(len(y) - 1):  # one trapezoid per consecutive pair of samples
out += (x[i+1]-x[i]) * (y[i+1] + y[i])/2.0
return out
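# Hedged sanity check (not part of the original module): numba_trapz should agree
# with numpy.trapz for a smooth complex-valued integrand on a uniform grid.
def _check_numba_trapz():
    x = np.linspace(0.0, 1.0, 101).astype(complex)
    y = np.exp(1j * np.pi * x) * x**2
    return np.allclose(numba_trapz(y, x), np.trapz(y, x))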
@jit((complex128[:], complex128[:,:,:],
complex128[:,:,:,:],complex128[:,:,:], int64),
nopython=True,cache=True
,nogil=True
# , parallel=True
)
def eval_BeLBe(BeLBe, BeL, B1, ejkz, n2):
for k in range(len(BeLBe)):
for iplmn2 in range(2):
for pol in range(2):
BeLBe[k] += BeL[pol, iplmn2, k] * B1[pol, iplmn2, n2, k
] * ejkz[1, 1 - iplmn2, k]
def layer_mediated_coupling_block(vacuum_wavelength, receiving_particle, emitting_particle, layer_system,
k_parallel='default', show_integrand=False):
"""Layer-system mediated particle coupling matrix :math:`W^R` for two particles. This routine is explicit, but slow.
Args:
vacuum_wavelength (float): Vacuum wavelength :math:`\lambda` (length unit)
receiving_particle (smuthi.particles.Particle): Particle that receives the scattered field
emitting_particle (smuthi.particles.Particle): Particle that emits the scattered field
layer_system (smuthi.layers.LayerSystem): Stratified medium in which the coupling takes place
k_parallel (numpy ndarray): In-plane wavenumbers for Sommerfeld integral
If 'default', use smuthi.coordinates.default_k_parallel
show_integrand (bool): If True, the norm of the integrand is plotted.
Returns:
Layer mediated coupling matrix block as numpy array.
"""
if type(k_parallel) == str and k_parallel == 'default':
k_parallel = coord.default_k_parallel
omega = coord.angular_frequency(vacuum_wavelength)
# index specs
lmax1 = receiving_particle.l_max
mmax1 = receiving_particle.m_max
lmax2 = emitting_particle.l_max
mmax2 = emitting_particle.m_max
blocksize1 = fldex.blocksize(lmax1, mmax1)
blocksize2 = fldex.blocksize(lmax2, mmax2)
# cylindrical coordinates of relative position vectors
rs1 = np.array(receiving_particle.position)
rs2 = np.array(emitting_particle.position)
rs2s1 = rs1 - rs2
rhos2s1 = np.linalg.norm(rs2s1[0:2])
phis2s1 = np.arctan2(rs2s1[1], rs2s1[0])
is1 = layer_system.layer_number(rs1[2])
ziss1 = rs1[2] - layer_system.reference_z(is1)
is2 = layer_system.layer_number(rs2[2])
ziss2 = rs2[2] - layer_system.reference_z(is2)
# wave numbers
kis1 = omega * layer_system.refractive_indices[is1]
kis2 = omega * layer_system.refractive_indices[is2]
kzis1 = coord.k_z(k_parallel=k_parallel, k=kis1)
kzis2 = coord.k_z(k_parallel=k_parallel, k=kis2)
# phase factors
ejkz = np.zeros((2, 2, len(k_parallel)), dtype=complex) # indices are: particle, plus/minus, kpar_idx
ejkz[0, 0, :] = np.exp(1j * kzis1 * ziss1)
ejkz[0, 1, :] = np.exp(- 1j * kzis1 * ziss1)
ejkz[1, 0, :] = np.exp(1j * kzis2 * ziss2)
ejkz[1, 1, :] = np.exp(- 1j * kzis2 * ziss2)
# layer response
L = np.zeros((2, 2, 2, len(k_parallel)), dtype=complex) # polarization, pl/mn1, pl/mn2, kpar_idx
for pol in range(2):
L[pol, :, :, :] = lay.layersystem_response_matrix(pol, layer_system.thicknesses,
layer_system.refractive_indices, k_parallel, omega, is2, is1)
# transformation coefficients
B = [np.zeros((2, 2, blocksize1, len(k_parallel)), dtype=complex),
np.zeros((2, 2, blocksize2, len(k_parallel)), dtype=complex)]
# list index: particle, np indices: pol, plus/minus, n, kpar_idx
m_vec = [np.zeros(blocksize1, dtype=int), np.zeros(blocksize2, dtype=int)]
# precompute spherical functions
ct = kzis1 / kis1
st = k_parallel / kis1
_, pilm_list_pl, taulm_list_pl = sf.legendre_normalized(ct, st, lmax1)
_, pilm_list_mn, taulm_list_mn = sf.legendre_normalized(-ct, st, lmax1)
pilm = (pilm_list_pl, pilm_list_mn)
taulm = (taulm_list_pl, taulm_list_mn)
for tau in range(2):
for m in range(-mmax1, mmax1 + 1):
for l in range(max(1, abs(m)), lmax1 + 1):
n = fldex.multi_to_single_index(tau, l, m, lmax1, mmax1)
m_vec[0][n] = m
for iplmn in range(2):
for pol in range(2):
B[0][pol, iplmn, n, :] = vwf.transformation_coefficients_vwf(tau, l, m, pol, pilm_list=pilm[iplmn],
taulm_list=taulm[iplmn], dagger=True)
ct = kzis2 / kis2
st = k_parallel / kis2
_, pilm_list_pl, taulm_list_pl = sf.legendre_normalized(ct, st, lmax2)
_, pilm_list_mn, taulm_list_mn = sf.legendre_normalized(-ct, st, lmax2)
pilm = (pilm_list_pl, pilm_list_mn)
taulm = (taulm_list_pl, taulm_list_mn)
for tau in range(2):
for m in range(-mmax2, mmax2 + 1):
for l in range(max(1, abs(m)), lmax2 + 1):
n = fldex.multi_to_single_index(tau, l, m, lmax2, mmax2)
m_vec[1][n] = m
for iplmn in range(2):
for pol in range(2):
B[1][pol, iplmn, n, :] = vwf.transformation_coefficients_vwf(tau, l, m, pol, pilm_list=pilm[iplmn],
taulm_list=taulm[iplmn], dagger=False)
# bessel function and jacobi factor
bessel_list = []
for dm in range(lmax1 + lmax2 + 1):
bessel_list.append(scipy.special.jv(dm, k_parallel * rhos2s1))
jacobi_vector = k_parallel / (kzis2 * kis2)
m2_minus_m1 = m_vec[1] - m_vec[0][np.newaxis].T
wr_const = 4 * (1j) ** abs(m2_minus_m1) * np.exp(1j * m2_minus_m1 * phis2s1)
integral = np.zeros((blocksize1, blocksize2), dtype=complex)
for n1 in range(blocksize1):
BeL = np.zeros((2, 2, len(k_parallel)), dtype=complex) # indices are: pol, plmn2, n1, kpar_idx
for iplmn1 in range(2):
for pol in range(2):
BeL[pol, :, :] += (L[pol, iplmn1, :, :]
* B[0][pol, iplmn1, n1, :]
* ejkz[0, iplmn1, :])
for n2 in range(blocksize2):
bessel_full = bessel_list[abs(m_vec[0][n1] - m_vec[1][n2])]
BeLBe = np.zeros((len(k_parallel)), dtype=complex)
eval_BeLBe(BeLBe, BeL, B[1], ejkz, n2)
integrand = bessel_full * jacobi_vector * BeLBe
integral[n1,n2] = numba_trapz(integrand, k_parallel)
wr = wr_const * integral
return wr
def layer_mediated_coupling_matrix(vacuum_wavelength, particle_list, layer_system, k_parallel='default'):
"""Layer system mediated particle coupling matrix W^R for a particle collection in a layered medium.
Args:
vacuum_wavelength (float): Wavelength in length unit
particle_list (list of smuthi.particles.Particle objects): Scattering particles
layer_system (smuthi.layers.LayerSystem): The stratified medium
k_parallel (numpy.ndarray or str): In-plane wavenumber for Sommerfeld integrals.
If 'default', smuthi.coordinates.default_k_parallel
Returns:
Ensemble coupling matrix as numpy array.
"""
# indices
blocksizes = [fldex.blocksize(particle.l_max, particle.m_max) for particle in particle_list]
# initialize result
wr = np.zeros((sum(blocksizes), sum(blocksizes)), dtype=complex)
for s1, particle1 in enumerate(particle_list):
idx1 = np.array(range(sum(blocksizes[:s1]), sum(blocksizes[:s1]) + blocksizes[s1]))
for s2, particle2 in enumerate(particle_list):
idx2 = range(sum(blocksizes[:s2]), sum(blocksizes[:s2]) + blocksizes[s2])
wr[idx1[:, None], idx2] = layer_mediated_coupling_block(vacuum_wavelength, particle1, particle2,
layer_system, k_parallel)
return wr
def direct_coupling_block(vacuum_wavelength, receiving_particle, emitting_particle, layer_system):
"""Direct particle coupling matrix :math:`W` for two particles. This routine is explicit, but slow.
Args:
vacuum_wavelength (float): Vacuum wavelength :math:`\lambda` (length unit)
receiving_particle (smuthi.particles.Particle): Particle that receives the scattered field
emitting_particle (smuthi.particles.Particle): Particle that emits the scattered field
layer_system (smuthi.layers.LayerSystem): Stratified medium in which the coupling takes place
Returns:
Direct coupling matrix block as numpy array.
"""
omega = coord.angular_frequency(vacuum_wavelength)
# index specs
lmax1 = receiving_particle.l_max
mmax1 = receiving_particle.m_max
lmax2 = emitting_particle.l_max
mmax2 = emitting_particle.m_max
blocksize1 = fldex.blocksize(lmax1, mmax1)
blocksize2 = fldex.blocksize(lmax2, mmax2)
# initialize result
w = np.zeros((blocksize1, blocksize2), dtype=complex)
# check if particles are in same layer
rS1 = receiving_particle.position
rS2 = emitting_particle.position
iS1 = layer_system.layer_number(rS1[2])
iS2 = layer_system.layer_number(rS2[2])
if iS1 == iS2 and not emitting_particle == receiving_particle:
k = omega * layer_system.refractive_indices[iS1]
dx = rS1[0] - rS2[0]
dy = rS1[1] - rS2[1]
dz = rS1[2] - rS2[2]
d = np.sqrt(dx**2 + dy**2 + dz**2)
cos_theta = dz / d
sin_theta = np.sqrt(dx**2 + dy**2) / d
phi = np.arctan2(dy, dx)
# spherical functions
bessel_h = [sf.spherical_hankel(n, k * d) for n in range(lmax1 + lmax2 + 1)]
legendre, _, _ = sf.legendre_normalized(cos_theta, sin_theta, lmax1 + lmax2)
# the particle coupling operator is the transpose of the SVWF translation operator
# therefore, (l1,m1) and (l2,m2) are interchanged:
for m1 in range(-mmax1, mmax1 + 1):
for m2 in range(-mmax2, mmax2 + 1):
eimph = np.exp(1j * (m2 - m1) * phi)
for l1 in range(max(1, abs(m1)), lmax1 + 1):
for l2 in range(max(1, abs(m2)), lmax2 + 1):
A, B = complex(0), complex(0)
for ld in range(max(abs(l1 - l2), abs(m1 - m2)), l1 + l2 + 1): # if ld<abs(m1-m2) then P=0
a5, b5 = vwf.ab5_coefficients(l2, m2, l1, m1, ld)
A += a5 * bessel_h[ld] * legendre[ld][abs(m1 - m2)]
B += b5 * bessel_h[ld] * legendre[ld][abs(m1 - m2)]
A, B = eimph * A, eimph * B
for tau1 in range(2):
n1 = fldex.multi_to_single_index(tau1, l1, m1, lmax1, mmax1)
for tau2 in range(2):
n2 = fldex.multi_to_single_index(tau2, l2, m2, lmax2, mmax2)
if tau1 == tau2:
w[n1, n2] = A
else:
w[n1, n2] = B
return w
def direct_coupling_matrix(vacuum_wavelength, particle_list, layer_system):
"""Return the direct particle coupling matrix W for a particle collection in a layered medium.
Args:
vacuum_wavelength (float): Wavelength in length unit
particle_list (list of smuthi.particles.Particle objects): Scattering particles
layer_system (smuthi.layers.LayerSystem): The stratified medium
Returns:
Ensemble coupling matrix as numpy array.
"""
# indices
blocksizes = [fldex.blocksize(particle.l_max, particle.m_max)
for particle in particle_list]
# initialize result
w = np.zeros((sum(blocksizes), sum(blocksizes)), dtype=complex)
for s1, particle1 in enumerate(particle_list):
idx1 = np.array(range(sum(blocksizes[:s1]), sum(blocksizes[:s1+1])))
for s2, particle2 in enumerate(particle_list):
idx2 = range(sum(blocksizes[:s2]), sum(blocksizes[:s2+1]))
w[idx1[:, None], idx2] = direct_coupling_block(vacuum_wavelength, particle1, particle2, layer_system)
return w
def volumetric_coupling_lookup_table(vacuum_wavelength, particle_list, layer_system, k_parallel='default',
resolution=None):
"""Prepare Sommerfeld integral lookup table to allow for a fast calculation of the coupling matrix by interpolation.
This function is called when not all particles are on the same z-position.
Args:
vacuum_wavelength (float): Vacuum wavelength in length units
particle_list (list): List of particle objects
layer_system (smuthi.layers.LayerSystem): Stratified medium
k_parallel (numpy.ndarray or str): In-plane wavenumber for Sommerfeld integrals.
If 'default', smuthi.coordinates.default_k_parallel
resolution (float): Spatial resolution of lookup table in length units. (default: vacuum_wavelength / 100)
Smaller means more accurate but higher memory footprint
Returns:
(tuple): tuple containing:
w_pl (ndarray): Coupling lookup for z1 + z2, indices are [rho, z, n1, n2]. Includes layer mediated coupling.
w_mn (ndarray): Coupling lookup for z1 + z2, indices are [rho, z, n1, n2]. Includes layer mediated and
direct coupling.
rho_array (ndarray): Values for the radial distance considered for the lookup (starting from negative
numbers to allow for simpler cubic interpolation without distinction of cases
for lookup edges)
sz_array (ndarray): Values for the sum of z-coordinates (z1 + z2) considered for the lookup
dz_array (ndarray): Values for the difference of z-coordinates (z1 - z2) considered for the lookup
"""
sys.stdout.write('Prepare 3D particle coupling lookup:\n')
sys.stdout.flush()
if resolution is None:
resolution = vacuum_wavelength / 100
sys.stdout.write('Setting lookup resolution to %f\n'%resolution)
sys.stdout.flush()
l_max = max([particle.l_max for particle in particle_list])
m_max = max([particle.m_max for particle in particle_list])
blocksize = fldex.blocksize(l_max, m_max)
particle_x_array = np.array([particle.position[0] for particle in particle_list])
particle_y_array = np.array([particle.position[1] for particle in particle_list])
particle_z_array = np.array([particle.position[2] for particle in particle_list])
particle_rho_array = np.sqrt((particle_x_array[:, None] - particle_x_array[None, :]) ** 2
+ (particle_y_array[:, None] - particle_y_array[None, :]) ** 2)
dz_min = particle_z_array.min() - particle_z_array.max()
dz_max = particle_z_array.max() - particle_z_array.min()
sz_min = 2 * particle_z_array.min()
sz_max = 2 * particle_z_array.max()
rho_array = np.arange(- 3 * resolution, particle_rho_array.max() + 3 * resolution, resolution)
sz_array = np.arange(sz_min - 3 * resolution, sz_max + 3 * resolution, resolution)
dz_array = np.arange(dz_min - 3 * resolution, dz_max + 3 * resolution, resolution)
len_rho = len(rho_array)
len_sz = len(sz_array)
len_dz = len(dz_array)
assert len_sz == len_dz
i_s = layer_system.layer_number(particle_list[0].position[2])
k_is = layer_system.wavenumber(i_s, vacuum_wavelength)
z_is = layer_system.reference_z(i_s)
# direct -----------------------------------------------------------------------------------------------------------
w = np.zeros((len_rho, len_dz, blocksize, blocksize), dtype=np.complex64)
sys.stdout.write('Lookup table memory footprint: ' + size_format(2 * w.nbytes) + '\n')
sys.stdout.flush()
r_array = np.sqrt(dz_array[None, :]**2 + rho_array[:, None]**2)
r_array[r_array==0] = 1e-20
ct = dz_array[None, :] / r_array
st = rho_array[:, None] / r_array
legendre, _, _ = sf.legendre_normalized(ct, st, 2 * l_max)
bessel_h = []
for dm in tqdm(range(2 * l_max + 1), desc='Spherical Hankel lookup ', file=sys.stdout,
bar_format='{l_bar}{bar}| elapsed: {elapsed} remaining: {remaining}'):
bessel_h.append(sf.spherical_hankel(dm, k_is * r_array))
pbar = tqdm(total=blocksize**2,
desc='Direct coupling ',
file=sys.stdout,
bar_format='{l_bar}{bar}| elapsed: {elapsed} remaining: {remaining}')
for m1 in range(-m_max, m_max+1):
for m2 in range(-m_max, m_max+1):
for l1 in range(max(1, abs(m1)), l_max + 1):
for l2 in range(max(1, abs(m2)), l_max + 1):
A = np.zeros((len_rho, len_dz), dtype=complex)
B = np.zeros((len_rho, len_dz), dtype=complex)
for ld in range(max(abs(l1 - l2), abs(m1 - m2)), l1 + l2 + 1): # if ld<abs(m1-m2) then P=0
a5, b5 = vwf.ab5_coefficients(l2, m2, l1, m1, ld) # remember that w = A.T
A += a5 * bessel_h[ld] * legendre[ld][abs(m1 - m2)] # remember that w = A.T
B += b5 * bessel_h[ld] * legendre[ld][abs(m1 - m2)] # remember that w = A.T
for tau1 in range(2):
n1 = fldex.multi_to_single_index(tau1, l1, m1, l_max, m_max)
for tau2 in range(2):
n2 = fldex.multi_to_single_index(tau2, l2, m2, l_max, m_max)
if tau1 == tau2:
w[:, :, n1, n2] = A
else:
w[:, :, n1, n2] = B
pbar.update()
pbar.close()
# switch off direct coupling contribution near rho=0:
w[rho_array < particle_rho_array[~np.eye(particle_rho_array.shape[0],dtype=bool)].min() / 2, :, :, :] = 0
# layer mediated ---------------------------------------------------------------------------------------------------
sys.stdout.write('Layer mediated coupling : ...')
sys.stdout.flush()
if type(k_parallel) == str and k_parallel == 'default':
k_parallel = coord.default_k_parallel
kz_is = coord.k_z(k_parallel=k_parallel, k=k_is)
len_kp = len(k_parallel)
# phase factors
epljksz = np.exp(1j * kz_is[None, :] * (sz_array[:, None] - 2 * z_is)) # z, k
emnjksz = np.exp(- 1j * kz_is[None, :] * (sz_array[:, None] - 2 * z_is))
epljkdz = np.exp(1j * kz_is[None, :] * dz_array[:, None])
emnjkdz = np.exp(- 1j * kz_is[None, :] * dz_array[:, None])
# layer response
L = np.zeros((2, 2, 2, len_kp), dtype=complex) # pol, pl/mn1, pl/mn2, kp
for pol in range(2):
L[pol, :, :, :] = lay.layersystem_response_matrix(pol, layer_system.thicknesses,
layer_system.refractive_indices, k_parallel,
coord.angular_frequency(vacuum_wavelength), i_s, i_s)
# transformation coefficients
B_dag = np.zeros((2, 2, blocksize, len_kp), dtype=complex) # pol, pl/mn, n, kp
B = np.zeros((2, 2, blocksize, len_kp), dtype=complex) # pol, pl/mn, n, kp
ct_k = kz_is / k_is
st_k = k_parallel / k_is
_, pilm_pl, taulm_pl = sf.legendre_normalized(ct_k, st_k, l_max)
_, pilm_mn, taulm_mn = sf.legendre_normalized(-ct_k, st_k, l_max)
m_list = [None for i in range(blocksize)]
for tau in range(2):
for m in range(-m_max, m_max + 1):
for l in range(max(1, abs(m)), l_max + 1):
n = fldex.multi_to_single_index(tau, l, m, l_max, m_max)
m_list[n] = m
for pol in range(2):
B_dag[pol, 0, n, :] = vwf.transformation_coefficients_vwf(tau, l, m, pol, pilm_list=pilm_pl,
taulm_list=taulm_pl, dagger=True)
B_dag[pol, 1, n, :] = vwf.transformation_coefficients_vwf(tau, l, m, pol, pilm_list=pilm_mn,
taulm_list=taulm_mn, dagger=True)
B[pol, 0, n, :] = vwf.transformation_coefficients_vwf(tau, l, m, pol, pilm_list=pilm_pl,
taulm_list=taulm_pl, dagger=False)
B[pol, 1, n, :] = vwf.transformation_coefficients_vwf(tau, l, m, pol, pilm_list=pilm_mn,
taulm_list=taulm_mn, dagger=False)
# pairs of (n1, n2), listed by abs(m1-m2)
n1n2_combinations = [[] for dm in range(2*m_max+1)]
for n1 in range(blocksize):
m1 = m_list[n1]
for n2 in range(blocksize):
m2 = m_list[n2]
n1n2_combinations[abs(m1-m2)].append((n1,n2))
wr_pl = np.zeros((len_rho, len_dz, blocksize, blocksize), dtype=np.complex64)
wr_mn = np.zeros((len_rho, len_dz, blocksize, blocksize), dtype=np.complex64)
dkp = np.diff(k_parallel)
if cu.use_gpu:
re_dkp_d = gpuarray.to_gpu(np.float32(dkp.real))
im_dkp_d = gpuarray.to_gpu(np.float32(dkp.imag))
kernel_source_code = cu.volume_lookup_assembly_code %(blocksize, len_rho, len_sz, len_kp)
helper_function = SourceModule(kernel_source_code).get_function("helper")
cuda_blocksize = 128
cuda_gridsize = (len_rho * len_sz + cuda_blocksize - 1) // cuda_blocksize
re_dwr_d = gpuarray.to_gpu(np.zeros((len_rho, len_sz), dtype=np.float32))
im_dwr_d = gpuarray.to_gpu(np.zeros((len_rho, len_sz), dtype=np.float32))
pbar = tqdm(total=blocksize**2,
desc='Layer mediated coupling ',
file=sys.stdout,
bar_format='{l_bar}{bar}| elapsed: {elapsed} remaining: {remaining}')
for dm in range(2*m_max+1):
bessel = scipy.special.jv(dm, (k_parallel[None,:]*rho_array[:,None]))
besjac = bessel * (k_parallel / (kz_is * k_is))[None,:]
for n1n2 in n1n2_combinations[dm]:
n1 = n1n2[0]
m1 = m_list[n1]
n2 = n1n2[1]
m2 = m_list[n2]
belbee_pl = np.zeros((len_dz, len_kp), dtype=complex)
belbee_mn = np.zeros((len_dz, len_kp), dtype=complex)
for pol in range(2):
belbee_pl += ((L[pol, 0, 1, :] * B_dag[pol, 0, n1, :] * B[pol, 1, n2, :])[None, :] * epljksz
+ (L[pol, 1, 0, :] * B_dag[pol, 1, n1, :] * B[pol, 0, n2, :])[None, :] * emnjksz)
belbee_mn += ((L[pol, 0, 0, :] * B_dag[pol, 0, n1, :] * B[pol, 0, n2, :])[None, :] * epljkdz
+ (L[pol, 1, 1, :] * B_dag[pol, 1, n1, :] * B[pol, 1, n2, :])[None, :] * emnjkdz)
if cu.use_gpu:
re_belbee_pl_d = gpuarray.to_gpu(np.float32(belbee_pl[None, :, :].real))
im_belbee_pl_d = gpuarray.to_gpu(np.float32(belbee_pl[None, :, :].imag))
re_belbee_mn_d = gpuarray.to_gpu(np.float32(belbee_mn[None, :, :].real))
im_belbee_mn_d = gpuarray.to_gpu(np.float32(belbee_mn[None, :, :].imag))
re_besjac_d = gpuarray.to_gpu(np.float32(besjac[:, None, :].real))
im_besjac_d = gpuarray.to_gpu(np.float32(besjac[:, None, :].imag))
helper_function(re_besjac_d.gpudata, im_besjac_d.gpudata, re_belbee_pl_d.gpudata,
im_belbee_pl_d.gpudata, re_dkp_d.gpudata, im_dkp_d.gpudata, re_dwr_d.gpudata,
im_dwr_d.gpudata, block=(cuda_blocksize, 1, 1), grid=(cuda_gridsize, 1))
wr_pl[:, :, n1, n2] = 4 * (1j)**abs(m2 - m1) * (re_dwr_d.get() + 1j * im_dwr_d.get())
helper_function(re_besjac_d.gpudata, im_besjac_d.gpudata, re_belbee_mn_d.gpudata,
im_belbee_mn_d.gpudata, re_dkp_d.gpudata, im_dkp_d.gpudata, re_dwr_d.gpudata,
im_dwr_d.gpudata, block=(cuda_blocksize, 1, 1), grid=(cuda_gridsize, 1))
wr_mn[:, :, n1, n2] = 4 * (1j)**abs(m2 - m1) * (re_dwr_d.get() + 1j * im_dwr_d.get())
else:
integrand = besjac[:, None, :] * belbee_pl[None, :, :]
wr_pl[:, :, n1, n2] = 2 * (1j)**abs(m2 - m1) * ((integrand[:, :, :-1] + integrand[:, :, 1:])
* dkp[None, None, :]).sum(axis=-1) # trapezoidal rule
integrand = besjac[:, None, :] * belbee_mn[None, :, :]
wr_mn[:, :, n1, n2] = 2 * (1j)**abs(m2 - m1) * ((integrand[:, :, :-1] + integrand[:, :, 1:])
* dkp[None, None, :]).sum(axis=-1)
pbar.update()
pbar.close()
return wr_pl, w + wr_mn, rho_array, sz_array, dz_array
def radial_coupling_lookup_table(vacuum_wavelength, particle_list, layer_system, k_parallel='default', resolution=None):
"""Prepare Sommerfeld integral lookup table to allow for a fast calculation of the coupling matrix by interpolation.
This function is called when all particles are on the same z-position.
Args:
vacuum_wavelength (float): Vacuum wavelength in length units
particle_list (list): List of particle objects
layer_system (smuthi.layers.LayerSystem): Stratified medium
k_parallel (numpy.ndarray or str): In-plane wavenumber for Sommerfeld integrals.
If 'default', smuthi.coordinates.default_k_parallel
resolution (float): Spatial resolution of lookup table in length units. (default: vacuum_wavelength / 100)
Smaller means more accurate but higher memory footprint
Returns:
(tuple) tuple containing:
lookup_table (ndarray): Coupling lookup, indices are [rho, n1, n2].
rho_array (ndarray): Values for the radial distance considered for the lookup (starting from negative
numbers to allow for simpler cubic interpolation without distinction of cases
at rho=0)
"""
sys.stdout.write('Prepare radial particle coupling lookup:\n')
sys.stdout.flush()
if resolution is None:
resolution = vacuum_wavelength / 100
sys.stdout.write('Setting lookup resolution to %f\n'%resolution)
sys.stdout.flush()
l_max = max([particle.l_max for particle in particle_list])
m_max = max([particle.m_max for particle in particle_list])
blocksize = fldex.blocksize(l_max, m_max)
x_array = np.array([particle.position[0] for particle in particle_list])
y_array = np.array([particle.position[1] for particle in particle_list])
import pytest
from keras_contrib.losses import dice_loss
from keras_contrib.utils.test_utils import is_tf_keras
from keras import backend as K
import numpy as np
def test_dice_loss_shapes_scalar():
y_true = np.random.randn(3, 4)
y_pred = np.random.randn(3, 4)
L = dice_loss(
K.variable(y_true),
K.variable(y_pred), )
assert K.is_tensor(L), 'should be a Tensor'
assert L.shape == ()
assert K.eval(L).shape == ()
def test_dice_loss_for_same_array():
y_true = np.random.randn(3, 4)
y_pred = y_true.copy()
L = dice_loss(
K.variable(y_true),
K.variable(y_pred), )
assert K.eval(L) == 0, 'loss should be zero'
def test_dice_loss_for_zero_array():
y_true = np.array([1])
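# Hedged completion (the original test body is truncated at this point): with an
# all-zero prediction there is no overlap with y_true, so the Dice loss should be
# near its maximum; only positivity is asserted here to stay agnostic about the
# exact smoothing convention used by keras_contrib.
y_pred = np.zeros_like(y_true)
L = dice_loss(
    K.variable(y_true),
    K.variable(y_pred), )
assert K.eval(L) > 0, 'loss should be positive for disjoint arrays'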
import numpy as np
import cv2
import warnings
warnings.filterwarnings('ignore')
import matplotlib
matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
import os
import scipy
import imageio
from scipy.ndimage import gaussian_filter1d, gaussian_filter
from sklearn import linear_model
from sklearn.model_selection import train_test_split
from matplotlib.colors import ListedColormap
import statsmodels.api as sm
import pandas as pd
from statsmodels.stats.anova import AnovaRM
from helper_code.registration_funcs import model_arena, get_arena_details
from helper_code.processing_funcs import speed_colors
from helper_code.analysis_funcs import *
from important_code.shuffle_test import permutation_test, permutation_correlation
plt.rcParams.update({'font.size': 30})
def plot_traversals(self):
''' plot all traversals across the arena '''
# initialize parameters
sides = ['back', 'front']
# sides = ['back']
types = ['spontaneous'] #, 'evoked']
fast_color = np.array([.5, 1, .5])
slow_color = np.array([1, .9, .9])
edge_vector_color = np.array([1, .95, .85])
homing_vector_color = np.array([.725, .725, .725])
edge_vector_color = np.array([.98, .9, .6])**4
homing_vector_color = np.array([0, 0, 0])
non_escape_color = np.array([0,0,0])
condition_colors = [[.5,.5,.5], [.3,.5,.8], [0,.7,1]]
time_thresh = 15 #20 for ev comparison
speed_thresh = 2
p = 0
HV_cutoff = .681 # .5 for exploratory analysis
# initialize figures
fig, fig2, fig3, ax, ax2, ax3 = initialize_figures_traversals(self) #, types = len(types)+1)
# initialize lists for stats
all_data = []
all_conditions = []
edge_vector_time_all = np.array([])
# loop over spontaneous vs evoked
for t, type in enumerate(types):
# loop over experiments and conditions
for c, (experiment, condition) in enumerate(zip(self.experiments, self.conditions)):
strategies = [0, 0, 0]
# extract experiments from nested list
sub_experiments, sub_conditions = extract_experiments(experiment, condition)
# initialize the arena
arena, arena_color, scaling_factor, obstacle = initialize_arena(self, sub_experiments, sub_conditions)
path_ax, path_fig = get_arena_plot(obstacle, sub_conditions, sub_experiments)
# initialize edginess
all_traversals_edgy = {}
all_traversals_homy = {}
proportion_edgy = {}
for s in sides:
all_traversals_edgy[s] = []
all_traversals_homy[s] = []
proportion_edgy[s] = []
m = 0
# loop over each experiment and condition
for e, (experiment, condition) in enumerate(zip(sub_experiments, sub_conditions)):
# loop over each mouse in the experiment
for i, mouse in enumerate(self.analysis[experiment][condition]['back traversal']):
mouse_data = []
print(mouse)
# loop over back and front sides
for s, start in enumerate(sides):
if start == 'front' and type == 'evoked': continue
# find all the paths across the arena
traversal = self.analysis[experiment][condition][start + ' traversal'][mouse]
# get the duration of those paths
# duration = traversal[t*5+3]
if traversal:
if traversal[t*5]:
x_end_loc = np.array([x_loc[-1] * scaling_factor for x_loc in np.array(traversal[t * 5 + 0])[:, 0]])
if traversal[4] < 10: continue
number_of_edge_vectors = np.sum((np.array(traversal[t*5+3]) < speed_thresh) * \
(np.array(traversal[t*5+2]) > HV_cutoff) * \
# (abs(x_end_loc - 50) < 30) * \
(np.array(traversal[t*5+1]) < time_thresh*30*60) ) / min(traversal[4], time_thresh) * time_thresh
# print(traversal[4])
number_of_homing_vectors = np.sum((np.array(traversal[t*5+3]) < speed_thresh) * \
(np.array(traversal[t*5+2]) < HV_cutoff) * \
# (abs(x_end_loc - 50) < 30) * \
(np.array(traversal[t*5+1]) < time_thresh*30*60) )/ min(traversal[4], time_thresh) * time_thresh
all_traversals_edgy[start].append( number_of_edge_vectors )
all_traversals_homy[start].append(number_of_homing_vectors)
# print(number_of_edge_vectors)
mouse_data.append(number_of_edge_vectors)
# get the time of edge vectors
if condition == 'obstacle' and 'wall' in experiment:
edge_vector_idx = ( (np.array(traversal[t * 5 + 3]) < speed_thresh) * (np.array(traversal[t * 5 + 2]) > HV_cutoff) )
edge_vector_time = np.array(traversal[t*5+1])[edge_vector_idx] / 30 / 60
edge_vector_time_all = np.concatenate((edge_vector_time_all, edge_vector_time))
# prop_edgy = np.sum((np.array(traversal[t*5 + 3]) < speed_thresh) * \
# (np.array(traversal[t*5 + 2]) > HV_cutoff) * \
# (np.array(traversal[t * 5 + 1]) < time_thresh * 30 * 60)) / \
# np.sum((np.array(traversal[t * 5 + 3]) < speed_thresh) * \
# (np.array(traversal[t * 5 + 1]) < time_thresh * 30 * 60))
else:
all_traversals_edgy[start].append(0)
all_traversals_homy[start].append(0)
# if np.isnan(prop_edgy): prop_edgy = .5
# prop_edgy = prop_edgy / .35738
# proportion_edgy[start].append(prop_edgy)
traversal_coords = np.array(traversal[t*5+0])
pre_traversal = np.array(traversal[10])
else:
# all_traversals_edginess[start].append(0)
continue
m += .5
# loop over all paths
show = False
if show and traversal:
for trial in range(traversal_coords.shape[0]):
# make sure it qualifies
if traversal[t * 5 + 3][trial] > speed_thresh: continue
if traversal[t*5+1][trial] > time_thresh*30*60: continue
if not len(pre_traversal[0][0]): continue
# if abs(traversal_coords[trial][0][-1]*scaling_factor - 50) > 30: continue
# downsample to get even coverage
# if c == 2 and np.random.random() > (59 / 234): continue
# if c == 1 and np.random.random() > (59 / 94): continue
if traversal[t*5+2][trial]> HV_cutoff: plot_color = edge_vector_color
else: plot_color = homing_vector_color
display_traversal(scaling_factor, traversal_coords, pre_traversal, trial, path_ax, plot_color)
if mouse_data:
# all_data.append(mouse_data)
all_conditions.append(c)
# save image
path_fig.savefig(os.path.join(self.summary_plots_folder, self.labels[c] + ' traversals.eps'), format='eps', bbox_inches='tight', pad_inches=0)
# plot the data
if type == 'spontaneous' and len(sides) > 1:
plot_number_edgy = np.array(all_traversals_edgy['front']).astype(float) + np.array(all_traversals_edgy['back']).astype(float)
plot_number_homy = np.array(all_traversals_homy['front']).astype(float) + np.array(all_traversals_homy['back']).astype(float)
print(np.sum(plot_number_edgy + plot_number_homy))
# plot_proportion_edgy = (np.array(proportion_edgy['front']).astype(float) + np.array(proportion_edgy['back']).astype(float)) / 2
plot_proportion_edgy = plot_number_edgy / (plot_number_edgy + plot_number_homy)
all_data.append(plot_number_edgy)
else:
plot_number_edgy = np.array(all_traversals_edgy[sides[0]]).astype(float)
plot_number_homy = np.array(all_traversals_homy[sides[0]]).astype(float)
plot_proportion_edgy = plot_number_edgy / (plot_number_edgy + plot_number_homy)
# plot_proportion_edgy = np.array(proportion_edgy[sides[0]]).astype(float)
for i, (plot_data, ax0) in enumerate(zip([plot_number_edgy, plot_number_homy], [ax, ax3])): #, plot_proportion_edgy , ax2
print(plot_data)
print(np.sum(plot_data))
# plot each trial
# scatter_axis = scatter_the_axis( (p*4/3+.5/3), plot_data)
ax0.scatter(np.ones_like(plot_data)* (p*4/3+.5/3)* 3 - .2, plot_data, color=[0,0,0, .4], edgecolors='none', s=25, zorder=99)
# do kde
# if i==0: bw = .5
# else: bw = .02
bw = .5
kde = fit_kde(plot_data, bw=bw)
plot_kde(ax0, kde, plot_data, z=4 * p + .8, vertical=True, normto=.3, color=[.5, .5, .5], violin=False, clip=True)
ax0.plot([4 * p + -.2, 4 * p + -.2], [np.percentile(plot_data, 25), np.percentile(plot_data, 75)], color = [0,0,0])
ax0.plot([4 * p + -.4, 4 * p + -.0], [np.percentile(plot_data, 50), np.percentile(plot_data, 50)], color = [1,1,1], linewidth = 2)
# else:
# # kde = fit_kde(plot_data, bw=.03)
# # plot_kde(ax0, kde, plot_data, z=4 * p + .8, vertical=True, normto=1.2, color=[.5, .5, .5], violin=False, clip=True)
# bp = ax0.boxplot([plot_data, [0, 0]], positions=[4 * p + -.2, -10], showfliers=False, zorder=99)
# ax0.set_xlim([-1, 4 * len(self.experiments) - 1])
p+=1
# plot a stacked bar of strategies
# fig3 = plot_strategies(strategies, homing_vector_color, non_escape_color, edge_vector_color)
# fig3.savefig(os.path.join(self.summary_plots_folder, 'Traversal categories - ' + self.labels[c] + '.png'), format='png', bbox_inches = 'tight', pad_inches = 0)
# fig3.savefig(os.path.join(self.summary_plots_folder, 'Traversal categories - ' + self.labels[c] + '.eps'), format='eps', bbox_inches = 'tight', pad_inches = 0)
# make timing hist
plt.figure()
bins = np.arange(0,22.5,2.5)
plt.hist(edge_vector_time_all, bins = bins, color = [0,0,0], weights = np.ones_like(edge_vector_time_all) / 2.5 / m) #condition_colors[c])
plt.ylim([0,2.1])
plt.show()
# # save the plot
fig.savefig(os.path.join(self.summary_plots_folder, 'Traversal # EVS comparison.png'), format='png', bbox_inches='tight', pad_inches=0)
fig.savefig(os.path.join(self.summary_plots_folder, 'Traversal # EVS comparison.eps'), format='eps', bbox_inches='tight', pad_inches=0)
fig3.savefig(os.path.join(self.summary_plots_folder, 'Traversal # HVS comparison.png'), format='png', bbox_inches='tight', pad_inches=0)
fig3.savefig(os.path.join(self.summary_plots_folder, 'Traversal # HVS comparison.eps'), format='eps', bbox_inches='tight', pad_inches=0)
group_A = [[d] for d in all_data[0]]
group_B = [[d] for d in all_data[2]]
permutation_test(group_A, group_B, iterations = 100000, two_tailed = False)
group_A = [[d] for d in all_data[2]]
group_B = [[d] for d in all_data[1]]
permutation_test(group_A, group_B, iterations = 10000, two_tailed = True)
# fig2.savefig(os.path.join(self.summary_plots_folder, 'Traversal proportion edgy.png'), format='png', bbox_inches='tight', pad_inches=0)
# fig2.savefig(os.path.join(self.summary_plots_folder, 'Traversal proportion edgy.eps'), format='eps', bbox_inches='tight', pad_inches=0)
plt.show()
def plot_speed_traces(self, speed = 'absolute'):
''' plot the speed traces '''
max_speed = 60
# loop over experiments and conditions
for c, (experiment, condition) in enumerate(zip(self.experiments, self.conditions)):
# extract experiments from nested list
sub_experiments, sub_conditions = extract_experiments(experiment, condition)
# get the number of trials
number_of_trials = get_number_of_trials(sub_experiments, sub_conditions, self.analysis)
number_of_mice = get_number_of_mice(sub_experiments, sub_conditions, self.analysis)
RT, end_idx, scaling_factor, speed_traces, subgoal_speed_traces, time, time_axis, trial_num = \
initialize_variables(number_of_trials, self,sub_experiments)
# create custom colormap
colormap = speed_colormap(scaling_factor, max_speed, n_bins=256, v_min=0, v_max=max_speed)
# loop over each experiment and condition
for e, (experiment, condition) in enumerate(zip(sub_experiments, sub_conditions)):
# loop over each mouse
for i, mouse in enumerate(self.analysis[experiment][condition]['speed']):
# control analysis
if self.analysis_options['control'] and not mouse=='control': continue
if not self.analysis_options['control'] and mouse=='control': continue
# loop over each trial
for trial in range(len(self.analysis[experiment][condition]['speed'][mouse])):
if trial > 2: continue
trial_num = fill_in_trial_data(RT, condition, end_idx, experiment, mouse, scaling_factor, self,
speed_traces, subgoal_speed_traces, time, trial, trial_num)
# print some useful metrics
print_metrics(RT, end_idx, number_of_mice, number_of_trials)
# put the speed traces on the plot
fig = show_speed_traces(colormap, condition, end_idx, experiment, number_of_trials, speed, speed_traces, subgoal_speed_traces, time_axis, max_speed)
# save the plot
fig.savefig(os.path.join(self.summary_plots_folder,'Speed traces - ' + self.labels[c] + '.png'), format='png', bbox_inches = 'tight', pad_inches = 0)
fig.savefig(os.path.join(self.summary_plots_folder,'Speed traces - ' + self.labels[c] + '.eps'), format='eps', bbox_inches = 'tight', pad_inches = 0)
plt.show()
print('done')
def plot_escape_paths(self):
''' plot the escape paths '''
# initialize parameters
edge_vector_color = [np.array([1, .95, .85]), np.array([.98, .9, .6])**4]
homing_vector_color = [ np.array([.725, .725, .725]), np.array([0, 0, 0])]
non_escape_color = np.array([0,0,0])
fps = 30
escape_duration = 18 #6 #9 for food # 18 for U
min_distance_to_shelter = 30
HV_cutoff = 0.681 #.75 #.7
# initialize all data for stats
all_data = [[], [], [], []]
all_conditions = []
# loop over experiments and conditions
for c, (experiment, condition) in enumerate(zip(self.experiments, self.conditions)):
# extract experiments from nested list
sub_experiments, sub_conditions = extract_experiments(experiment, condition)
# initialize the arena
arena, arena_color, scaling_factor, obstacle = initialize_arena(self, sub_experiments, sub_conditions)
# more arena stuff for this analysis type
arena_reference = arena_color.copy()
arena_color[arena_reference == 245] = 255
get_arena_details(self, experiment=sub_experiments[0])
shelter_location = [s / scaling_factor / 10 for s in self.shelter_location]
# initialize strategy array
strategies = np.array([0,0,0])
path_ax, path_fig = get_arena_plot(obstacle, sub_conditions, sub_experiments)
# loop over each experiment and condition
for e, (experiment, condition) in enumerate(zip(sub_experiments, sub_conditions)):
if 'void' in experiment or 'dark' in experiment or ('off' in experiment and condition == 'no obstacle') or 'quick' in experiment:
escape_duration = 18
elif 'food' in experiment:
escape_duration = 9
else:
escape_duration = 12
# loop over each mouse
for i, mouse in enumerate(self.analysis[experiment][condition]['speed']):
print(mouse)
# control analysis
if self.analysis_options['control'] and not mouse=='control': continue
if not self.analysis_options['control'] and mouse=='control': continue
# color based on visual vs tactile obst avoidance
# if mouse == 'CA7190' or mouse == 'CA3210' or mouse == 'CA3155' or mouse == 'CA8100':
# edge_vector_color = [np.array([.6, .4, .99]),np.array([.6, .4, .99])]
# homing_vector_color = [np.array([.6, .4, .99]),np.array([.6, .4, .99])]
# else:
# edge_vector_color = [np.array([.8, .95, 0]),np.array([.8, .95, 0])]
# homing_vector_color = [np.array([.8, .95, 0]),np.array([.8, .95, 0])]
# show escape paths
show_escape_paths(HV_cutoff, arena, arena_color, arena_reference, c, condition, edge_vector_color, escape_duration, experiment, fps,
homing_vector_color, min_distance_to_shelter, mouse, non_escape_color, scaling_factor, self, shelter_location, strategies, path_ax,
determine_strategy = False) #('dark' in experiment and condition=='obstacle'))
# save image
# scipy.misc.imsave(os.path.join(self.summary_plots_folder, 'Escape paths - ' + self.labels[c] + '.png'), arena_color[:,:,::-1])
imageio.imwrite(os.path.join(self.summary_plots_folder, 'Escape paths - ' + self.labels[c] + '.png'), arena_color[:,:,::-1])
path_fig.savefig(os.path.join(self.summary_plots_folder, 'Escape plot - ' + self.labels[c] + '.png'), format='png', bbox_inches='tight', pad_inches=0)
path_fig.savefig(os.path.join(self.summary_plots_folder, 'Escape plot - ' + self.labels[c] + '.eps'), format='eps', bbox_inches='tight', pad_inches=0)
# plot a stacked bar of strategies
fig = plot_strategies(strategies, homing_vector_color, non_escape_color, edge_vector_color)
fig.savefig(os.path.join(self.summary_plots_folder, 'Escape categories - ' + self.labels[c] + '.png'), format='png', bbox_inches = 'tight', pad_inches = 0)
fig.savefig(os.path.join(self.summary_plots_folder, 'Escape categories - ' + self.labels[c] + '.eps'), format='eps', bbox_inches = 'tight', pad_inches = 0)
plt.show()
print('escape')
# strategies = np.array([4,5,0])
# fig = plot_strategies(strategies, homing_vector_color, non_escape_color, edge_vector_color)
# plt.show()
# fig.savefig(os.path.join(self.summary_plots_folder, 'Trajectory by previous edge-vectors 2.png'), format='png', bbox_inches='tight', pad_inches=0)
# fig.savefig(os.path.join(self.summary_plots_folder, 'Trajectory by previous edge-vectors 2.eps'), format='eps', bbox_inches='tight', pad_inches=0)
# group_A = [[0],[1],[0,0,0],[0,0],[0,1],[1,0],[0,0,0]]
# group_B = [[1,0,0],[0,0,0,0],[0,0,0],[1,0,0],[0,0,0]]
# permutation_test(group_B, group_A, iterations = 10000, two_tailed = False)
obstacle = [[0],[1],[0,0,0],[0,0],[0,1],[1],[0,0,0], [1]]
# obstacle_exp = [[0,1],[0,0,0,0,1],[0,1],[0]]
open_field = [[1,0,0,0,0],[0,0,0,0,0],[0,0,0,0],[1,0,0,0,0,0],[0,0,0,0,0,0],[0,0,0,0,0,0,0,0]]
# U_shaped = [[0,1],[1,1], [1,1], [0,0,1], [0,0,0], [0], [1], [0], [0,1], [0,1,0,0], [0,0,0]]
# permutation_test(open_field, obstacle, iterations = 10000, two_tailed = False)
# do same edgy homing then stop to both
obstacle = [[0],[1],[0,0,0],[0,0],[0,1],[1],[0,0,0], [1], [1], [0,0,0]]
open_field = [[1],[0,0,0],[0,0,0],[1,0,0],[0,0,0],[0,0,1]] #stop at 3 trials
# do same edgy homing then stop to both --> exclude non escapes
obstacle = [[0],[1],[0,0,0],[0],[0,1],[1],[0,0,0], [1], [1], [0,0,0]]
open_field = [[1],[0,0],[0,0,0],[1,0,0],[0,0,0],[0,1]] #stop at 3 trials
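        # the hard-coded obstacle / open_field lists above are, presumably, per-mouse trial outcomes
        # entered by hand for interactive permutation tests (see the commented permutation_test calls)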
def plot_edginess(self):
# initialize parameters
fps = 30
escape_duration = 12 #9 #6
HV_cutoff = .681 #.681
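        # trials with edginess above HV_cutoff are treated as edge-vector escapes and trials at or
        # below it as homing-vector escapes (this threshold drives the colouring and KDE split below)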
ETD = 10 #10
traj_loc = 40
edge_vector_color = np.array([.98, .9, .6])**5
edge_vector_color = np.array([.99, .94, .6]) ** 3
# edge_vector_color = np.array([.99, .95, .6]) ** 5
homing_vector_color = np.array([0, 0, 0])
# homing_vector_color = np.array([.85, .65, .8])
# edge_vector_color = np.array([.65, .85, .7])
# colors for diff conditions
colors = [np.array([.7, 0, .3]), np.array([0, .8, .5])]
colors = [np.array([.3,.3,.3]), np.array([1, .2, 0]), np.array([0, .8, .4]), np.array([0, .7, .9])]
colors = [np.array([.3, .3, .3]), np.array([1, .2, 0]), np.array([.7, 0, .7]), np.array([0, .7, .9]), np.array([0,1,0])]
# colors = [np.array([0, 0, 0]), np.array([0, 0, 0]),np.array([0, 0, 0]), np.array([0, 0, 0])]
offset = [0,.2, .2, 0]
# initialize figures
fig, fig2, fig3, fig4, _, ax, ax2, ax3 = initialize_figures(self)
# initialize all data for stats
all_data = [[],[],[],[]]
all_conditions = []
mouse_ID = []; m = 1
dist_data_EV_other_all = []
delta_ICs, delta_x_end = [], []
time_to_shelter, was_escape = [], []
repetitions = 1
for rand_select in range(repetitions):
m = -1
# loop over experiments and conditions
for c, (experiment, condition) in enumerate(zip(self.experiments, self.conditions)):
num_trials_total = 0
num_trials_escape = 0
# extract experiments from nested list
sub_experiments, sub_conditions = extract_experiments(experiment, condition)
# get the number of trials
number_of_trials = get_number_of_trials(sub_experiments, sub_conditions, self.analysis)
number_of_mice = get_number_of_mice(sub_experiments, sub_conditions, self.analysis)
t_total = 0
# initialize array to fill in with each trial's data
edginess, end_idx, time_since_down, time_to_shelter, time_to_shelter_all, prev_edginess, scaling_factor, time_in_center, trial_num, _, _, dist_to_SH, dist_to_other_SH = \
initialize_variable_edginess(number_of_trials, self, sub_experiments)
mouse_ID_trial = edginess.copy()
# loop over each experiment and condition
for e, (experiment, condition) in enumerate(zip(sub_experiments, sub_conditions)):
if 'void' in experiment or 'dark' in experiment or ('off' in experiment and condition == 'no obstacle') or 'quick' in experiment:
escape_duration = 18
elif 'food' in experiment:
escape_duration = 12
else: escape_duration = 12
# elif 'up' in experiment and 'probe' in condition:
# escape_duration = 12
# loop over each mouse
for i, mouse in enumerate(self.analysis[experiment][condition]['start time']):
m+=1
# initialize mouse data for stats
mouse_data = [[],[],[],[]]
print(mouse)
skip_mouse = False
if self.analysis_options['control'] and not mouse=='control': continue
if not self.analysis_options['control'] and mouse=='control': continue
# loop over each trial
prev_homings = []
x_edges_used = []
t = 0
for trial in range(len(self.analysis[experiment][condition]['end time'][mouse])):
trial_num += 1
# impose conditions
if 'food' in experiment:
if t > 12: continue
if condition == 'no obstacle' and self.analysis[experiment][condition]['start time'][mouse][trial] < 20: continue
num_trials_total += 1
elif 'void' in experiment:
if t > 5: continue
else:
if t>2: continue
# if trial > 2: continue
num_trials_total += 1
# if trial!=2: continue
# if 'off' in experiment and trial: continue
# if trial < 3 and 'wall down' in experiment: continue
# if condition == 'obstacle' and not 'non' in experiment and \
# self.analysis[experiment][condition]['start time'][mouse][trial] < 20: continue
# if c == 0 and not (trial > 0): continue
# if c == 1 and not (trial): continue
# if c == 2 and not (trial == 0): continue
# if trial and ('lights on off' in experiment and not 'baseline' in experiment): continue
if 'Square' in experiment:
HV_cutoff = .56
HV_cutoff = 0
y_idx = self.analysis[experiment][condition]['path'][mouse][trial][1]
if y_idx[0] * scaling_factor > 50: continue
else:
# skip certain trials
y_start = self.analysis[experiment][condition]['path'][mouse][trial][1][0] * scaling_factor
x_start = self.analysis[experiment][condition]['path'][mouse][trial][0][0] * scaling_factor
# print(y_start)
# print(x_start)
if y_start > 25: continue
if abs(x_start-50) > 30: continue
end_idx[trial_num] = self.analysis[experiment][condition]['end time'][mouse][trial]
RT = self.analysis[experiment][condition]['RT'][mouse][trial]
if np.isnan(end_idx[trial_num]) or (end_idx[trial_num] > escape_duration * fps):
# if not ('up' in experiment and 'probe' in condition and not np.isnan(RT)):
# mouse_data[3].append(0)
continue
''' check for previous edgy homings '''
# if 'dark' in experiment or True:
# num_prev_edge_vectors, x_edge = get_num_edge_vectors(self, experiment, condition, mouse, trial)
# # print(num_prev_edge_vectors)
# if num_prev_edge_vectors and c: continue
# if not num_prev_edge_vectors and not c: continue
# if num_prev_edge_vectors < 3 and (c==0): continue
# if num_prev_edge_vectors > 0 and c < 4: continue
# if t>1 and c == 2: continue
# if num_prev_edge_vectors >= 2: print('prev edgy homing'); continue
# if x_edge in x_edges_used: print('prev edgy escape'); continue
#
# print('-----------' + mouse + '--------------')
#
# if self.analysis[experiment][condition]['edginess'][mouse][trial] <= HV_cutoff:
# print(' HV ')
# else:
# print(' EDGY ')
# # edgy trial has occurred
# print('EDGY TRIAL ' + str(trial))
# x_edges_used.append(x_edge)
#
# # select only *with* prev homings
# if not num_prev_edge_vectors:
# if not x_edge in x_edges_used:
# if self.analysis[experiment][condition]['edginess'][mouse][trial] > HV_cutoff:
# x_edges_used.append(x_edge)
# continue
# print(t)
num_trials_escape += 1
# add data
edginess[trial_num] = self.analysis[experiment][condition]['edginess'][mouse][trial]
time_since_down[trial_num] = np.sqrt((x_start - 50)**2 + (y_start - 50)**2 )# self.analysis[experiment][condition]['start angle'][mouse][trial]
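                            # NOTE: despite its name, time_since_down here stores the trial's start-point
                            # distance from the arena centre (50, 50); the original assignment is preserved
                            # in the trailing comment on the line above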
print(edginess[trial_num])
if 'Square' in experiment:
if edginess[trial_num] <=-.3: # and False: #.15
edginess[trial_num] = np.nan
continue
# edginess to current edge as opposed to specific edge
if (('moves left' in experiment and condition == 'no obstacle') \
or ('moves right' in experiment and condition== 'obstacle')): # and False:
if edginess[trial_num] <= -0: # and False:
edginess[trial_num] = np.nan
continue
edginess[trial_num] = edginess[trial_num] - 1
# shelter edginess
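                            # (disabled) alternative 'shelter edginess' metric: fit a line through the escape
                            # path between y = 55 and y = 65, extrapolate it to y = 80, and compare the
                            # projected x to a reference x of 40 (noted below as the mean of the null
                            # distribution), normalized by 18; the sign is flipped when stored in edginess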
if False:
y_pos = self.analysis[experiment][condition]['path'][mouse][trial][1][:int(end_idx[trial_num])] * scaling_factor
x_pos = self.analysis[experiment][condition]['path'][mouse][trial][0][:int(end_idx[trial_num])] * scaling_factor
# get the latter phase traj
y_pos_1 = 55
y_pos_2 = 65
x_pos_1 = x_pos[np.argmin(abs(y_pos - y_pos_1))]
x_pos_2 = x_pos[np.argmin(abs(y_pos - y_pos_2))]
#where does it end up
slope = (y_pos_2 - y_pos_1) / (x_pos_2 - x_pos_1)
intercept = y_pos_1 - x_pos_1 * slope
x_pos_proj = (80 - intercept) / slope
# compared to
x_pos_shelter_R = 40 #40.5 # defined as mean of null dist
# if 'long' in self.labels[c]:
# x_pos_shelter_R += 18
# compute the metric
shelter_edginess = (x_pos_proj - x_pos_shelter_R) / 18
edginess[trial_num] = -shelter_edginess
# if condition == 'obstacle' and 'left' in experiment:edginess[trial_num] = -edginess[trial_num] # for putting conditions together
# get previous edginess #TEMPORARY COMMENT
# if not t:
# SH_data = self.analysis[experiment][condition]['prev homings'][mouse][-1]
# time_to_shelter.append(np.array(SH_data[2]))
# was_escape.append(np.array(SH_data[4]))
if False: # or True:
time_to_shelter, SR = get_prev_edginess(ETD, condition, experiment, mouse, prev_edginess, dist_to_SH, dist_to_other_SH,
scaling_factor, self, traj_loc, trial, trial_num, edginess, delta_ICs, delta_x_end)
print(prev_edginess[trial_num])
print(trial + 1)
print('')
# get time in center
# time_in_center[trial_num] = self.analysis[experiment][condition]['time exploring obstacle'][mouse][trial]
# time_in_center[trial_num] = num_PORHVs
# if num_PORHVs <= 1:
# edginess[trial_num] = np.nan
# continue
# if (prev_edginess[trial_num] < HV_cutoff and not t) or skip_mouse:
# edginess[trial_num] = np.nan
# skip_mouse = True
# continue
''' qualify by prev homings '''
# if prev_edginess[trial_num] < .4: # and c:
# edginess[trial_num] = np.nan
# prev_edginess[trial_num] = np.nan
# continue
num_prev_edge_vectors, x_edge = get_num_edge_vectors(self, experiment, condition, mouse, trial, ETD = 10)
# print(str(num_prev_edge_vectors) + ' EVs')
#
# if not num_prev_edge_vectors >= 1 and c ==0:
# edginess[trial_num] = np.nan
# t+=1
# continue
# if not num_prev_edge_vectors < 1 and c ==1:
# edginess[trial_num] = np.nan
# t+=1
# continue
# print(num_prev_edge_vectors)
# if num_prev_edge_vectors !=0 and c==3:
# edginess[trial_num] = np.nan
# t+=1
# continue
# if num_prev_edge_vectors != 1 and c == 2:
# edginess[trial_num] = np.nan
# t += 1
# continue
# if num_prev_edge_vectors != 2 and num_prev_edge_vectors != 3 and c ==1:
# edginess[trial_num] = np.nan
# t += 1
# continue
#
# if num_prev_edge_vectors < 4 and c ==0:
# edginess[trial_num] = np.nan
# t += 1
# continue
#
# print(trial + 1)
# print(prev_edginess[trial_num])
# print(edginess[trial_num])
# print('')
# print(t)
# get time since obstacle removal?
# time_since_down[trial_num] = self.analysis[experiment][condition]['start time'][mouse][trial] - self.analysis[experiment]['probe']['start time'][mouse][0]
# add data for stats
mouse_data[0].append(int(edginess[trial_num] > HV_cutoff))
mouse_data[1].append(edginess[trial_num])
mouse_data[2].append(prev_edginess[trial_num])
mouse_data[3].append(self.analysis[experiment][condition]['start time'][mouse][trial] - self.analysis[experiment][condition]['start time'][mouse][0])
mouse_ID_trial[trial_num] = m
t += 1
t_total += 1
#append data for stats
if mouse_data[0]:
all_data[0].append(mouse_data[0])
all_data[1].append(mouse_data[1])
all_data[2].append(mouse_data[2])
all_data[3].append(mouse_data[3])
all_conditions.append(c)
mouse_ID.append(m); m+= 1
else:
print(mouse)
print('0 trials')
# get prev homings
time_to_shelter_all.append(time_to_shelter)
dist_data_EV_other_all = np.append(dist_data_EV_other_all, dist_to_other_SH[edginess > HV_cutoff])
# print(t_total)
''' plot edginess by condition '''
# get the data
# data = abs(edginess)
data = edginess
plot_data = data[~np.isnan(data)]
# print(np.percentile(plot_data, 25))
# print(np.percentile(plot_data, 50))
# print(np.percentile(plot_data, 75))
# print(np.mean(plot_data > HV_cutoff))
# plot each trial
scatter_axis = scatter_the_axis(c, plot_data)
ax.scatter(scatter_axis[plot_data>HV_cutoff], plot_data[plot_data>HV_cutoff], color=edge_vector_color[::-1], s=15, zorder = 99)
ax.scatter(scatter_axis[plot_data<=HV_cutoff], plot_data[plot_data<=HV_cutoff], color=homing_vector_color[::-1], s=15, zorder = 99)
bp = ax.boxplot([plot_data, [0,0]], positions = [3 * c - .2, -10], showfliers=False, zorder=99)
plt.setp(bp['boxes'], color=[.5,.5,.5], linewidth = 2)
plt.setp(bp['whiskers'], color=[.5,.5,.5], linewidth = 2)
plt.setp(bp['medians'], linewidth=2)
ax.set_xlim([-1, 3 * len(self.experiments) - 1])
# ax.set_ylim([-.1, 1.15])
ax.set_ylim([-.1, 1.3])
#do kde
try:
if 'Square' in experiment:
kde = fit_kde(plot_data, bw=.06)
plot_kde(ax, kde, plot_data, z=3*c + .3, vertical=True, normto=.8, color=[.5,.5,.5], violin=False, clip=False, cutoff = HV_cutoff+0.0000001, cutoff_colors = [homing_vector_color[::-1], edge_vector_color[::-1]])
ax.set_ylim([-1.5, 1.5])
else:
kde = fit_kde(plot_data, bw=.04)
plot_kde(ax, kde, plot_data, z=3*c + .3, vertical=True, normto=1.3, color=[.5,.5,.5], violin=False, clip=True, cutoff = HV_cutoff, cutoff_colors = [homing_vector_color[::-1], edge_vector_color[::-1]])
except: pass
# plot the polar plot or initial trajectories
# plt.figure(fig4.number)
fig4 = plt.figure(figsize=( 5, 5))
# ax4 = plt.subplot(1,len(self.experiments),len(self.experiments) - c, polar=True)
ax4 = plt.subplot(1, 1, 1, polar=True)
plt.axis('off')
ax.margins(0, 0)
ax.xaxis.set_major_locator(plt.NullLocator())
ax.yaxis.set_major_locator(plt.NullLocator())
ax4.set_xlim([-np.pi / 2 - .1, 0])
# ax4.set_xlim([-np.pi - .1, 0])
mean_value_color = max(0, min(1, np.mean(plot_data)))
mean_value_color = np.sum(plot_data > HV_cutoff) / len(plot_data)
mean_value = np.mean(plot_data)
value_color = mean_value_color * edge_vector_color[::-1] + (1 - mean_value_color) * homing_vector_color[::-1]
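                # the arrow colour interpolates between the edge-vector and homing-vector colours in
                # proportion to the fraction of trials above HV_cutoff (the first mean_value_color
                # assignment above is overridden by this fraction)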
ax4.arrow(mean_value + 3 * np.pi / 2, 0, 0, 1.9, color=[abs(v)**1 for v in value_color], alpha=1, width = 0.05, linewidth=2)
ax4.plot([0, 0 + 3 * np.pi / 2], [0, 2.25], color=[.5,.5,.5], alpha=1, linewidth=1, linestyle = '--')
ax4.plot([0, 1 + 3 * np.pi / 2], [0, 2.25], color=[.5,.5,.5], alpha=1, linewidth=1, linestyle = '--')
# ax4.plot([0, -1 + 3 * np.pi / 2], [0, 2.25], color=[.5, .5, .5], alpha=1, linewidth=1, linestyle='--')
scatter_axis_EV = scatter_the_axis_polar(plot_data[plot_data > HV_cutoff], 2.25, 0) #0.05
scatter_axis_HV = scatter_the_axis_polar(plot_data[plot_data <= HV_cutoff], 2.25, 0)
ax4.scatter(plot_data[plot_data > HV_cutoff] + 3 * np.pi/2, scatter_axis_EV, s = 30, color=edge_vector_color[::-1], alpha = .8, edgecolors = None)
ax4.scatter(plot_data[plot_data <= HV_cutoff] + 3 * np.pi/2, scatter_axis_HV, s = 30, color=homing_vector_color[::-1], alpha=.8, edgecolors = None)
fig4.savefig(os.path.join(self.summary_plots_folder, 'Angle comparison - ' + self.labels[c] + '.png'), format='png', transparent=True, bbox_inches='tight', pad_inches=0)
fig4.savefig(os.path.join(self.summary_plots_folder, 'Angle comparison - ' + self.labels[c] + '.eps'), format='eps', transparent=True, bbox_inches='tight', pad_inches=0)
# print(len(plot_data))
if len(plot_data) > 1 and False: # or True:
''' plot the correlation '''
# do both prev homings and time in center # np.array(time_since_down) # 'Time since removal'
for plot_data_corr, fig_corr, ax_corr, data_label in zip([prev_edginess, time_in_center], [fig2, fig3], [ax2, ax3], ['Prior homings','Exploration']): #
plot_data_corr = plot_data_corr[~np.isnan(data)]
# plot data
ax_corr.scatter(plot_data_corr, plot_data, color=colors[c], s=60, alpha=1, edgecolors=colors[c]/2, linewidth=1) #color=[.5, .5, .5] #edgecolors=[.2, .2, .2]
# do correlation
r, p = scipy.stats.pearsonr(plot_data_corr, plot_data)
print(r, p)
# do linear regression
plot_data_corr, prediction = do_linear_regression(plot_data, plot_data_corr)
                        # plot linear regression
ax_corr.plot(plot_data_corr, prediction['Pred'].values, color=colors[c], linewidth=1, linestyle='--', alpha=.7) #color=[.0, .0, .0]
ax_corr.fill_between(plot_data_corr, prediction['lower'].values, prediction['upper'].values, color=colors[c], alpha=.075) #color=[.2, .2, .2]
fig_corr.savefig(os.path.join(self.summary_plots_folder, 'Edginess by ' + data_label + ' - ' + self.labels[c] + '.png'), format='png')
fig_corr.savefig(os.path.join(self.summary_plots_folder, 'Edginess by ' + data_label + ' - ' + self.labels[c] + '.eps'), format='eps')
# test correlation and stats thru permutation test
# data_x = list(np.array(all_data[2])[np.array(all_conditions) == c])
# data_y = list(np.array(all_data[1])[np.array(all_conditions) == c])
# permutation_correlation(data_x, data_y, iterations=10000, two_tailed=False, pool_all = True)
print(num_trials_escape)
print(num_trials_total)
print(num_trials_escape / num_trials_total)
# save the plot
fig.savefig(os.path.join(self.summary_plots_folder, 'Edginess comparison.png'), format='png', bbox_inches='tight', pad_inches=0)
fig.savefig(os.path.join(self.summary_plots_folder, 'Edginess comparison.eps'), format='eps', bbox_inches='tight', pad_inches=0)
# fig5.savefig(os.path.join(self.summary_plots_folder, 'Angle dist comparison.png'), format='png', bbox_inches='tight', pad_inches=0)
# fig5.savefig(os.path.join(self.summary_plots_folder, 'Angle dist comparison.eps'), format='eps', bbox_inches='tight', pad_inches=0)
plt.show()
time_to_shelter_all = np.concatenate(list(flatten(time_to_shelter_all))).astype(float)
np.percentile(time_to_shelter_all, 25)
np.percentile(time_to_shelter_all, 75)
group_A = list(np.array(all_data[0])[np.array(all_conditions) == 2])
group_B = list(np.array(all_data[0])[np.array(all_conditions) == 3])
permutation_test(group_A, group_B, iterations = 10000, two_tailed = False)
group_A = list(np.array(all_data[1])[(np.array(all_conditions) == 1) + (np.array(all_conditions) == 2)])
group_B = list(np.array(all_data[1])[np.array(all_conditions) == 3])
permutation_test(group_A, group_B, iterations = 10000, two_tailed = False)
import pandas
df = pandas.DataFrame(data={"mouse_id": mouse_ID, "condition": all_conditions, "x-data": all_data[2], "y-data": all_data[1]})
df.to_csv("./Foraging Path Types.csv", sep=',', index=False)
group_B = list(flatten(np.array(all_data[0])[np.array(all_conditions) == 1]))
np.sum(group_B) / len(group_B)
np.percentile(abs(time_since_down[edginess < HV_cutoff]), 50)
np.percentile(abs(time_since_down[edginess < HV_cutoff]), 25)
np.percentile(abs(time_since_down[edginess < HV_cutoff]), 75)
np.percentile(abs(time_since_down[edginess > HV_cutoff]), 50)
np.percentile(abs(time_since_down[edginess > HV_cutoff]), 25)
np.percentile(abs(time_since_down[edginess > HV_cutoff]), 75)
group_A = [[d] for d in abs(time_since_down[edginess > HV_cutoff])]
group_B = [[d] for d in abs(time_since_down[edginess < HV_cutoff])]
permutation_test(group_A, group_B, iterations=10000, two_tailed=True)
WE = np.concatenate(was_escape)
TTS_spont = np.concatenate(time_to_shelter)[~WE]
TTS_escape = np.concatenate(time_to_shelter)[WE]
trials = np.array(list(flatten(all_data[3])))
edgy = np.array(list(flatten(all_data[0])))
np.mean(edgy[trials == 0])
np.mean(edgy[trials == 1])
np.mean(edgy[trials == 2])
np.mean(edgy[trials == 3])
np.mean(edgy[trials == 4])
np.mean(edgy[trials == 5])
np.mean(edgy[trials == 6])
np.mean(edgy[trials == 7])
np.mean(edgy[trials == 8])
np.mean(edgy[trials == 9])
np.mean(edgy[trials == 10])
np.mean(edgy[trials == 11])
np.mean(edgy[trials == 12])
np.mean(edgy[trials == 13])
'''
TRADITIONAL METRICS
'''
def plot_metrics_by_strategy(self):
        ''' plot escape metrics by strategy '''
# initialize parameters
edge_vector_color = np.array([1, .95, .85])
homing_vector_color = np.array([.725, .725, .725])
non_escape_color = np.array([0,0,0])
ETD = 10#0
traj_loc = 40
fps = 30
# escape_duration = 12 #12 #9 #12 9 for food 12 for dark
HV_cutoff = .681 #.65
edgy_cutoff = .681
# loop over experiments and conditions
for c, (experiment, condition) in enumerate(zip(self.experiments, self.conditions)):
# extract experiments from nested list
sub_experiments, sub_conditions = extract_experiments(experiment, condition)
# get the number of trials
number_of_trials = get_number_of_trials(sub_experiments, sub_conditions, self.analysis)
number_of_mice = get_number_of_mice(sub_experiments, sub_conditions, self.analysis)
# initialize array to fill in with each trial's data
efficiency, efficiency_RT, end_idx, num_prev_homings_EV, duration_RT, duration, prev_edginess, edginess, _, _, _, _, \
_, _, _, _, _, scaling_factor, time, trial_num, trials, edginess, avg_speed, avg_speed_RT, peak_speed, RT, escape_speed, strategy = \
initialize_variables_efficiency(number_of_trials, self, sub_experiments)
mouse_id = efficiency.copy()
m = 0
# loop over each experiment and condition
for e, (experiment, condition) in enumerate(zip(sub_experiments, sub_conditions)):
# loop over each mouse
for i, mouse in enumerate(self.analysis[experiment][condition]['speed']):
print(mouse)
# control analysis
if self.analysis_options['control'] and not mouse=='control': continue
if not self.analysis_options['control'] and mouse=='control': continue
# loop across all trials
t = 0
for trial in range(len(self.analysis[experiment][condition]['end time'][mouse])):
if 'food' in experiment: escape_duration = 9
else: escape_duration = 12
trial_num += 1
                        # impose conditions - escape duration
end_time = self.analysis[experiment][condition]['end time'][mouse][trial]
if np.isnan(end_time) or (end_time > (escape_duration * fps)): continue
# skip certain trials
y_start = self.analysis[experiment][condition]['path'][mouse][trial][1][0] * scaling_factor
x_start = self.analysis[experiment][condition]['path'][mouse][trial][0][0] * scaling_factor
# needs to start at top
if y_start > 25: continue
if abs(x_start - 50) > 30: continue
# get the strategy used
# edgy_escape = self.analysis[experiment][condition]['edginess'][mouse][trial] > edgy_cutoff
# is it a homing vector
# strategy_code = 0
# TEMPORARY COMMENTING
# if not edgy_escape:
# if self.analysis[experiment][condition]['edginess'][mouse][trial] < HV_cutoff: strategy_code = 0 # homing vector
# else: continue
# else:
# get the strategy used -- NUMBER OF PREVIOUS EDGE VECTOR HOMINGS
time_to_shelter, SR = get_prev_edginess(ETD, condition, experiment, mouse, prev_edginess, [], [],
scaling_factor, self, traj_loc, trial, trial_num, edginess, [], [])
if t > 2: continue
# if c == 0 and trial: continue
# if c == 1 and trial != 2: continue
t+=1
# if prev_edginess[trial_num] >= HV_cutoff: strategy_code = 1 # path learning
# elif prev_edginess[trial_num] < HV_cutoff: strategy_code = 2 # map-based
# else: continue
# how many prev homings to that edge: if 0, then map-based, if >1, then PL
if len(self.analysis[experiment]['probe']['start time'][mouse]):
edge_time = self.analysis[experiment]['probe']['start time'][mouse][0] - 1
else: edge_time = 19
edge_time = np.min((edge_time, self.analysis[experiment][condition]['start time'][mouse][trial]))
# print(edge_time)
num_edge_vectors, _ = get_num_edge_vectors(self, experiment, condition, mouse, trial, ETD=ETD, time_threshold=edge_time, other_side = False)
num_edge_vectors = get_num_homing_vectors(self, experiment, condition, mouse, trial, spontaneous = False, time_threshold = edge_time)
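                        # NOTE: the call above overwrites the edge-vector count, so from here on
                        # num_edge_vectors actually holds the number of prior homing vectors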
print(num_edge_vectors)
# if 'wall up' in experiment and 'no' in condition: num_edge_vectors = 0
# print(num_edge_vectors)
if False or True:
if num_edge_vectors == 1:
strategy_code = 1
# print('EV -- ' + mouse + ' - trial ' + str(trial))
elif num_edge_vectors == 0:
strategy_code = 0
# print('NO EV -- ' + mouse + ' - trial ' + str(trial))
else: continue
else:
strategy_code = 0
strategy[trial_num] = strategy_code
# add data for each metric
RT[trial_num] = self.analysis[experiment][condition]['RT'][mouse][trial]
avg_speed[trial_num] = np.mean(self.analysis[experiment][condition]['speed'][mouse][trial][10*fps : 10*fps+int(end_time)]) * scaling_factor * 30
avg_speed_RT[trial_num] = np.mean(self.analysis[experiment][condition]['speed'][mouse][trial][10*fps + int(RT[trial_num]*30) : 10*fps+int(end_time)]) * scaling_factor * 30
peak_speed[trial_num] = np.max(self.analysis[experiment][condition]['speed'][mouse][trial][10*fps : 10*fps+int(end_time)])*fps*scaling_factor
escape_speed[trial_num] = self.analysis[experiment][condition]['optimal path length'][mouse][trial] * scaling_factor / (end_time/30)
efficiency[trial_num] = np.min((1, self.analysis[experiment][condition]['optimal path length'][mouse][trial] / \
self.analysis[experiment][condition]['full path length'][mouse][trial]))
efficiency_RT[trial_num] = np.min((1, self.analysis[experiment][condition]['optimal RT path length'][mouse][trial] / \
self.analysis[experiment][condition]['RT path length'][mouse][trial]))
duration_RT[trial_num] = (end_time / fps - RT[trial_num]) / self.analysis[experiment][condition]['optimal RT path length'][mouse][trial] / scaling_factor * 100
duration[trial_num] = end_time / fps / self.analysis[experiment][condition]['optimal path length'][mouse][trial] / scaling_factor * 100
# duration[trial_num] = trial
# duration_RT[trial_num] = self.analysis[experiment][condition]['start time'][mouse][trial]
avg_speed[trial_num] = self.analysis[experiment][condition]['time exploring far (pre)'][mouse][trial] / 60
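                        # NOTE: avg_speed is overwritten here with pre-stimulus time spent exploring the
                        # far side (divided by 60, presumably seconds to minutes), so the 'Avg speed'
                        # plots below show that quantity while this line is active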
# add data for stats
mouse_id[trial_num] = m
m+=1
# for metric, data in zip(['Reaction time', 'Peak speed', 'Avg speed', 'Path efficiency - RT','Duration - RT', 'Duration'],\
# [RT, peak_speed, avg_speed_RT, efficiency_RT, duration_RT, duration]):
# for metric, data in zip(['Reaction time', 'Avg speed', 'Path efficiency - RT'], #,'Peak speed', 'Duration - RT', 'Duration'], \
# [RT, avg_speed_RT, efficiency_RT]): #peak_speed, , duration_RT, duration
for metric, data in zip(['Path efficiency - RT'], [efficiency_RT]):
# for metric, data in zip([ 'Duration - RT'],
# [ duration_RT]):
# for metric, data in zip(['trial', 'time', 'time exploring back'],
# [duration, duration_RT, avg_speed]):
# format data
x_data = strategy[~np.isnan(data)]
y_data = data[~np.isnan(data)]
if not c: OF_data = y_data
# make figure
fig, ax = plt.subplots(figsize=(11, 9))
plt.axis('off')
# ax.margins(0, 0)
ax.xaxis.set_major_locator(plt.NullLocator())
ax.yaxis.set_major_locator(plt.NullLocator())
# ax.set_title(metric)
if 'Reaction time' in metric:
ax.plot([-.75, 3], [0, 0], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [1, 1], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [2, 2], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [3, 3], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [4, 4], linestyle='--', color=[.5, .5, .5, .5])
elif 'Peak speed' in metric:
ax.plot([-.75, 3], [40, 40], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [80, 80], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [120, 120], linestyle='--', color=[.5, .5, .5, .5])
elif 'Avg speed' in metric:
ax.plot([-.75, 3], [25, 25], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [50, 50], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [75, 75], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [0, 0], linestyle='--', color=[.5, .5, .5, .5])
elif 'Path efficiency' in metric:
ax.plot([-.75, 3], [.5,.5], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [.75, .75], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [1, 1], linestyle='--', color=[.5, .5, .5, .5])
elif 'Duration' in metric:
ax.plot([-.75, 3], [0, 0], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [10, 10], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [5, 5], linestyle='--', color=[.5, .5, .5, .5])
elif 'time' == metric:
ax.plot([-.75, 3], [0, 0], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [10, 10], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [20, 20], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [30, 30], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [40, 40], linestyle='--', color=[.5, .5, .5, .5])
elif 'exploring' in metric:
ax.plot([-.75, 3], [2.5, 2.5], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [5.0, 5.0], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [7.5, 7.5], linestyle='--', color=[.5, .5, .5, .5])
ax.plot([-.75, 3], [0, 0], linestyle='--', color=[.5, .5, .5, .5])
#initialize stats array
stats_data = [[], [], []]
# go thru each strategy
for s in [0,1,2]:
# format data
if not np.sum(x_data==s): continue
plot_data = y_data[x_data==s]
median = np.percentile(plot_data, 50);
third_quartile = np.percentile(plot_data, 75);
first_quartile = np.percentile(plot_data, 25)
# print(first_quartile)
# print(median)
# print(third_quartile)
# if 'Reaction' in metric: print(str(first_quartile), str(median), str(third_quartile))
IQR = third_quartile - first_quartile
# remove outliers
if not metric == 'trial':
outliers = abs(plot_data - median) > 2*IQR
# plot_data = plot_data[~outliers]
# plot all data
ax.scatter(np.ones_like(plot_data)*s, plot_data, color=[0,0,0], s=30, zorder = 99)
# plot kde
if 'efficiency' in metric: bw_factor = .02
elif 'speed' in metric or 'efficiency' in metric or metric == 'time': bw_factor = .04
elif 'exploring' in metric: bw_factor = .06
elif 'Duration' in metric: bw_factor = .07
else: bw_factor = .09
kde = fit_kde(plot_data, bw=np.median(y_data)*bw_factor)
plot_kde(ax, kde, plot_data, z= s + .1, vertical=True, normto=.4, color=[.75, .75, .75], violin=False, clip=True)
# plot errorbar
ax.errorbar(s - .15, median, yerr=np.array([[median - first_quartile], [third_quartile - median]]), color=[0, 0, 0], capsize=10, capthick=3, alpha=1, linewidth=3)
ax.scatter(s - .15, median, color=[0, 0, 0], s=175, alpha=1)
# print(len(plot_data))
# get mouse ids for stats
mouse_id_stats = mouse_id[~np.isnan(data)]
mouse_id_stats = mouse_id_stats[x_data==s]
if not metric == 'trial': mouse_id_stats = mouse_id_stats[~outliers]
# for m in np.unique(mouse_id_stats):
# stats_data[s].append( list(plot_data[mouse_id_stats==m]) )
print(metric)
# for ss in [[0,1]]: #, [0,2], [1,2]]:
# group_A = stats_data[ss[0]]
# group_B = stats_data[ss[1]]
# permutation_test(group_A, group_B, iterations=10000, two_tailed=True)
# save figure
fig.savefig(os.path.join(self.summary_plots_folder, metric + ' - ' + self.labels[c] + '.png'), format='png', bbox_inches='tight', pad_inches=0)
fig.savefig(os.path.join(self.summary_plots_folder, metric + ' - ' + self.labels[c] + '.eps'), format='eps', bbox_inches='tight', pad_inches=0)
plt.show()
plt.close('all')
group_A = [[e] for e in tr1_eff]
group_B = [[e] for e in tr3_eff]
group_C = [[e] for e in OF_eff]
permutation_test(group_A, group_B, iterations=10000, two_tailed=True)
permutation_test(group_A, group_C, iterations=10000, two_tailed=True)
permutation_test(group_B, group_C, iterations=10000, two_tailed=True)
'''
DIST OF TURN ANGLES
'''
# def plot_metrics_by_strategy(self):
# ''' plot the escape paths '''
#
# ETD = 10
# traj_loc = 40
#
# fps = 30
# escape_duration = 12
#
# colors = [[.3,.3,.3,.5], [.5,.5,.8, .5]]
#
# # make figure
# fig, ax = plt.subplots(figsize=(11, 9))
# fig2, ax2 = plt.subplots(figsize=(11, 9))
# # plt.axis('off')
# # ax.margins(0, 0)
# # ax.xaxis.set_major_locator(plt.NullLocator())
# # ax.yaxis.set_major_locator(plt.NullLocator())
# all_angles_pre = []
# all_angles_escape = []
#
#
# # loop over experiments and conditions
# for c, (experiment, condition) in enumerate(zip(self.experiments, self.conditions)):
# # extract experiments from nested list
# sub_experiments, sub_conditions = extract_experiments(experiment, condition)
# # get the number of trials
# number_of_trials = get_number_of_trials(sub_experiments, sub_conditions, self.analysis)
# number_of_mice = get_number_of_mice(sub_experiments, sub_conditions, self.analysis)
# # initialize array to fill in with each trial's data
# shape = self.analysis[sub_experiments[0]]['obstacle']['shape']
# scaling_factor = 100 / shape[0]
# turn_angles_pre = []
# turn_angles_escape = []
#
# # loop over each experiment and condition
# for e, (experiment, condition) in enumerate(zip(sub_experiments, sub_conditions)):
# # loop over each mouse
# for i, mouse in enumerate(self.analysis[experiment][condition]['speed']):
# print(mouse)
# # control analysis
# if self.analysis_options['control'] and not mouse=='control': continue
# if not self.analysis_options['control'] and mouse=='control': continue
# # loop across all trials
# t = 0
# for trial in range(len(self.analysis[experiment][condition]['end time'][mouse])):
# # impose coniditions - escape duration
# end_time = self.analysis[experiment][condition]['end time'][mouse][trial]
# if np.isnan(end_time) or (end_time > (escape_duration * fps)): continue
#
#
# ## COMMENT ONE OR THE OTHER IF TESTING PRE OR ESCAPE
# #pre
# # if trial < 2: continue
# # if t: continue
#
# # escape
# if t > 2: continue
#
# # skip certain trials
# y_start = self.analysis[experiment][condition]['path'][mouse][trial][1][0] * scaling_factor
# x_start = self.analysis[experiment][condition]['path'][mouse][trial][0][0] * scaling_factor
# # needs to start at top
# if y_start > 25: continue
# if abs(x_start - 50) > 30: continue
#
# turn_angles_pre.append(list(abs(np.array(self.analysis[experiment][condition]['prev movements'][mouse][trial][3])))) # >145
# turn_angles_escape.append(abs(self.analysis[experiment][condition]['movement'][mouse][trial][2])) # >145
# #
# # turn_angles_pre.append(list(np.array(self.analysis[experiment][condition]['prev movements'][mouse][trial][3])))
# # turn_angles_escape.append(self.analysis[experiment][condition]['movement'][mouse][trial][2])
#
# t += 1
#
#
#
# # format data
# hist_data_pre = np.array(list(flatten(turn_angles_pre)))
# hist_data_escape = np.array(list(flatten(turn_angles_escape)))
#
# # for permutation test
# # all_angles_pre.append(turn_angles_pre)
# # all_angles_escape.append([[tae] for tae in turn_angles_escape])
#
# ax.set_title('Prior movement angles')
# ax2.set_title('Escape movement angles')
# ax.plot([0, 0], [0, .4], linestyle='--', color=[.5, .5, .5, .5])
# ax.plot([90, 90],[0, .4], linestyle='--', color=[.5, .5, .5, .5])
# ax.plot([180, 180],[0, .4], linestyle='--', color=[.5, .5, .5, .5])
# ax2.plot([0, 0], [0, .4], linestyle='--', color=[.5, .5, .5, .5])
# ax2.plot([90, 90],[0, .4], linestyle='--', color=[.5, .5, .5, .5])
# ax2.plot([180, 180],[0, .4], linestyle='--', color=[.5, .5, .5, .5])
#
# # format data
# bin_width = 30
# hist_pre, n, _ = ax.hist(hist_data_pre, bins=np.arange(-0, 180+bin_width, bin_width), color=colors[c], weights = np.ones_like(hist_data_pre) * 1/ len(hist_data_pre))
# hist_escape, n, _ = ax2.hist(hist_data_escape, bins=np.arange(-0, 180+bin_width, bin_width), color=colors[c], weights = np.ones_like(hist_data_escape) * 1/ len(hist_data_escape))
#
# count_pre, n = np.histogram(hist_data_pre, bins=np.arange(-0, 180+bin_width, bin_width))
# count_escape, n = np.histogram(hist_data_escape, bins=np.arange(-0, 180+bin_width, bin_width))
#
# # for chi squared
# all_angles_pre.append(count_pre)
# all_angles_escape.append(count_escape)
#
#
# # save figure
# fig.savefig(os.path.join(self.summary_plots_folder, 'Prior Angle dist.png'), format='png', bbox_inches='tight', pad_inches=0)
# fig.savefig(os.path.join(self.summary_plots_folder, 'Prior Angle dist.eps'), format='eps', bbox_inches='tight', pad_inches=0)
# # save figure
# fig2.savefig(os.path.join(self.summary_plots_folder, 'Escape Angle dist.png'), format='png', bbox_inches='tight', pad_inches=0)
# fig2.savefig(os.path.join(self.summary_plots_folder, 'Escape Angle dist.eps'), format='eps', bbox_inches='tight', pad_inches=0)
#
# plt.show()
#
#
# scipy.stats.chi2_contingency(all_angles_pre)
# scipy.stats.chi2_contingency(all_angles_escape)
#
#
# group_A = all_angles_pre[0]
# group_B = all_angles_pre[1]
# permutation_test(group_A, group_B, iterations = 10000, two_tailed = True)
#
# group_A = all_angles_escape[0]
# group_B = all_angles_escape[1]
# permutation_test(group_A, group_B, iterations = 10000, two_tailed = True)
#
# plt.close('all')
#
# '''
# DIST OF EDGE VECTORS
# '''
# def plot_metrics_by_strategy(self):
# ''' plot the escape paths '''
#
# ETD = 10
# traj_loc = 40
#
# fps = 30
# escape_duration = 12
#
# dist_thresh = 5
# time_thresh = 20
#
# colors = [[.3,.3,.3,.5], [.5,.5,.8, .5]]
#
# # make figure
# fig1, ax1 = plt.subplots(figsize=(11, 9))
# fig2, ax2 = plt.subplots(figsize=(11, 9))
# # plt.axis('off')
# # ax.margins(0, 0)
# # ax.xaxis.set_major_locator(plt.NullLocator())
# # ax.yaxis.set_major_locator(plt.NullLocator())
# all_EVs = []
# all_HVs = []
#
#
# # loop over experiments and conditions
# for c, (experiment, condition) in enumerate(zip(self.experiments, self.conditions)):
# # extract experiments from nested list
# sub_experiments, sub_conditions = extract_experiments(experiment, condition)
# # get the number of trials
# number_of_trials = get_number_of_trials(sub_experiments, sub_conditions, self.analysis)
# number_of_mice = get_number_of_mice(sub_experiments, sub_conditions, self.analysis)
# # initialize array to fill in with each trial's data
# shape = self.analysis[sub_experiments[0]]['obstacle']['shape']
# scaling_factor = 100 / shape[0]
# EVs = []
# HVs = []
# edge_vector_time_exp = []
#
# # loop over each experiment and condition
# for e, (experiment, condition) in enumerate(zip(sub_experiments, sub_conditions)):
# # loop over each mouse
# for i, mouse in enumerate(self.analysis[experiment][condition]['speed']):
# print(mouse)
# # control analysis
# if self.analysis_options['control'] and not mouse=='control': continue
# if not self.analysis_options['control'] and mouse=='control': continue
# # just take the last trial
# trial = len(self.analysis[experiment][condition]['start time'][mouse])-1
# if trial < 0:
# if condition == 'obstacle':
# condition_use = 'no obstacle'
# trial = 0
# elif condition == 'no obstacle':
# condition_use = 'obstacle'
# trial = len(self.analysis[experiment][condition]['start time'][mouse])-1
# if mouse == 'CA7220': trial = 1 #compensate for extra vid
# else: condition_use = condition
#
# # get the prev homings
# SH_data = self.analysis[experiment][condition_use]['prev homings'][mouse][trial]
#
# # get their start time
# homing_time = np.array(SH_data[3])
# edge_vector_time_exp.append(list(homing_time))
#
# # get their x value
# SH_x = np.array(SH_data[0])
#
# # only use spontaneous
# stim_evoked = np.array(SH_data[4])
# SH_x = SH_x[~stim_evoked]
# homing_time = homing_time[~stim_evoked]
#
# # normalize to 20 min
# SH_x = SH_x[homing_time < time_thresh] / np.min((time_thresh, self.analysis[experiment][condition_use]['start time'][mouse][trial])) * 20
#
# # get number of edge vectors
# num_edge_vectors = np.sum(abs(SH_x - 25) < dist_thresh) + np.sum(abs(SH_x - 75) < dist_thresh)
# num_homing_vectors = np.sum(abs(SH_x - 50) < dist_thresh)
# print(num_edge_vectors)
#
#
# # get the prev anti homings
# anti_SH_data = self.analysis[experiment][condition_use]['prev anti-homings'][mouse][trial]
#
# # get their start time
# homing_time = np.array(anti_SH_data[3])
# edge_vector_time_exp.append(list(homing_time))
#
# # get their x value
# anti_SH_x = np.array(anti_SH_data[0])
#
# # limit to 20 min
# anti_SH_x = anti_SH_x[homing_time < time_thresh] / np.min((time_thresh, self.analysis[experiment][condition_use]['start time'][mouse][trial])) * 20
#
# # get number of edge vectors
# num_anti_edge_vectors = np.sum(abs(anti_SH_x - 25) < dist_thresh) + np.sum(abs(anti_SH_x - 75) < dist_thresh)
# num_anti_homing_vectors = np.sum(abs(anti_SH_x - 50) < dist_thresh)
# print(num_anti_edge_vectors)
#
# # append to list
# EVs.append(num_edge_vectors + num_anti_edge_vectors )
# HVs.append(num_edge_vectors + num_anti_edge_vectors - (num_homing_vectors + num_anti_homing_vectors))
# print(EVs)
# all_EVs.append(EVs)
# all_HVs.append(HVs)
#
# # make timing hist
# plt.figure()
# plt.hist(list(flatten(edge_vector_time_exp)), bins=np.arange(0, 22.5, 2.5)) #, color=condition_colors[c])
#
# # plot EVs and HVs
# for plot_data, ax, fig in zip([EVs, HVs], [ax1, ax2], [fig1, fig2]):
#
# scatter_axis = scatter_the_axis(c * 4 / 3 + .5 / 3, plot_data)
# ax.scatter(scatter_axis, plot_data, color=[0, 0, 0], s=25, zorder=99)
# # do kde
# kde = fit_kde(plot_data, bw=.5)
# plot_kde(ax, kde, plot_data, z=4 * c + .8, vertical=True, normto=1.2, color=[.5, .5, .5], violin=False, clip=False) # True)
#
# # save figure
# fig.savefig(os.path.join(self.summary_plots_folder, 'EV dist - ' + self.labels[c] + '.png'), format='png', bbox_inches='tight', pad_inches=0)
# fig.savefig(os.path.join(self.summary_plots_folder, 'EV dist - ' + self.labels[c] + '.eps'), format='eps', bbox_inches='tight', pad_inches=0)
#
#
# plt.show()
#
#
# group_A = all_EVs[1]
# group_B = all_EVs[2]
# permutation_test(group_A, group_B, iterations = 10000, two_tailed = True)
#
# group_A = all_HVs[0]
# group_B = all_HVs[1]
# permutation_test(group_A, group_B, iterations = 10000, two_tailed = True)
#
# plt.close('all')
'''
PREDICTION PLOTS, BY TURN ANGLE OR EXPLORATION/EDGINESS
|
|
v
'''
def plot_prediction(self):
by_angle_not_edginess = False
if by_angle_not_edginess:
# initialize parameters
fps = 30
escape_duration = 12
ETD = 10 #4
traj_loc = 40
# initialize figures
fig1, ax1, fig2, ax2, fig3, ax3 = initialize_figures_prediction(self)
plt.close(fig2); plt.close(fig3)
# loop over experiments and conditions
for c, (experiment, condition) in enumerate(zip(self.experiments, self.conditions)):
# extract experiments from nested list
sub_experiments, sub_conditions = extract_experiments(experiment, condition)
# get the number of trials
number_of_trials = get_number_of_trials(sub_experiments, sub_conditions, self.analysis)
number_of_mice = get_number_of_mice(sub_experiments, sub_conditions, self.analysis)
mouse_trial_list = []
IC_x_all, IC_y_all, IC_angle_all, IC_time_all, turn_angles_all = [], [], [], [], []
# initialize array to fill in with each trial's data
efficiency, efficiency_RT, end_idx, x_pred, y_pred, angle_pred, time_pred, mean_pred, initial_body_angle, initial_x, initial_y, x_edge, _, \
_, _, _, _, scaling_factor, time, trial_num, trials, edginess, prev_edginess, dist_to_SH, dist_to_other_SH, RT_all, avg_speed, _ = \
initialize_variables_efficiency(number_of_trials, self, sub_experiments)
# initialize array to fill in with each trial's data
edginess, end_idx, angle_turned, _, _, prev_edginess, scaling_factor, _, trial_num, _, _, dist_to_SH, dist_to_other_SH = \
initialize_variable_edginess(number_of_trials, self, sub_experiments)
for shuffle_time in [False, True]:
angle_turned_all, x_pred_all, y_pred_all, angle_pred_all, time_pred_all, mean_pred_all = [], [], [], [], [], []
num_repeats = shuffle_time * 499 + 1 #* 19
num_repeats = shuffle_time * 19 + 1 # * 19
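                    # only the second assignment takes effect: 20 repeats of the shuffle control,
                    # a single pass for the real data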
prediction_scores_all = []
for r in range(num_repeats):
trial_num = -1
# loop over each experiment and condition
for e, (experiment_real, condition_real) in enumerate(zip(sub_experiments, sub_conditions)):
# loop over each mouse
for i, mouse_real in enumerate(self.analysis[experiment_real][condition_real]['start time']):
if self.analysis_options['control'] and not mouse_real=='control': continue
if not self.analysis_options['control'] and mouse_real=='control': continue
# loop over each trial
prev_homings = []
t = 0
for trial_real in range(len(self.analysis[experiment_real][condition_real]['end time'][mouse_real])):
trial_num += 1
# impose conditions
if t > 2: continue
end_idx[trial_num] = self.analysis[experiment_real][condition_real]['end time'][mouse_real][trial_real]
if np.isnan(end_idx[trial_num]): continue
if (end_idx[trial_num] > escape_duration * fps): continue
# skip certain trials
y_start = self.analysis[experiment_real][condition_real]['path'][mouse_real][trial_real][1][0] * scaling_factor
x_start = self.analysis[experiment_real][condition_real]['path'][mouse_real][trial_real][0][0] * scaling_factor
if y_start > 25: continue
if abs(x_start-50) > 30: continue
# use different data if shuffle:
# if shuffle_time:
# experiment, condition, mouse, trial = mouse_trial_list[np.random.randint(len(mouse_trial_list))]
# else:
# experiment, condition, mouse, trial = experiment_real, condition_real, mouse_real, trial_real
''' just use real mouse '''
experiment, condition, mouse, trial = experiment_real, condition_real, mouse_real, trial_real
''' control ICs, real escape '''
# # get the angle turned during the escape
angle_turned[trial_num] = self.analysis[experiment_real][condition_real]['movement'][mouse_real][trial_real][2]
# angle_turned[trial_num] = abs(self.analysis[experiment_real][condition_real]['edginess'][mouse_real][trial_real])
# get the angle turned, delta x, delta y, and delta phi of previous homings
bout_start_angle = self.analysis[experiment_real][condition_real]['movement'][mouse_real][trial_real][1]
bout_start_position = self.analysis[experiment_real][condition_real]['movement'][mouse_real][trial_real][0]
start_time = self.analysis[experiment_real][condition_real]['start time'][mouse_real][trial_real]
# get initial conditions and endpoint quantities
IC_x = np.array(self.analysis[experiment][condition]['prev movements'][mouse][trial][0][-ETD:])
IC_y = np.array(self.analysis[experiment][condition]['prev movements'][mouse][trial][1][-ETD:])
IC_angle = np.array(self.analysis[experiment][condition]['prev movements'][mouse][trial][2][-ETD:])
IC_time = np.array(self.analysis[experiment][condition]['prev homings'][mouse][trial][3][-ETD:])
turn_angles = np.array(self.analysis[experiment][condition]['prev movements'][mouse][trial][3][-ETD:])
# MOE = 10
# x_edge_trial = self.analysis[experiment][condition]['x edge'][mouse][trial]
# SH_x = np.array(self.analysis[experiment][condition]['prev homings'][mouse][trial][0][-ETD:])
# if x_edge_trial > 50 and np.sum(SH_x > 25 + MOE):
# IC_x = IC_x[SH_x > 25 + MOE]
# IC_y = IC_y[SH_x > 25 + MOE]
# IC_angle = IC_angle[SH_x > 25 + MOE]
# IC_time = IC_time[SH_x > 25 + MOE]
# turn_angles = turn_angles[SH_x > 25 + MOE]
# elif np.sum(SH_x > 75 - MOE):
# IC_x = IC_x[SH_x > 75 - MOE]
# IC_y = IC_y[SH_x > 75 - MOE]
# IC_angle = IC_angle[SH_x > 75 - MOE]
# IC_time = IC_time[SH_x > 75 - MOE]
# turn_angles = turn_angles[SH_x > 75 - MOE]
if not shuffle_time: # gather previous movements
IC_x_all = np.concatenate((IC_x_all, IC_x))
IC_y_all = np.concatenate((IC_y_all, IC_y))
IC_angle_all = np.concatenate((IC_angle_all, IC_angle))
IC_time_all = np.concatenate((IC_time_all, IC_time))
turn_angles_all = np.concatenate((turn_angles_all, turn_angles))
else:
# sample randomly from these movements
random_idx = np.random.choice(len(IC_x_all), len(IC_x_all), replace = False)
IC_x = IC_x_all[random_idx]
IC_y = IC_y_all[random_idx]
IC_angle = IC_angle_all[random_idx]
IC_time = IC_time_all[random_idx]
turn_angles = turn_angles_all[random_idx]
# calculate difference in ICs
delta_x = abs( np.array(IC_x - bout_start_position[0]) )
delta_y = abs( np.array(IC_y - bout_start_position[1]) )
delta_angle = abs( np.array(IC_angle - bout_start_angle) )
delta_angle[delta_angle > 180] = 360 - delta_angle[delta_angle > 180]
delta_time = start_time - np.array(IC_time)
''' prediction data -- angle turned is a function of prev movement and ICs '''
x_weights = (1 / (delta_x+.0001)) / np.sum(1/(delta_x+.0001))
y_weights = (1 / (delta_y+.0001)) / np.sum(1 / (delta_y+.0001))
angle_weights = (1 / (delta_angle+.0001)) / np.sum(1 / (delta_angle+.0001))
time_weights = (1 / (delta_time+.0001)) / np.sum(1 / (delta_time+.0001))
x_pred[trial_num] = np.sum(turn_angles * x_weights)
y_pred[trial_num] = np.sum(turn_angles * y_weights)
angle_pred[trial_num] = np.sum(turn_angles * angle_weights)
time_pred[trial_num] = np.sum(turn_angles * time_weights) * 0
mean_pred[trial_num] = np.mean(turn_angles) * 0
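                                    # each previous turn angle is weighted by the inverse of how far its initial
                                    # conditions (x, y, body angle, time) are from the current trial's, giving a
                                    # similarity-weighted average of past turn angles; the time- and mean-based
                                    # predictors are zeroed out here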
# try mean pred is the *closest* angle to real
# x_pred[trial_num] = 0
# y_pred[trial_num] = 0
# angle_pred[trial_num] = 0
# time_pred[trial_num] = 0
# mean_pred[trial_num] = turn_angles[np.argmin( abs(turn_angles - angle_turned[trial_num]) )]
# ''' turn angle prediction to edginess prediction '''
if not shuffle_time:
edginess[trial_num] = abs(self.analysis[experiment][condition]['edginess'][mouse][trial])
initial_body_angle[trial_num] = self.analysis[experiment_real][condition_real]['movement'][mouse_real][trial_real][1]
initial_x[trial_num] = self.analysis[experiment_real][condition_real]['movement'][mouse_real][trial_real][0][0]
initial_y[trial_num] = self.analysis[experiment_real][condition_real]['movement'][mouse_real][trial_real][0][1]
x_edge[trial_num] = self.analysis[experiment][condition]['x edge'][mouse][trial_real]
# add mouse and trial to list of mice and trials
if not shuffle_time:
mouse_trial_list.append([experiment, condition, mouse, trial])
t+=1
''' concatenate??... '''
# angle_turned_all = np.concatenate((angle_turned_all, angle_turned))
#
# x_pred_all = np.concatenate((x_pred_all, x_pred))
# y_pred_all = np.concatenate((y_pred_all, y_pred))
# angle_pred_all = np.concatenate((angle_pred_all, angle_pred))
# time_pred_all = np.concatenate((time_pred_all, time_pred ))
# mean_pred_all = np.concatenate((mean_pred_all, mean_pred ))
#
#
# IC_angle_array = np.ones((len(angle_turned_all[~np.isnan(angle_turned_all)]), 5))
# angle_metrics = [x_pred_all[~np.isnan(angle_turned_all)], y_pred_all[~np.isnan(angle_turned_all)], angle_pred_all[~np.isnan(angle_turned_all)], \
# time_pred_all[~np.isnan(angle_turned_all)], mean_pred_all[~np.isnan(angle_turned_all)]]
# for i, angle_metric in enumerate(angle_metrics): #
# IC_angle_array[:, i] = angle_metric
#
# # get the data
# predict_data_y_all = [ angle_turned_all[~np.isnan(angle_turned_all)].reshape(-1, 1)] # for the movements input data
''' don't concatenate... '''
IC_angle_array = np.ones((len(angle_turned[~np.isnan(angle_turned)]), 5))
angle_metrics = [x_pred[~np.isnan(angle_turned)], y_pred[~np.isnan(angle_turned)],
angle_pred[~np.isnan(angle_turned)], \
time_pred[~np.isnan(angle_turned)], mean_pred[~np.isnan(angle_turned)]]
for i, angle_metric in enumerate(angle_metrics): #
IC_angle_array[:, i] = angle_metric
# get the data
predict_data_y_all_angle = [angle_turned[~np.isnan(angle_turned)].reshape(-1, 1)] # for the movements input data
predict_data_y_all_edgy = [edginess[~np.isnan(edginess)].reshape(-1, 1)] # for the movements input data
data_y_labels = ['angle']
predict_data_x_all = [IC_angle_array] # turn angles
predict_data_y_all = predict_data_y_all_angle # angles
''' predict edginess from turn angle '''
predict_edginess = True
if predict_edginess:
if not shuffle_time:
initial_body_angle = initial_body_angle[~np.isnan(initial_body_angle)].reshape(-1, 1)
initial_x = initial_x[~np.isnan(initial_x)].reshape(-1, 1)
initial_y = initial_y[~np.isnan(initial_y)].reshape(-1, 1)
x_edge = x_edge[~np.isnan(x_edge)].reshape(-1, 1)
# create the model
LR = linear_model.Ridge(alpha=.1)
# train the model
LR.fit(predict_data_x_all[0], predict_data_y_all_angle[0])
print(LR.score(predict_data_x_all[0], predict_data_y_all_angle[0]))
# get the model prediction
# model_prediction = LR.predict(predict_data_x_all[0])
model_prediction = predict_data_y_all_angle[0]
# predict body angles after turn
predicted_body_angle = initial_body_angle[~np.isnan(initial_body_angle)].reshape(-1, 1) - model_prediction
predicted_body_angle[predicted_body_angle >180] = predicted_body_angle[predicted_body_angle >180] - 360
predicted_body_angle[(predicted_body_angle > 0) * (predicted_body_angle < 90)] = -1 # super edgy to the right
predicted_body_angle[(predicted_body_angle > 0) * (predicted_body_angle > 90)] = 1 # super edgy to the right
# predict position at y = 40; set reasonable boundaries
x_at_40 = np.maximum(15 * np.ones_like(initial_x), np.minimum(90 * np.ones_like(initial_x),
initial_x - (40 - initial_y) / np.tan(np.deg2rad(predicted_body_angle)) ))
# get edginess
y_pos_end = 86.5; x_pos_end = 50; y_edge = 50
slope = (y_pos_end - initial_y) / (x_pos_end - (initial_x+.0001))
intercept = initial_y - initial_x * slope
distance_to_line = abs(40 - slope * x_at_40 - intercept) / np.sqrt((-slope) ** 2 + (1) ** 2)
homing_vector_at_center = (40 - intercept) / slope
# do line from starting position to edge position
slope = (y_edge - initial_y) / (x_edge - initial_x)
intercept = initial_y - initial_x * slope
distance_to_edge = abs(40 - slope * x_at_40 - intercept) / np.sqrt((-slope) ** 2 + (1) ** 2)
# compute the max possible deviation
edge_vector_at_center = (40 - intercept) / slope
line_to_edge_offset = abs(homing_vector_at_center - edge_vector_at_center) # + 5
# get index at center point (wall location)
# prev_edginess = np.maximum(np.zeros_like(distance_to_line), np.minimum(1.2*np.ones_like(distance_to_line),
# (distance_to_line - distance_to_edge + line_to_edge_offset) / (2 * line_to_edge_offset) ))
prev_edginess = abs((distance_to_line - distance_to_edge + line_to_edge_offset) / (2 * line_to_edge_offset))
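                            # predicted edginess: where the projected path crosses y = 40, expressed as its
                            # distance from the direct start-to-shelter line relative to the line toward the
                            # obstacle edge, normalized by the separation between those two lines at y = 40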
predict_data_x_all = [prev_edginess] # predicted prev edginess #scipy.stats.zscore(
predict_data_y_all = predict_data_y_all_edgy # edginess
# edgy input colors
input_colors = [ [[0, .6, .4], [.5,.5,.5]], [[0, .6, .4], [.5,.5,.5]], [[.6, 0, .4], [.5,.5,.5]] ]
# split the data for cross val
num_trials = 1000 - 985 * shuffle_time #985
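                    # 1000 random train/test splits on the real data, 15 per shuffled repeat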
                    # loop across angle prediction and trajectory prediction
for i, (fig, ax, predict_data_x) in enumerate(zip([fig1, fig2, fig3],[ax1, ax2, ax3], predict_data_x_all)):
# get prediction data
predict_data_y = predict_data_y_all[i]
# get color
color = input_colors[i][int(shuffle_time)]
# initialize prediction arrays
prediction_scores = np.zeros(num_trials)
for j in range(num_trials):
test_size = 0.5
# test_size = 0.25
# if shuffle_time: test_size = 0.25
# get x-val set
X_train, X_test, y_train, y_test = train_test_split(predict_data_x, \
predict_data_y, test_size=test_size, random_state=j)
# create the model
LR = linear_model.Ridge(alpha = .1) # .15, .5
# train the model
LR.fit(X_train, y_train)
# get the score
prediction_scores[j] = LR.score(X_test, y_test)
# exclude super negative ones
# prediction_scores = prediction_scores[prediction_scores > np.percentile(prediction_scores, 10)]
# put into larger array
prediction_scores_all = np.concatenate((prediction_scores_all, prediction_scores))
print(np.median(prediction_scores_all))
# exclude super negative ones
# prediction_scores_all = prediction_scores_all[prediction_scores_all > np.percentile(prediction_scores_all, 5)]
#do kde
kde = fit_kde(prediction_scores_all, bw=.03) # .04
plot_kde(ax, kde, prediction_scores_all, z = 0, vertical=False, color=color, violin=False, clip=False) # True)
#plt.show()
fig.savefig(os.path.join(self.summary_plots_folder,'Predictions of ' + data_y_labels[i] + ' - ' + self.labels[c] + '.png'), format='png')
fig.savefig(os.path.join(self.summary_plots_folder,'Predictions of ' + data_y_labels[i] + ' - ' + self.labels[c] + '.eps'), format='eps')
plt.show()
print('hi')
else:
'''
            PREDICTION PLOTS: BY EDGINESS OR BY **EXPLORATION**
'''
fps = 30
escape_duration = 12
ETD = 10 #4
traj_loc = 40
# mean_types = ['even', 'space', 'angle'] #, 'time', 'shelter time']
mean_types = ['space', 'angle', 'shelter time'] #, 'escape']
mean_type = 'even'
mean_colors = [[0, .6, .4], [0, .6, .8], [0, .6, .8], [.4, 0, 1] ]
mean_colors = [[0, .6, .4], [.4, 0, .8], [0, .6, .8], [.5, .5, .5]]
# initialize figures
fig1, ax1, fig2, ax2, fig3, ax3 = initialize_figures_prediction(self)
for m, mean_type in enumerate(mean_types):
# loop over experiments and conditions
for c, (experiment, condition) in enumerate(zip(self.experiments, self.conditions)):
# extract experiments from nested list
sub_experiments, sub_conditions = extract_experiments(experiment, condition)
# get the number of trials
number_of_trials = get_number_of_trials(sub_experiments, sub_conditions, self.analysis)
number_of_mice = get_number_of_mice(sub_experiments, sub_conditions, self.analysis)
mouse_trial_list = []
# initialize array to fill in with each trial's data
edginess, end_idx, angle_turned, _, _, prev_edginess, scaling_factor, _, trial_num, prev_movement_and_ICs, data_y_for_prev_movement, dist_to_SH, dist_to_other_SH = \
initialize_variable_edginess(number_of_trials, self, sub_experiments)
# initialize array to fill in with each trial's data
efficiency, efficiency_RT, end_idx, num_prev_homings_EV, num_prev_homings_front_EV, num_prev_homings_other_EV, num_prev_homings_HV, time_exploring_pre, time_exploring_post, distance_exploring_pre, distance_exploring_post, time_exploring_obstacle_pre, \
time_exploring_obstacle_post, time_exploring_far_pre, time_exploring_far_post, time_exploring_edge, time_exploring_other_edge, scaling_factor, time, trial_num, trials, edginess, prev_edginess, dist_to_SH, dist_to_other_SH, RT_all, avg_speed, _ = \
initialize_variables_efficiency(number_of_trials, self, sub_experiments)
for shuffle_time in [False]:
num_repeats = shuffle_time * 19 + 1
for r in range(num_repeats):
trial_num = -1
# loop over each experiment and condition
for e, (experiment_real, condition_real) in enumerate(zip(sub_experiments, sub_conditions)):
# loop over each mouse
for i, mouse_real in enumerate(self.analysis[experiment_real][condition_real]['start time']):
if self.analysis_options['control'] and not mouse_real=='control': continue
if not self.analysis_options['control'] and mouse_real=='control': continue
# loop over each trial
prev_homings = []
t = 0
for trial_real in range(len(self.analysis[experiment_real][condition_real]['end time'][mouse_real])):
trial_num += 1
# impose conditions
if t > 2: continue
end_idx[trial_num] = self.analysis[experiment_real][condition_real]['end time'][mouse_real][trial_real]
if np.isnan(end_idx[trial_num]): continue
if (end_idx[trial_num] > escape_duration * fps): continue
# skip certain trials
y_start = self.analysis[experiment_real][condition_real]['path'][mouse_real][trial_real][1][0] * scaling_factor
x_start = self.analysis[experiment_real][condition_real]['path'][mouse_real][trial_real][0][0] * scaling_factor
if y_start > 25: continue
if abs(x_start-50) > 30: continue
# use different data if shuffle:
if shuffle_time:
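                                            # shuffle control: replace the current trial with one drawn at random
                                            # (with replacement) from the pool of real trials collected so far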
experiment, condition, mouse, trial = mouse_trial_list[np.random.randint(len(mouse_trial_list))]
else:
experiment, condition, mouse, trial = experiment_real, condition_real, mouse_real, trial_real
# just add real data for edginess etc
if not shuffle_time:
# add data
edginess[trial_num] = abs(self.analysis[experiment][condition]['edginess'][mouse][trial])
# get previous edginess
time_to_shelter, SR = get_prev_edginess(ETD, condition_real, experiment_real, mouse_real, prev_edginess, dist_to_SH,
dist_to_other_SH, scaling_factor, self, traj_loc, trial_real, trial_num, edginess,
[], [], mean = mean_type, get_initial_conditions=True)
# _, _, prev_edginess_all, elig_idx = get_all_prev_edginess(ETD, condition, experiment, mouse, prev_edginess, dist_to_SH, dist_to_other_SH, scaling_factor, self, traj_loc, trial, trial_num, edginess, [], [])
# add data
fill_in_trial_data_efficiency(ETD, condition, efficiency, efficiency_RT, experiment, mouse, num_prev_homings_EV,
num_prev_homings_front_EV, num_prev_homings_other_EV,
num_prev_homings_HV,
time_exploring_pre, time_exploring_post, distance_exploring_pre, distance_exploring_post,
time_exploring_obstacle_pre,
time_exploring_obstacle_post, time_exploring_far_pre, time_exploring_far_post, time_exploring_edge,
time_exploring_other_edge,
self, time, trial, trial_num, trials, edginess, t)
# add mouse and trial to list of mice and trials
if not shuffle_time:
mouse_trial_list.append([experiment, condition, mouse, trial])
t+=1
# format mean prior trajectory
if not shuffle_time:
prev_edginess = prev_edginess[~np.isnan(edginess)]
exploration_array = np.ones((len(edginess[~np.isnan(edginess)]), 2))
exploration_metrics = [time_exploring_far_pre[~np.isnan(edginess)], time_exploring_far_post[~np.isnan(edginess)]]
for i, exploration_metric in enumerate(exploration_metrics): #
exploration_array[:, i] = exploration_metric
if shuffle_time: # regress out other variable
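                                        # m is the ordinary least squares slope of the exploration metric on the mean
                                        # prior trajectory; subtracting prev_edginess * m leaves the residual exploration
                                        # that is not linearly explained by the prior trajectory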
m = (((np.mean(prev_edginess) * np.mean(exploration_array[:, i])) - np.mean(prev_edginess * exploration_array[:, i])) /
((np.mean(prev_edginess) ** 2) - np.mean(prev_edginess ** 2)))
regressed_data = exploration_array[:, i] - prev_edginess * m
exploration_array[:, i] = regressed_data
if shuffle_time: # regress out exploration from mean prior traj
for exploration_metric in exploration_metrics:
m = (((np.mean(exploration_metric) * np.mean(prev_edginess)) - np.mean(exploration_metric * prev_edginess)) /
((np.mean(exploration_metric) ** 2) - np.mean(exploration_metric ** 2)))
regressed_data = prev_edginess - exploration_array[:, 0] * m
prev_edginess = regressed_data
# get the data
predict_data_y_all = [ edginess[~np.isnan(edginess)].reshape(-1, 1), # for the EXPLORATION input data
edginess[~np.isnan(edginess)].reshape(-1, 1)] # for the mean edginess input data
# turn_angle_for_prev_movement ] # for the movements input data
data_y_labels = ['exploration','trajectory'] #, 'angle']
predict_data_x_all = [exploration_array, # exploration data
prev_edginess.reshape(-1, 1)]#, # mean prev edginess
# prev_movements_and_ICs_array] # all prev homing movements
# edgy input colors
input_colors = [ [[0, .6, .4], [.5,.5,.5]], [[0, .6, .4], [.5,.5,.5]], [[.6, 0, .4], [.5,.5,.5]] ]
# split the data for cross val
num_trials = 1000
# loop acros angle prediction and traj prediction
for i, (fig, ax, predict_data_x) in enumerate(zip([fig1, fig2, fig3],[ax1, ax2, ax3], predict_data_x_all)):
# get prediction data
predict_data_y = predict_data_y_all[i]
# get color
color = input_colors[i][int(shuffle_time)]
# color = mean_colors[m]
# initialize prediction arrays
prediction_scores = np.zeros(num_trials)
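                                # Monte Carlo cross-validation: num_trials random train/test splits, a ridge
                                # regression fit on the training portion and scored (R^2) on the held-out portion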
for j in range(num_trials):
test_size = 0.5
if shuffle_time and i==2:
test_size = .025
# get x-val set
X_train, X_test, y_train, y_test = train_test_split(predict_data_x, \
predict_data_y, test_size=test_size, random_state=j)
# create the model
# LR = linear_model.LinearRegression()
# if i:
# LR = linear_model.LogisticRegression()
# else:
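                                    # Ridge: least-squares regression with an L2 penalty (alpha) on the coefficients,
                                    # which stabilises the fit given the small number of trials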
LR = linear_model.Ridge(alpha = .1) # .15, .5
# train the model
# try:
LR.fit(X_train, y_train)
# except:
# print('i=h')
# print(LR.coef_)
# get the score
prediction_scores[j] = LR.score(X_test, y_test)
print(data_y_labels[i])
print(np.median(prediction_scores))
# exclude super negative ones
prediction_scores = prediction_scores[prediction_scores > np.percentile(prediction_scores, 10)]
# plot the scores
# ax.scatter(prediction_scores, np.zeros_like(prediction_scores), color=color, s=20, alpha = .1)
#do kde
kde = fit_kde(prediction_scores, bw=.04) # .04
plot_kde(ax, kde, prediction_scores, z = 0, vertical=False, color=color, violin=False, clip=False) # True)
fig.savefig(os.path.join(self.summary_plots_folder,'Prediction of ' + data_y_labels[i] + ' - ' + self.labels[c] + '.png'), format='png')
                                fig.savefig(os.path.join(self.summary_plots_folder,'Prediction of ' + data_y_labels[i] + ' - ' + self.labels[c] + '.eps'), format='eps')
plt.show()
print('hi')
# # get the correlation
# r, p = scipy.stats.pearsonr(exploration_array[:, 0], edginess)
# print('r = ' + str(np.round(r, 3)) + '\np = ' + str(np.round(p, 3)))
#
# m = (((np.mean(prev_edginess) * np.mean(exploration_array[:, 0])) - np.mean(prev_edginess * exploration_array[:, 0])) /
# ((np.mean(prev_edginess) ** 2) - np.mean(prev_edginess ** 2)))
#
# regressed_data = exploration_array[:, 0] - prev_edginess * m
# r, p = scipy.stats.pearsonr(prev_edginess, regressed_data)
# print('r = ' + str(np.round(r, 3)) + '\np = ' + str(np.round(p, 3)))
#
# # get the correlation after regressing out prev edginess
# r, p = scipy.stats.pearsonr(regressed_data, edginess)
# print('r = ' + str(np.round(r, 3)) + '\n= ' + str(np.round(p, 3)))
# #
# def plot_efficiency(self):
# # initialize parameters
# fps = 30
# traj_loc = 40
# escape_duration = 12 # 12 #6
# HV_cutoff = .681
# ETD = 10
# # ax2, fig2, ax3, fig3 = initialize_figures_efficiency(self)
# efficiency_data = [[], [], [], []]
# duration_data = [[], [], [], []]
# # initialize arrays for stats
# efficiency_data_all = []
# duration_data_all = []
# prev_homings_data_all = []
# all_conditions = []
# mouse_ID = [];
# m = 1
# data_condition = ['naive', 'experienced']
# # data_condition = ['food','escape']
# # data_condition = ['OR - EV', 'OR - HV', 'OF']
# fig1, ax1 = plt.subplots(figsize=(13, 5))
#
# colors = [[1,0,0],[0,0,0]]
# kde_colors = [ [1, .4, .4], [.75, .75, .75]]
#
# # loop over experiments and conditions
# for c, (experiment, condition) in enumerate(zip(self.experiments, self.conditions)):
# # extract experiments from nested list
# sub_experiments, sub_conditions = extract_experiments(experiment, condition)
# # get the number of trials
# number_of_trials = get_number_of_trials(sub_experiments, sub_conditions, self.analysis)
# number_of_mice = get_number_of_mice(sub_experiments, sub_conditions, self.analysis)
# # initialize array to fill in with each trial's data
# efficiency, efficiency_RT, end_idx, num_prev_homings_EV, num_prev_homings_other_EV, num_prev_homings_HV, time_exploring, distance_exploring, time_exploring_obstacle, time_exploring_far, \
# scaling_factor, time, trial_num, trials, edginess, prev_edginess, dist_to_SH, dist_to_other_SH, RT_all, avg_speed, _ = \
# initialize_variables_efficiency(number_of_trials, self, sub_experiments)
# # loop over each experiment and condition
# for e, (experiment, condition) in enumerate(zip(sub_experiments, sub_conditions)):
# if 'void' in experiment or 'dark' in experiment:
# escape_duration = 12
# # loop over each mouse
# for i, mouse in enumerate(self.analysis[experiment][condition]['full path length']):
# # initialize arrays for stats
# efficiency_data_mouse = []
# duration_data_mouse = []
# prev_homings_data_mouse = []
# # control analysis
# if self.analysis_options['control'] and not mouse == 'control': continue
# if not self.analysis_options['control'] and mouse == 'control': continue
# # loop over each trial
# t = 0
# for trial in range(len(self.analysis[experiment][condition]['end time'][mouse])):
#
# trial_num += 1
# if t > 2 and not 'food' in experiment and not 'void' in experiment: continue
#
# if t > 8: continue
# # print(t)
# # impose coniditions
# end_idx[trial_num] = self.analysis[experiment][condition]['end time'][mouse][trial]
# if (end_idx[trial_num] > escape_duration * fps) or np.isnan(end_idx[trial_num]): continue
# # skip certain trials
# y_start = self.analysis[experiment][condition]['path'][mouse][trial][1][0] * scaling_factor
# x_start = self.analysis[experiment][condition]['path'][mouse][trial][0][0] * scaling_factor
# if y_start > 25: continue
# if abs(x_start - 50) > 25: continue # 25
#
# # get prev edginess
# _, _ = get_prev_edginess(ETD, condition, experiment, mouse, prev_edginess, dist_to_SH, dist_to_other_SH,
# scaling_factor, self, traj_loc, trial, trial_num, edginess, [], [])
#
# # only do predict edgy:
# # if c == 0:
# # if prev_edginess[trial_num] <= HV_cutoff and 'down' in experiment: continue
# # elif c == 1:
# # if prev_edginess[trial_num] > HV_cutoff and 'down' in experiment: continue
#
# # add data
# fill_in_trial_data_efficiency(ETD, condition, efficiency, efficiency_RT, experiment, mouse, num_prev_homings_EV, num_prev_homings_other_EV, num_prev_homings_HV,
# time_exploring, distance_exploring, time_exploring_obstacle, time_exploring_far,
# self, time, trial, trial_num, trials, edginess, t)
#
# # normalize end idx to
# RT = self.analysis[experiment][condition]['RT'][mouse][trial]
# if not RT:
# print(RT)
# continue
# RT_all[trial_num] = RT
#
# avg_speed[trial_num] = self.analysis[experiment][condition]['RT path length'][mouse][trial] * scaling_factor / (
# (end_idx[trial_num] - RT) / fps)
# # avg_speed[trial_num] = self.analysis[experiment][condition]['full path length'][mouse][trial] * scaling_factor / (end_idx[trial_num] / fps)
#
# end_idx[trial_num] = (end_idx[trial_num] / fps) / self.analysis[experiment][condition]['optimal path length'][mouse][
# trial] / scaling_factor * 100
#
# # add data for stats
# efficiency_data_mouse.append(efficiency[trial_num])
# # duration_data_mouse.append(end_idx[trial_num]) #TEMP COMMENTING
# duration_data_mouse.append(RT)
# prev_homings_data_mouse.append(num_prev_homings_EV[trial_num])
#
# t += 1
#
# # append data for stats
# if efficiency_data_mouse:
# efficiency_data_all.append(efficiency_data_mouse)
# duration_data_all.append(duration_data_mouse)
# prev_homings_data_all.append(prev_homings_data_mouse)
# all_conditions.append(data_condition[c])
# mouse_ID.append(m);
# m += 1
#
# # format end ind
# # end_idx = np.array([e/30 for e in end_idx])
# end_idx[np.isnan(efficiency)] = np.nan
# # loop over data to plot
# for i, (data, data_label) in enumerate(zip([efficiency_RT, end_idx, RT_all, avg_speed, edginess],
# ['Efficiency'])): # , 'Duration', 'Reaction Time', 'Speed', 'Trajectory'])): #edginess, 'Trajectory',
# # for i, (data, data_label) in enumerate(zip([edginess], ['Trajectory'])): # edginess, 'Trajectory',
#
# # for i, (data, data_label) in enumerate(zip([edginess, efficiency, end_idx], ['Trajectory', 'Efficiency', 'Duration'])):
# # for x_data, x_data_label in zip([num_prev_homings], ['Prior homings']):
# plot_data = data[~np.isnan(data)]
#
# # for x_data, x_data_label in zip([trials, time, num_prev_homings_EV, num_prev_homings_HV, prev_edginess, time_exploring, distance_exploring, time_exploring_far, time_exploring_obstacle],
# # ['Trials', 'Time', 'Edge vector homings', 'Homing vector homings', 'Mean prior trajectory','Time exploring', 'Distance explored', 'Time exploring far side', 'Time exploring obstacle']):
#
# for x_data, x_data_label in zip([trials, time_exploring], ['trial number']): # , 'Time exploring']):
#
# print('\nCorrelation between ' + data_label + ' and ' + x_data_label)
#
# # only plot escapes
# data_for_box_plot = data[~np.isnan(data)]
# print(len(data_for_box_plot))
# x_data = x_data[~np.isnan(data)]
#
# # get the correlation
# r, p = scipy.stats.pearsonr(x_data, data_for_box_plot)
# print('r = ' + str(np.round(r, 3)) + '\np = ' + str(np.round(p, 3)))
#
# # initialize figure
# plt.title(data_label + ' x ' + x_data_label)
# # set up the figure
# # if data_label=='Efficiency': ax1.set_ylim([-.03, 1.03])
# # elif data_label=='Duration': ax1.set_ylim([-.1, 7])
#
# if np.max(x_data) < 5:
# ax1.set_xticks(np.unique(x_data).astype(int))
# else:
# ax1.set_xticks(np.arange(5, 25, 5))
# # ax1.set_xlim([5,20])
#
# # jitter the axis
# scatter_axis = scatter_the_axis_efficiency(plot_data, x_data + c/3 - .2)
# # plot each trial
# ax1.scatter(scatter_axis, plot_data, color=colors[c], s=15, alpha=1, edgecolor=colors[c], linewidth=1)
#
# for x in np.unique(x_data):
# # plot kde
# kde = fit_kde(plot_data[x_data==x], bw=.02) #.2) # .04
# plot_kde(ax1, kde, plot_data[x_data==x], z=x + c/3 - .15, vertical=True, normto=.15, color=kde_colors[c], violin=False, clip=True)
#
# # box and whisker
# bp = ax1.boxplot([plot_data[x_data==x], [0, 0]], positions=[x + c / 3 - .2, -10], showfliers=False, widths = [0.05, .05], zorder=99)
# plt.setp(bp['boxes'], color=[.5, .5, .5], linewidth=2)
# plt.setp(bp['whiskers'], color=[.5, .5, .5], linewidth=2)
# plt.setp(bp['medians'], linewidth=2)
# ax1.set_xlim(.25, 3.75)
# ax1.set_ylim(.5, 1.05)
# # ax1.set_ylim(.95, 1.9)
# ax1.set_xticks([1,2,3])
# ax1.set_xticklabels([1,2,3])
#
#
#
# # # for each trial
# # for x in np.unique(x_data):
# # # plot kde
# # kde = fit_kde(plot_data[x_data>=0], bw=.02) #.2) # .04
# # plot_kde(ax1, kde, plot_data[x_data>=0], z=x + c/3 - .15, vertical=True, normto=.15, color=kde_colors[c], violin=False, clip=True)
# #
# # # box and whisker
# # bp = ax1.boxplot([plot_data[x_data>=0], [0, 0]], positions=[x + c / 3 - .2, -10], showfliers=False, widths = [0.05, .05], zorder=99)
# # plt.setp(bp['boxes'], color=[.5, .5, .5], linewidth=2)
# # plt.setp(bp['whiskers'], color=[.5, .5, .5], linewidth=2)
# # plt.setp(bp['medians'], linewidth=2)
# # ax1.set_xlim(.25, 3.75)
# # ax1.set_ylim(.5, 1.05)
# # # ax1.set_ylim(.95, 1.9)
# # ax1.set_xticks([1,2,3])
# # ax1.set_xticklabels([1,2,3])
# #
# # # jitter the axis
# # scatter_axis = scatter_the_axis_efficiency(plot_data, np.ones_like(plot_data) * (x + c/3 - .2))
# # # plot each trial
# # ax1.scatter(scatter_axis, plot_data, color=colors[c], s=15, alpha=1, edgecolor=colors[c], linewidth=1)
#
#
#
# ax1.plot([-1, 4], [1, 1], linestyle='--', color=[.5, .5, .5, .5])
# # save the plot
# plt.savefig(os.path.join(self.summary_plots_folder, data_label + ' by ' + x_data_label + ' - ' + self.labels[c] + '.png'), format='png')
# plt.savefig(os.path.join(self.summary_plots_folder, data_label + ' by ' + x_data_label + ' - ' + self.labels[c] + '.eps'), format='eps')
#
# plt.show()
# print('done')
#
#
#
def plot_efficiency(self):
# initialize parameters
fps = 30
traj_loc = 40
escape_duration = 12 #12 #6
HV_cutoff = .681
ETD = 10
# ax2, fig2, ax3, fig3 = initialize_figures_efficiency(self)
efficiency_data = [[],[],[],[]]
duration_data = [[],[],[],[]]
# initialize arrays for stats
efficiency_data_all = []
duration_data_all = []
prev_homings_data_all = []
all_conditions = []
mouse_ID = []; m = 1
# data_condition = ['naive','experienced']
data_condition = ['escape', 'food']
# data_condition = ['OR - EV', 'OR - HV', 'OF']
# data_condition = ['Obstacle removed (no shelter)', 'obstacle removed', 'acute OR', 'obstacle']
colors = [[0,0,0],[1,0,0]]
#
plot_stuff = True
do_traversals = False
# loop over experiments and conditions
for c, (experiment, condition) in enumerate(zip(self.experiments, self.conditions)):
print(' - - - -- - - - -- - - - - - - -- - - - - - - - - -')
# extract experiments from nested list
sub_experiments, sub_conditions = extract_experiments(experiment, condition)
# get the number of trials
number_of_trials = get_number_of_trials(sub_experiments, sub_conditions, self.analysis)
number_of_mice = get_number_of_mice(sub_experiments, sub_conditions, self.analysis)
# initialize array to fill in with each trial's data
efficiency, efficiency_RT, end_idx, num_prev_homings_EV, num_prev_homings_front_EV, num_prev_homings_other_EV, num_prev_homings_HV, time_exploring_pre, time_exploring_post, distance_exploring_pre, distance_exploring_post, time_exploring_obstacle_pre,\
time_exploring_obstacle_post,time_exploring_far_pre,time_exploring_far_post, time_exploring_edge, time_exploring_other_edge, scaling_factor, time, trial_num, trials, edginess, prev_edginess, dist_to_SH, dist_to_other_SH, RT_all, avg_speed, _ = \
initialize_variables_efficiency(number_of_trials, self, sub_experiments)
# loop over each experiment and condition
for e, (experiment, condition) in enumerate(zip(sub_experiments, sub_conditions)):
if 'void' in experiment or 'dark' in experiment:
escape_duration = 12
if 'food' in experiment: escape_duration = 9
# else:escape_duration = 9
# loop over each mouse
for i, mouse in enumerate(self.analysis[experiment][condition]['start time']):
print(mouse)
# initialize arrays for stats
efficiency_data_mouse = []
duration_data_mouse = []
prev_homings_data_mouse = []
# control analysis
if self.analysis_options['control'] and not mouse=='control': continue
if not self.analysis_options['control'] and mouse=='control': continue
# loop over each trial
t = 0
for trial in range(len(self.analysis[experiment][condition]['end time'][mouse])):
trial_num += 1
if t > 2 and not 'food' in experiment and not 'void' in experiment and not 'dark' in experiment: continue
if 'food' in experiment and condition == 'no obstacle' and self.analysis[experiment][condition]['start time'][mouse][trial] < 20: continue
if t > 8: continue
# if t > 2: continue
# if 'on off' in experiment and trial: continue
# print(t)
                        # impose conditions
end_idx[trial_num] = self.analysis[experiment][condition]['end time'][mouse][trial]
if (end_idx[trial_num] > escape_duration * fps) or np.isnan(end_idx[trial_num]): continue
# skip certain trials
y_start = self.analysis[experiment][condition]['path'][mouse][trial][1][0] * scaling_factor
x_start = self.analysis[experiment][condition]['path'][mouse][trial][0][0] * scaling_factor
if y_start > 25: continue
if abs(x_start-50) > 30: continue #25
# get prev edginess
_, _ = get_prev_edginess(ETD, condition, experiment, mouse, prev_edginess, dist_to_SH, dist_to_other_SH,
scaling_factor, self, traj_loc, trial, trial_num, edginess, [], [])
# only do predict edgy:
# if c == 0:
# if prev_edginess[trial_num] <= HV_cutoff and 'down' in experiment: continue
# elif c == 1:
# if prev_edginess[trial_num] > HV_cutoff and 'down' in experiment: continue
# add data
fill_in_trial_data_efficiency(ETD, condition, efficiency, efficiency_RT, experiment, mouse, num_prev_homings_EV,num_prev_homings_front_EV, num_prev_homings_other_EV,num_prev_homings_HV,
time_exploring_pre, time_exploring_post, distance_exploring_pre, distance_exploring_post, time_exploring_obstacle_pre,
time_exploring_obstacle_post, time_exploring_far_pre, time_exploring_far_post, time_exploring_edge, time_exploring_other_edge,
self, time, trial, trial_num, trials, edginess, t)
# if edginess[trial_num] < HV_cutoff: continue
if do_traversals:
traversal = self.analysis[experiment][condition]['back traversal'][mouse]
# get the duration of those paths
# duration = traversal[t*5+3]
if traversal:
x_edge = self.analysis[experiment][condition]['x edge'][mouse][trial]
# if x_edge==25: x_edge = 75
# else: x_edge = 25
spont_edge = []
for trav in traversal[0 * 5 + 0]:
spont_edge.append(trav[0][-1]*scaling_factor)
esc_edge = []
for trav in traversal[1 * 5 + 0]:
esc_edge.append(trav[0][-1]*scaling_factor)
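                                # count prior edge-vector (EV) and homing-vector (HV) traversals: spontaneous (index 0)
                                # and stimulus-evoked (index 1) back traversals that were fast (duration < 1.5), ended
                                # within 25 units of the relevant obstacle edge, fall above/below HV_cutoff on edginess,
                                # and occurred in the ~15 min (35 min for 'void') before this trial's start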
num_prev_homings_EV[trial_num] = np.sum((np.array(traversal[0 * 5 + 3]) < 1.5) * (abs(np.array(spont_edge)-x_edge) < 25) * \
(np.array(traversal[0 * 5 + 2]) > HV_cutoff) * \
(np.array(traversal[0 * 5 + 1]) < self.analysis[experiment][condition]['start time'][mouse][trial] * 30 * 60) * \
(np.array(traversal[0 * 5 + 1]) > (self.analysis[experiment][condition]['start time'][mouse][trial]-(15+20*('void' in experiment))) * 30 * 60)) + \
np.sum((np.array(traversal[1 * 5 + 3]) < 1.5) * (abs(np.array(esc_edge)-x_edge) < 25) * \
(np.array(traversal[1 * 5 + 2]) > HV_cutoff) * \
(np.array(traversal[1 * 5 + 1]) < self.analysis[experiment][condition]['start time'][mouse][trial] * 30 * 60) * \
(np.array(traversal[1 * 5 + 1]) > (self.analysis[experiment][condition]['start time'][mouse][trial] - (15+20*('void' in experiment))) * 30 * 60))
num_prev_homings_HV[trial_num] = np.sum((np.array(traversal[0 * 5 + 3]) < 1.5) * (abs(np.array(spont_edge)-x_edge) < 25) * \
(np.array(traversal[0 * 5 + 2]) < HV_cutoff) * \
(np.array(traversal[0 * 5 + 1]) < self.analysis[experiment][condition]['start time'][mouse][trial] * 30 * 60) * \
(np.array(traversal[0 * 5 + 1]) > (self.analysis[experiment][condition]['start time'][mouse][trial]-(15+20*('void' in experiment))) * 30 * 60)) + \
np.sum((np.array(traversal[1 * 5 + 3]) < 1.5) * (abs(np.array(esc_edge)-x_edge) < 25) * \
(np.array(traversal[1 * 5 + 2]) < HV_cutoff) * \
(np.array(traversal[1 * 5 + 1]) < self.analysis[experiment][condition]['start time'][mouse][trial] * 30 * 60) * \
(np.array(traversal[1 * 5 + 1]) > (self.analysis[experiment][condition]['start time'][mouse][trial] - (15+20*('void' in experiment))) * 30 * 60))
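                                # mean prior trajectory: average edginess of eligible recent traversals (duration < 3,
                                # within the preceding ~15 min; edgy paths that ended far from the obstacle edge are
                                # excluded), pooled across spontaneous and stimulus-evoked paths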
eligible_homings = ~((np.array(traversal[0 * 5 + 2]) > HV_cutoff) * (abs(np.array(spont_edge)-x_edge) > 40)) * (np.array(traversal[0 * 5 + 3]) < 3) * \
(np.array(traversal[0 * 5 + 1]) < self.analysis[experiment][condition]['start time'][mouse][trial] * 30 * 60) * \
(np.array(traversal[0 * 5 + 1]) > (self.analysis[experiment][condition]['start time'][mouse][trial] - 15) * 30 * 60)
if np.sum(eligible_homings):
mean_homing = np.mean(np.array(traversal[0 * 5 + 2])[eligible_homings])
else: mean_homing = 0
eligible_escapes = ~((np.array(traversal[1 * 5 + 2]) > HV_cutoff) * (abs(np.array(esc_edge) - x_edge) > 40)) * (np.array(traversal[1 * 5 + 3]) < 3) * \
(np.array(traversal[1 * 5 + 1]) < self.analysis[experiment][condition]['start time'][mouse][trial] * 30 * 60) * \
(np.array(traversal[1 * 5 + 1]) > (self.analysis[experiment][condition]['start time'][mouse][trial] - 15) * 30 * 60)
if np.sum(eligible_escapes):
mean_escape = np.mean(np.array(traversal[1 * 5 + 2])[eligible_escapes])
else: mean_escape = 0
prev_edginess[trial_num] = ( mean_homing * np.sum(eligible_homings) + mean_escape * np.sum(eligible_escapes) ) / \
(np.sum(eligible_homings) + np.sum(eligible_escapes))
else:
num_prev_homings_EV[trial_num] = 0
# prev_edginess[trial_num] = 0
if np.isnan(prev_edginess[trial_num]):
prev_edginess[trial_num] = 0
traversal = self.analysis[experiment][condition]['front traversal'][mouse]
# get the duration of those paths
# duration = traversal[t*5+3]
if traversal:
x_edge = self.analysis[experiment][condition]['x edge'][mouse][trial]
spont_edge = []
for trav in traversal[0 * 5 + 0]:
spont_edge.append(trav[0][-1]*scaling_factor)
esc_edge = []
for trav in traversal[1 * 5 + 0]:
esc_edge.append(trav[0][-1]*scaling_factor)
num_prev_homings_other_EV[trial_num] = np.sum((np.array(traversal[0 * 5 + 3]) < 1.5) * (abs(np.array(spont_edge)-x_edge) < 25) * \
(np.array(traversal[0 * 5 + 2]) > HV_cutoff) * \
(np.array(traversal[0 * 5 + 1]) < self.analysis[experiment][condition]['start time'][mouse][trial] * 30 * 60))
else:
num_prev_homings_other_EV[trial_num] = 0
# print(mouse)
# print(trial + 1)
# print(num_prev_homings_EV[trial_num])
# print(num_prev_homings_other_EV[trial_num])
# print(edginess[trial_num])
# print('')
# normalize end idx to
RT = self.analysis[experiment][condition]['RT'][mouse][trial]
# if not RT:
# print(RT)
# continue
RT_all[trial_num] = RT
avg_speed[trial_num] = self.analysis[experiment][condition]['RT path length'][mouse][trial] * scaling_factor / ((end_idx[trial_num] - RT) / fps)
# avg_speed[trial_num] = self.analysis[experiment][condition]['full path length'][mouse][trial] * scaling_factor / (end_idx[trial_num] / fps)
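                        # note: time[trial_num] is assigned three times below; only the last value
                        # (distance of the start x-position from the arena midline at x = 50) is kept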
time[trial_num] = self.analysis[experiment][condition]['start time'][mouse][trial]
time[trial_num] = (end_idx[trial_num] / fps) / self.analysis[experiment][condition]['optimal path length'][mouse][trial] / scaling_factor * 100
time[trial_num] = abs(50 - x_start)
end_idx[trial_num] = (end_idx[trial_num] / fps - RT) / self.analysis[experiment][condition]['optimal RT path length'][mouse][trial] / scaling_factor * 100
# add data for stats
efficiency_data_mouse.append([efficiency_RT[trial_num], trial])
duration_data_mouse.append([end_idx[trial_num], trial]) #TEMP COMMENTING #RT
# duration_data_mouse.append(num_prev_homings_EV[trial_num])
prev_homings_data_mouse.append(num_prev_homings_EV[trial_num])
t += 1
# print(trial+1)
#append data for stats
if efficiency_data_mouse:
efficiency_data_all.append(efficiency_data_mouse)
duration_data_all.append(duration_data_mouse)
prev_homings_data_all.append(prev_homings_data_mouse)
all_conditions.append(data_condition[c])
mouse_ID.append(m); m+= 1
# format end ind
# end_idx = np.array([e/30 for e in end_idx])
end_idx[np.isnan(efficiency)] = np.nan
# loop over data to plot
# for i, (data, data_label) in enumerate(zip([edginess, efficiency_RT, end_idx, RT_all, avg_speed], ['Trajectory'])): #,'Efficiency', 'Duration', 'Reaction Time', 'Speed', 'Trajectory'])): #edginess, 'Trajectory',
# for i, (data, data_label) in enumerate(zip([edginess], ['Trajectory'])):
for i, (data, data_label) in enumerate(zip([end_idx], ['RT duration', 'RT duration', 'Efficiency', 'RT'])): # time, , efficiency_RT, RT_all
# for i, (data, data_label) in enumerate(zip([RT_all], ['Reaction time'])):
# for i, (data, data_label) in enumerate(zip([edginess, efficiency, end_idx], ['Trajectory', 'Efficiency', 'Duration'])):
# for x_data, x_data_label in zip([num_prev_homings], ['Prior homings']):
plot_data = data[~np.isnan(data)]
if False or True:
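                    # debugging toggle: 'False or True' always evaluates True, so the correlation loop below always runs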
# for x_data, x_data_label in zip([trials, time, num_prev_homings_EV, num_prev_homings_other_EV, num_prev_homings_HV, prev_edginess, time_exploring, distance_exploring, time_exploring_far, time_exploring_obstacle, time_exploring_edge, time_exploring_other_edge],
# ['Trials', 'Time', 'Edge vector homings','Other edge vector homings', 'Homing vector homings', 'Mean prior trajectory','Time exploring', 'Distance explored', 'Time exploring far side', 'Time exploring obstacle', 'Time exploring edge', 'Time exploring other edge']):
# for x_data, x_data_label in zip([trials, time, time_exploring_pre, distance_exploring_pre, time_exploring_post, distance_exploring_post,
# time_exploring_far_pre,time_exploring_far_post, time_exploring_obstacle_pre, time_exploring_obstacle_post, time_exploring_other_edge, time_exploring_edge],
# ['Trials', 'Time', 'Time exploring (pre)', 'Distance explored (pre)', 'Time exploring (post)', 'Distance explored (post)',
# 'Time exploring far side (pre)', 'Time exploring far side (post)', 'Time exploring obstacle (pre)', 'Time exploring obstacle (post)',
# 'Time exploring other edge (pre)', 'Time exploring edge (pre)']):
# num_homings_combined = (num_prev_homings_EV>0).astype(int) - (num_prev_homings_HV>0).astype(int)
# num_homings_combined[num_prev_homings_EV==0] = -1
#
# for x_data, x_data_label in zip([time, num_prev_homings_EV>0, num_prev_homings_EV, num_prev_homings_other_EV, num_prev_homings_other_EV>0,
# num_prev_homings_front_EV, num_prev_homings_front_EV>0, prev_edginess, num_prev_homings_HV, num_prev_homings_HV>2, num_homings_combined],
# ['Time', '1 Edge vector homings', 'Edge vector homings','Other edge vector homings','1 other edge vector homings',
# 'Front edge vectors','1 front edge vector', 'Mean prior trajectory', 'Homing vector homings', '1 Homing vector homing', 'Combined homings']):
# for x_data, x_data_label in zip([trials, num_prev_homings_EV>0, num_prev_homings_EV, prev_edginess], ['trial', '1 Edge vector homings', 'Edge vector homings', 'Mean prior trajectory']):
for x_data, x_data_label in zip([trials], ['trial']): # ,edginess>HV_cutoff #, 'edginess'
print('\nCorrelation between ' + data_label + ' and ' + x_data_label)
# only plot escapes
                        data_for_box_plot = data[~np.isnan(data)]
import numpy as np
import os
import re
import requests
import sys
import time
from netCDF4 import Dataset
import pandas as pd
from bs4 import BeautifulSoup
from tqdm import tqdm
# setup constants used to access the data from the different M2M interfaces
BASE_URL = 'https://ooinet.oceanobservatories.org/api/m2m/' # base M2M URL
SENSOR_URL = '12576/sensor/inv/' # Sensor Information
# setup access credentials
AUTH = ['OOIAPI-853A3LA6QI3L62', '<KEY>']
def M2M_Call(uframe_dataset_name, start_date, end_date):
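    """Request a NetCDF data set from the OOI M2M API for the given reference designator /
    stream (uframe_dataset_name) between start_date and end_date, then poll the asynchronous
    request until it is ready (checked every 3 s, for up to ~20 minutes)."""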
options = '?beginDT=' + start_date + '&endDT=' + end_date + '&format=application/netcdf'
r = requests.get(BASE_URL + SENSOR_URL + uframe_dataset_name + options, auth=(AUTH[0], AUTH[1]))
if r.status_code == requests.codes.ok:
data = r.json()
else:
return None
# wait until the request is completed
print('Waiting for OOINet to process and prepare data request, this may take up to 20 minutes')
url = [url for url in data['allURLs'] if re.match(r'.*async_results.*', url)][0]
check_complete = url + '/status.txt'
with tqdm(total=400, desc='Waiting') as bar:
        for i in range(400):
            r = requests.get(check_complete)
            bar.update(1)
            # compute the elapsed time first so it is defined when the request completes
            elapsed = (i * 3) / 60
            if r.status_code == requests.codes.ok:
                bar.n = 400
                bar.last_print_n = 400
                bar.refresh()
                print('\nrequest completed in %f minutes.' % elapsed)
                break
            else:
                time.sleep(3)
return data
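# Example usage (a sketch: the reference designator below is one of those defined in M2M_URLs;
# the dates and the netCDF file tag are illustrative placeholders):
# data = M2M_Call('CE02SHSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument',
#                 '2019-01-01T00:00:00.000Z', '2019-02-01T00:00:00.000Z')
# nclist = M2M_Files(data, '.*METBK.*\\.nc$')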
def M2M_Files(data, tag=''):
"""
Use a regex tag combined with the results of the M2M data request to collect the data from the THREDDS catalog.
Collected data is gathered into an xarray dataset for further processing.
:param data: JSON object returned from M2M data request with details on where the data is to be found for download
:param tag: regex tag to use in discriminating the data files, so we only collect the correct ones
:return: the collected data as an xarray dataset
"""
# Create a list of the files from the request above using a simple regex as a tag to discriminate the files
url = [url for url in data['allURLs'] if re.match(r'.*thredds.*', url)][0]
files = list_files(url, tag)
return files
def list_files(url, tag=''):
"""
Function to create a list of the NetCDF data files in the THREDDS catalog created by a request to the M2M system.
:param url: URL to user's THREDDS catalog specific to a data request
:param tag: regex pattern used to distinguish files of interest
:return: list of files in the catalog with the URL path set relative to the catalog
"""
page = requests.get(url).text
soup = BeautifulSoup(page, 'html.parser')
pattern = re.compile(tag)
return [node.get('href') for node in soup.find_all('a', text=pattern)]
def M2M_Data(nclist,variables):
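    """Open each NetCDF file in nclist through the THREDDS OPeNDAP endpoint and append the
    requested variables' values to their .data arrays; the first variable is assumed to be
    time in seconds since 1900-01-01 and is also returned converted to pandas datetimes."""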
thredds = 'https://opendap.oceanobservatories.org/thredds/dodsC/ooi/'
#nclist is going to contain more than one url eventually
for jj in range(len(nclist)):
url=nclist[jj]
url=url[25:]
dap_url = thredds + url + '#fillmismatch'
openFile = Dataset(dap_url,'r')
for ii in range(len(variables)):
dum = openFile.variables[variables[ii].name]
variables[ii].data = np.append(variables[ii].data, dum[:].data)
tmp = variables[0].data/60/60/24
time_converted = pd.to_datetime(tmp, unit='D', origin=pd.Timestamp('1900-01-01'))
return variables, time_converted
class var(object):
def __init__(self):
"""A Class that generically holds data with a variable name
and the units as attributes"""
self.name = ''
self.data = np.array([])
self.units = ''
def __repr__(self):
return_str = "name: " + self.name + '\n'
return_str += "units: " + self.units + '\n'
return_str += "data: size: " + str(self.data.shape)
return return_str
class structtype(object):
def __init__(self):
""" A class that imitates a Matlab structure type
"""
self._data = []
def __getitem__(self, index):
"""implement index behavior in the struct"""
if index == len(self._data):
self._data.append(var())
return self._data[index]
def __len__(self):
return len(self._data)
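# Note: structtype grows on demand; indexing one past the current length appends a fresh var(),
# which is how M2M_URLs below fills var_list[0], var_list[1], ... without pre-allocating.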
def M2M_URLs(platform_name,node,instrument_class,method):
var_list = structtype()
#MOPAK
if platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/SBS01/01-MOPAK0000/telemetered/mopak_o_dcl_accel'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#METBK
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
#FLORT
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/06-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/06-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/02-FLORTD000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/04-FLORTK000/telemetered/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
#FDCHP
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/08-FDCHPA000/telemetered/fdchp_a_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#DOSTA
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/04-DOSTAD000/telemetered/dosta_abcdjm_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/03-DOSTAD000/telemetered/dosta_abcdjm_ctdbp_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/02-DOFSTK000/telemetered/dofst_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
#ADCP
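# ADCP velocity-earth streams: each block requests time, bin depths (meters), instrument attitude
# (heading/pitch/roll in deci-degrees), and eastward/northward/upward seawater velocities (m/s).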
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/01-ADCPTA000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/01-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/01-ADCPTA000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/01-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/04-ADCPTC000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/04-ADCPSJ000/telemetered/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
#ZPLSC
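# ZPLSC bioacoustic sonar streams: only the time record is requested here, for both the
# telemetered and recovered_host methods.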
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/07-ZPLSCC000/telemetered/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD37/07-ZPLSCC000/recovered_host/zplsc_c_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#WAVSS
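# WAVSS surface-wave statistics: zero-crossing count plus wave heights and periods (average,
# maximum, significant, H1/10, Hmo, mean, peak, Tp5) and mean wave direction/spread.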
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
#VELPT
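# VELPT single-point velocity meters: eastward/northward/upward velocity (m/s) plus raw attitude
# (deci-degrees), temperature (0.01 degC), and pressure (0.001 dbar) on buoy and NSIF nodes.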
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/04-VELPTA000/telemetered/velpt_ab_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
#PCO2W
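# PCO2W seawater pCO2 sensors: thermistor temperature (degC) and pCO2 of seawater (uatm).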
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/05-PCO2WB000/telemetered/pco2w_abc_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
#PHSEN
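# PHSEN seawater pH sensors: thermistor temperature (degC) and pH of seawater (unitless).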
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/06-PHSEND000/telemetered/phsen_abcdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
#SPKIR
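# SPKIR downwelling spectral irradiance: the spkir_abj_cspp_downwelling_vector in uW cm-2 nm-1.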
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/08-SPKIRB000/telemetered/spkir_abj_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
#PRESF
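# PRESF seafloor pressure (tide) measurements: absolute seafloor pressure (dbar) and seawater
# temperature (degC) on the MFN nodes.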
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/02-PRESFA000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/02-PRESFA000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/02-PRESFB000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/02-PRESFC000/telemetered/presf_abc_dcl_tide_measurement'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
#CTDBP
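# CTDBP pumped CTDs on buoy, NSIF, and MFN nodes: temperature, practical salinity, density,
# pressure, and conductivity.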
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/SBD17/06-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/SBD17/06-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/03-CTDBPE000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
#VEL3D
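# VEL3D point turbulent-velocity meters on the MFN nodes: eastward/northward/upward turbulent
# velocity (m/s) and seawater pressure (0.001 dbar).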
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD35/01-VEL3DD000/telemetered/vel3d_cd_dcl_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
#VEL3DK
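# VEL3D-K on the CE09OSPM wire-following profiler: velocities, heading/pitch/roll, and the
# co-located CTD pressure; followed by the profiler CTDPF-CKL CTD stream.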
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/01-VEL3DK000/telemetered/vel3d_k_wfp_stc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/03-CTDPFK000/telemetered/ctdpf_ckl_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
#PCO2A
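# PCO2A (air-sea pCO2 sensor): each surface-mooring branch below exposes the seawater and
# atmospheric partial pressure of CO2 (uatm) plus the derived air-sea CO2 flux
# (pco2_co2flux, mol m-2 s-1). Only telemetered streams are mapped in this group.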
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
#PARAD
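# PARAD (photosynthetically available radiation): the profiler branch below returns
# parad_k_par (umol photons m-2 s-1) together with the interpolated CTD pressure.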
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSPM/WFP01/05-PARADK000/telemetered/parad_k__stc_imodem_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
#OPTAA
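# OPTAA (spectral absorption/attenuation meter): only the time coordinate is mapped for
# these streams; the full multispectral a/c arrays are not pulled in through this shortcut.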
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID27/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/MFD37/01-OPTAAD000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/MFD37/01-OPTAAC000/telemetered/optaa_dj_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#NUTNR
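# NUTNR (SUNA nitrate): each NSIF branch below returns the raw and salinity-corrected
# nitrate concentrations (umol/L). Note that the 'suna_dcl_recovered' stream name is used
# even for the telemetered method.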
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSM/RID16/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSM/RID16/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/RID26/07-NUTNRB000/telemetered/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
#MOPAK
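# MOPAK (3-axis motion pack, recovered-host): only the time coordinate is mapped for these
# buoy accelerometer streams.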
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/SBD17/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSPM' and node == 'BUOY' and instrument_class == 'MOPAK' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSPM/SBS01/01-MOPAK0000/recovered_host/mopak_o_dcl_accel_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#METBK
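# METBK (bulk meteorology, recovered-host): each surface-mooring branch below carries the
# full bulk-met suite -- SST, surface conductivity/salinity, corrected wind components,
# barometric pressure, air temperature, relative humidity, long/shortwave irradiance,
# precipitation, the minute flux products (heat, latent, net longwave, sensible),
# surface currents, and specific humidity.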
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
#FLORT
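# FLORT (three-channel fluorometer, recovered-host): each branch below returns the seawater
# scattering coefficient, chlorophyll-a, CDOM, total volume scattering, and optical
# backscatter with their native units.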
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/06-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/SBD17/06-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/02-FLORTD000/recovered_host/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
#FDCHP
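# FDCHP (direct covariance flux package): only the time coordinate is mapped here.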
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/08-FDCHPA000/recovered_host/fdchp_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#DOSTA
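# DOSTA (dissolved oxygen optode, recovered-host): the NSIF branches return the corrected
# dissolved_oxygen (umol/kg) plus the estimated optode concentration, optode temperature,
# and temperature-corrected oxygen; the MFN branches return dissolved_oxygen and the
# optode oxygen only.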
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/04-DOSTAD000/recovered_host/dosta_abcdjm_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'optode_temperature'
var_list[4].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
var_list[4].units = 'umol/L'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD37/03-DOSTAD000/recovered_host/dosta_abcdjm_ctdbp_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_ln_optode_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
#ADCP
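# ADCP (profiling currents, recovered-host): each branch below returns earth-referenced
# velocity profiles (eastward/northward/upward) with bin depths and instrument attitude
# (heading/pitch/roll in deci-degrees).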
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/01-ADCPTA000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/01-ADCPTC000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/01-ADCPTA000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/01-ADCPTC000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/04-ADCPTC000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/04-ADCPSJ000/recovered_host/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
#WAVSS
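# WAVSS (surface wave statistics, recovered-host): the buoy branches below return the bulk
# wave statistics -- zero crossings, average/max/significant wave heights and periods,
# Hmo, peak and Tp5 periods, and mean direction/spread.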
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
#VELPT
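# VELPT (single-point velocity meter, recovered-host): the buoy and NSIF branches below
# return east/north/up velocities plus attitude, temperature, and pressure in the
# instrument's scaled units (deci-degrees, 0.01 degC, 0.001 dbar).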
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
#uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/04-VELPTA000/recovered_host/velpt_ab_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
#PCO2W
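# pCO2 Water: partial pressure of CO2 dissolved in seawater plus the thermistor temperature (recovered_host streams)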
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/05-PCO2WB000/recovered_host/pco2w_abc_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
#PHSEN
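# pH sensor: seawater pH plus the thermistor temperature (recovered_host streams)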
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/06-PHSEND000/recovered_host/phsen_abcdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
#SPKIR
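# Spectral irradiance: downwelling irradiance vector in uW cm-2 nm-1 (recovered_host streams)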
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'SPKIR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/08-SPKIRB000/recovered_host/spkir_abj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
#PRESF
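# Seafloor pressure: absolute pressure and temperature from the MFN tide-measurement streams (recovered_host)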
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/02-PRESFA000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/02-PRESFA000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/02-PRESFB000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/02-PRESFC000/recovered_host/presf_abc_dcl_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'abs_seafloor_pressure'
var_list[2].name = 'seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
#CTDBP
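# CTD: temperature, salinity, density, pressure, and conductivity (recovered_host streams)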
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/SBD17/06-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/SBD17/06-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD37/03-CTDBPE000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
#VEL3D
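# 3-D point velocity meter: eastward/northward/upward turbulent velocity plus pressure on the MFNs (recovered_host streams)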
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD35/01-VEL3DD000/recovered_host/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
#PCO2A
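# Air-sea pCO2: surface-water and atmospheric pCO2 plus the derived CO2 flux (recovered_host streams)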
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
#OPTAA
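# Optical absorption/attenuation: only the time coordinate is requested for these streams (recovered_host)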
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID27/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/MFD37/01-OPTAAD000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'OPTAA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/MFD37/01-OPTAAC000/recovered_host/optaa_dj_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#NUTNR
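# Nitrate: raw and salinity-corrected nitrate concentrations from the SUNA (recovered_host streams)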
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE01ISSM/RID16/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE06ISSM/RID16/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/RID26/07-NUTNRB000/recovered_host/suna_dcl_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
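#CTDBP
# CTD: recovered-instrument streams (RecoveredInst), plus the CE09OSPM wire-following profiler CTD (RecoveredWFP)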
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD37/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/SBD17/06-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD37/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/SBD17/06-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD37/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD37/03-CTDBPE000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/03-CTDPFK000/recovered_wfp/ctdpf_ckl_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdpf_ckl_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdpf_ckl_seawater_pressure'
var_list[5].name = 'ctdpf_ckl_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
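#ADCP
# ADCP: recovered-instrument earth-coordinate velocity profiles with bin depths and heading/pitch/roll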
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/01-ADCPTA000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/01-ADCPTC000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/01-ADCPTA000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/01-ADCPTC000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/04-ADCPTC000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ADCP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/04-ADCPSJ000/recovered_inst/adcp_velocity_earth'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
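#ZPLSC (bio-acoustic sonar) Data below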
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'ZPLSC' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD37/07-ZPLSCC000/recovered_inst/zplsc_echogram_data'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
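#VELPT (single point velocity meter) Data below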
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/SBD17/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/SBD17/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/SBD11/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'VELPT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/04-VELPTA000/recovered_inst/velpt_ab_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'eastward_velocity'
var_list[2].name = 'northward_velocity'
var_list[3].name = 'upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
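#VEL3D (3-D single point turbulent velocity) Data below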
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/01-VEL3DK000/recovered_wfp/vel3d_k_wfp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_k_eastward_velocity'
var_list[2].name = 'vel3d_k_northward_velocity'
var_list[3].name = 'vel3d_k_upward_velocity'
var_list[4].name = 'vel3d_k_heading'
var_list[5].name = 'vel3d_k_pitch'
var_list[6].name = 'vel3d_k_roll'
var_list[7].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'ddegrees'
var_list[5].units = 'ddegrees'
var_list[6].units = 'ddegrees'
var_list[7].units = 'dbar'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/01-VEL3DD000/recovered_inst/vel3d_cd_dcl_velocity_data_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
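#PRESF (seafloor pressure) Data below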
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/02-PRESFA000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/02-PRESFA000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/02-PRESFB000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PRESF' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/02-PRESFC000/recovered_inst/presf_abc_tide_measurement_recovered'
var_list[0].name = 'time'
var_list[1].name = 'presf_tide_pressure'
var_list[2].name = 'presf_tide_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
var_list[2].units = 'degC'
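#PHSEN (seawater pH) Data below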
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PHSEN' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/06-PHSEND000/recovered_inst/phsen_abcdef_instrument'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'phsen_abcdef_ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
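#PCO2W (pCO2 water) Data below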
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'PCO2W' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD35/05-PCO2WB000/recovered_inst/pco2w_abc_instrument'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
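#PARAD (photosynthetically available radiation) Data below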
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/05-PARADK000/recovered_wfp/parad_k__stc_imodem_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_k_par'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
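#NUTNR (nitrate) Data below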
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE07SHSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
elif platform_name == 'CE09OSSM' and node == 'NSIF' and instrument_class == 'NUTNR' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/RID26/07-NUTNRB000/recovered_inst/suna_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
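#FDCHP (direct covariance air-sea flux) Data below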
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredInst':
uframe_dataset_name = 'CE02SHSM/SBD12/08-FDCHPA000/recovered_inst/fdchp_a_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
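#FLORT (3-wavelength fluorometer) Data below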
elif platform_name == 'CE01ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/SBD17/06-FLORTD000/recovered_inst/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE06ISSM' and node == 'BUOY' and instrument_class == 'FLORT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/SBD17/06-FLORTD000/recovered_inst/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/04-FLORTK000/recovered_wfp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
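#DOSTA (dissolved oxygen) Data below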
elif platform_name == 'CE09OSPM' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE09OSPM/WFP01/02-DOFSTK000/recovered_wfp/dofst_k_wfp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dofst_k_oxygen_l2'
var_list[2].name = 'dofst_k_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'Hz'
var_list[3].units = 'dbar'
elif platform_name == 'CE01ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/RID16/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'NSIF' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/RID16/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE07SHSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE07SHSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
elif platform_name == 'CE09OSSM' and node == 'MFN' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE09OSSM/MFD37/03-DOSTAD000/recovered_inst/dosta_abcdjm_ctdbp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[3].name = 'ctdbp_seawater_temperature'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'degC'
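#WAVSS_Stats (wave statistics from ADCPT-M) Data below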
elif platform_name == 'CE01ISSM' and node == 'MFN' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredInst':
uframe_dataset_name = 'CE01ISSM/MFD35/04-ADCPTM000/recovered_inst/adcpt_m_instrument_log9_recovered'
var_list[0].name = 'time'
var_list[1].name = 'significant_wave_height'
var_list[2].name = 'peak_wave_period'
var_list[3].name = 'peak_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'seconds'
var_list[3].units = 'degrees'
elif platform_name == 'CE06ISSM' and node == 'MFN' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredInst':
uframe_dataset_name = 'CE06ISSM/MFD35/04-ADCPTM000/recovered_inst/adcpt_m_instrument_log9_recovered'
var_list[0].name = 'time'
var_list[1].name = 'significant_wave_height'
var_list[2].name = 'peak_wave_period'
var_list[3].name = 'peak_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'seconds'
var_list[3].units = 'degrees'
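#Cabled BEP (Benthic Experiment Package, streamed) Data below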
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/06-CTDBPN106/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_no_seawater_pressure'
var_list[5].name = 'ctdbp_no_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/06-CTDBPO108/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_no_seawater_pressure'
var_list[5].name = 'ctdbp_no_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'DOSTA' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/06-CTDBPN106/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'DOSTA' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/06-CTDBPO108/streamed/ctdbp_no_sample'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'ctd_tc_oxygen'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/10-PHSEND103/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/10-PHSEND107/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/09-PCO2WB103/streamed/pco2w_b_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/09-PCO2WB104/streamed/pco2w_b_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'ADCP' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/05-ADCPTB104/streamed/adcp_velocity_beam'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'ADCP' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/05-ADCPSI103/streamed/adcp_velocity_beam'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'VEL3D' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/07-VEL3DC108/streamed/vel3d_cd_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'VEL3D' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/07-VEL3DC107/streamed/vel3d_cd_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_c_eastward_turbulent_velocity'
var_list[2].name = 'vel3d_c_northward_turbulent_velocity'
var_list[3].name = 'vel3d_c_upward_turbulent_velocity'
var_list[4].name = 'seawater_pressure_mbar'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = '0.001dbar'
elif platform_name == 'CE02SHBP' and node == 'BEP' and instrument_class == 'OPTAA' and method == 'Streamed':
uframe_dataset_name = 'CE02SHBP/LJ01D/08-OPTAAD106/streamed/optaa_sample'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CE04OSBP' and node == 'BEP' and instrument_class == 'OPTAA' and method == 'Streamed':
uframe_dataset_name = 'CE04OSBP/LJ01C/08-OPTAAC104/streamed/optaa_sample'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
#CSPP Data below
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/08-FLORTJ000/telemetered/flort_dj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/08-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/08-FLORTJ000/telemetered/flort_dj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/08-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/02-DOSTAJ000/telemetered/dosta_abcdjm_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/02-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/02-DOSTAJ000/telemetered/dosta_abcdjm_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/02-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
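# CTD streams for the CE01ISSP and CE06ISSP profilers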
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/09-CTDPFJ000/telemetered/ctdpf_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/09-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/09-CTDPFJ000/telemetered/ctdpf_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/09-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
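# PARAD (photosynthetically available radiation) streams for the CE01ISSP and CE06ISSP profilers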
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/10-PARADJ000/telemetered/parad_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/10-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/10-PARADJ000/telemetered/parad_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/10-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
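# NUTNR (nitrate) streams for the CE01ISSP and CE06ISSP profilers; only the RecoveredCSPP method is handled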
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/06-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/06-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
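# SPKIR (downwelling spectral irradiance) streams for the CE01ISSP and CE06ISSP profilers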
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/07-SPKIRJ000/telemetered/spkir_abj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/07-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/07-SPKIRJ000/telemetered/spkir_abj_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/07-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
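# VELPT (point velocity meter) streams for the CE01ISSP and CE06ISSP profilers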
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE01ISSP/SP001/05-VELPTJ000/telemetered/velpt_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/05-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'Telemetered':
uframe_dataset_name = 'CE06ISSP/SP001/05-VELPTJ000/telemetered/velpt_j_cspp_instrument'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/05-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
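# OPTAA streams for the CE01ISSP and CE06ISSP profilers; only time and interpolated CTD pressure are requested here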
elif platform_name == 'CE01ISSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE01ISSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
elif platform_name == 'CE06ISSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE06ISSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
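# CE02SHSP and CE07SHSP (shelf surface-piercing profiler) streams; only the RecoveredCSPP method is handled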
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/07-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/07-FLORTJ000/recovered_cspp/flort_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/01-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/01-DOSTAJ000/recovered_cspp/dosta_abcdjm_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'estimated_oxygen_concentration'
var_list[3].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[4].name = 'optode_temperature'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'umol/L'
var_list[4].units = 'degC'
var_list[5].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/08-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/08-CTDPFJ000/recovered_cspp/ctdpf_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temperature'
var_list[2].name = 'salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/09-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/09-PARADJ000/recovered_cspp/parad_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_j_par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/05-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/05-NUTNRJ000/recovered_cspp/nutnr_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'salinity_corrected_nitrate'
var_list[2].name = 'nitrate_concentration'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/06-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/06-SPKIRJ000/recovered_cspp/spkir_abj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'spkir_abj_cspp_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/02-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/02-VELPTJ000/recovered_cspp/velpt_j_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'velpt_j_eastward_velocity'
var_list[2].name = 'velpt_j_northward_velocity'
var_list[3].name = 'velpt_j_upward_velocity'
var_list[4].name = 'heading'
var_list[5].name = 'roll'
var_list[6].name = 'pitch'
var_list[7].name = 'temperature'
var_list[8].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'degrees'
var_list[5].units = 'degrees'
var_list[6].units = 'degrees'
var_list[7].units = 'degC'
var_list[8].units = 'dbar'
elif platform_name == 'CE02SHSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE02SHSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
elif platform_name == 'CE07SHSP' and node == 'PROFILER' and instrument_class == 'OPTAA' and method == 'RecoveredCSPP':
uframe_dataset_name = 'CE07SHSP/SP001/04-OPTAAJ000/recovered_cspp/optaa_dj_cspp_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'dbar'
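# CE05MOAS glider CTD streams (platforms CEGL386 through CEGL247), Telemetered and RecoveredHost methods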
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/05-CTDGVM000/telemetered/ctdgv_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/05-CTDGVM000/recovered_host/ctdgv_m_glider_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_water_temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'sci_seawater_density'
var_list[4].name = 'sci_water_pressure_dbar'
var_list[5].name = 'sci_water_cond'
var_list[6].name = 'lat'
var_list[7].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
var_list[6].units = 'degree_north'
var_list[7].units = 'degree_east'
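# CE05MOAS glider DOSTA (dissolved oxygen) streams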
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/04-DOSTAM000/telemetered/dosta_abcdjm_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'DOSTA' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/04-DOSTAM000/recovered_host/dosta_abcdjm_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sci_oxy4_oxygen'
var_list[2].name = 'sci_abs_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[4].name = 'lat'
var_list[5].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/kg'
var_list[3].units = 'dbar'
var_list[4].units = 'degree_north'
var_list[5].units = 'degree_east'
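# CE05MOAS glider FLORT (fluorometer / optical backscatter) streams: scattering coefficient,
# chlorophyll, CDOM, and optical backscatter, plus CTD pressure and position.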
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/02-FLORTM000/telemetered/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'FLORT' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/02-FLORTM000/recovered_host/flort_m_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'sci_flbbcd_chlor_units'
var_list[3].name = 'sci_flbbcd_cdom_units'
var_list[4].name = 'sci_flbbcd_bb_units'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[7].name = 'lat'
var_list[8].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
var_list[7].units = 'degree_north'
var_list[8].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL386/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL384/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL383/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL382/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL381/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL327/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL326/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL320/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL319/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL312/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL311/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'Telemetered':
uframe_dataset_name = 'CE05MOAS/GL247/01-PARADM000/telemetered/parad_m_glider_instrument'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'PARAD' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/01-PARADM000/recovered_host/parad_m_glider_recovered'
var_list[0].name = 'time'
var_list[1].name = 'parad_m_par'
var_list[2].name = 'int_ctd_pressure'
var_list[3].name = 'lat'
var_list[4].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
var_list[3].units = 'degree_north'
var_list[4].units = 'degree_east'
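# CE05MOAS glider ADCP branches (recovered_host only): an 11-variable layout of bin depths,
# heading/pitch/roll, eastward/northward/upward seawater velocities, CTD pressure, and position.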
elif platform_name == 'CEGL386' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL386/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL384' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL384/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL383' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL383/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL382' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL382/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL381' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL381/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL327' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL327/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL326' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL326/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL320' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL320/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL319' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL319/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL312' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL312/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL311' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL311/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
elif platform_name == 'CEGL247' and node == 'GLIDER' and instrument_class == 'ADCP' and method == 'RecoveredHost':
uframe_dataset_name = 'CE05MOAS/GL247/03-ADCPAM000/recovered_host/adcp_velocity_glider'
var_list[0].name = 'time'
var_list[1].name = 'bin_depths'
var_list[2].name = 'heading'
var_list[3].name = 'pitch'
var_list[4].name = 'roll'
var_list[5].name = 'eastward_seawater_velocity'
var_list[6].name = 'northward_seawater_velocity'
var_list[7].name = 'upward_seawater_velocity'
var_list[8].name = 'int_ctd_pressure'
var_list[9].name = 'lat'
var_list[10].name = 'lon'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'meters'
var_list[2].units = 'deci-degrees'
var_list[3].units = 'deci-degrees'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'm/s'
var_list[6].units = 'm/s'
var_list[7].units = 'm/s'
var_list[8].units = 'dbar'
var_list[9].units = 'degree_north'
var_list[10].units = 'degree_east'
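# Surface mooring METBK hourly flux branches (CE02SHSM, CE07SHSM, CE04OSSM, CE09OSSM):
# 17 derived met/flux variables keyed to met_timeflx, for telemetered and recovered_host streams.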
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
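# WAVSS mean directional wave spectra (SBD12/05-WAVSSA000): spectral frequency parameters,
# directional PSD, and magnetically corrected mean/directional wave directions per mooring.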
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
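# WAVSS non-directional wave spectra: frequency axis parameters plus psd_non_directional only.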
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
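# WAVSS buoy motion time series: heave/north/east displacement arrays with the
# magnetically corrected x/y buoy motion products.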
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE02SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE02SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE04OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE04OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE09OSSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE09OSSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CE07SHSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CE07SHSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
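    #Cabled Shallow (CE04OSPS) and Deep (CE04OSPD) Profiler Data Streams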
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/2A-CTDPFA107/streamed/ctdpf_sbe43_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'seawater_pressure'
var_list[5].name = 'seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/01-CTDPFL105/recovered_inst/dpc_ctd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'dpc_ctd_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'CTD' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/01-CTDPFL105/recovered_wfp/dpc_ctd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'dpc_ctd_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'Streamed':
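        #note: the SBE43 dissolved oxygen is served on the co-located CTD stream (ctdpf_sbe43_sample)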
uframe_dataset_name = 'CE04OSPS/SF01B/2A-CTDPFA107/streamed/ctdpf_sbe43_sample'
var_list[0].name = 'time'
var_list[1].name = 'corrected_dissolved_oxygen'
var_list[2].name = 'seawater_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/06-DOSTAD105/recovered_inst/dpc_optode_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'DOSTA' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/06-DOSTAD105/recovered_wfp/dpc_optode_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'dosta_abcdjm_cspp_tc_oxygen'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/3A-FLORTD104/streamed/flort_d_data_record'
var_list[0].name = 'time'
var_list[1].name = 'seawater_scattering_coefficient'
var_list[2].name = 'fluorometric_chlorophyll_a'
var_list[3].name = 'fluorometric_cdom'
var_list[4].name = 'total_volume_scattering_coefficient'
var_list[5].name = 'optical_backscatter'
var_list[6].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm-1'
var_list[2].units = 'ug/L'
var_list[3].units = 'ppb'
var_list[4].units = 'm-1 sr-1'
var_list[5].units = 'm-1'
var_list[6].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/04-FLNTUA103/recovered_inst/dpc_flnturtd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'flntu_x_mmp_cds_fluorometric_chlorophyll_a'
        var_list[2].name = 'flntu_x_mmp_cds_total_volume_scattering_coefficient'
var_list[3].name = 'flntu_x_mmp_cds_bback_total'
var_list[4].name = 'flcdr_x_mmp_cds_fluorometric_cdom'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'ug/L'
var_list[2].units = 'm-1 sr-1'
var_list[3].units = 'm-1'
var_list[4].units = 'ppb'
var_list[5].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'FLORT' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/03-FLCDRA103/recovered_wfp/dpc_flcdrtd_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'flntu_x_mmp_cds_fluorometric_chlorophyll_a'
        var_list[2].name = 'flntu_x_mmp_cds_total_volume_scattering_coefficient'
var_list[3].name = 'flntu_x_mmp_cds_bback_total'
var_list[4].name = 'flcdr_x_mmp_cds_fluorometric_cdom'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'ug/L'
var_list[2].units = 'm-1 sr-1'
var_list[3].units = 'm-1'
var_list[4].units = 'ppb'
var_list[5].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/2B-PHSENA108/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'PARAD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/3C-PARADA102/streamed/parad_sa_sample'
var_list[0].name = 'time'
var_list[1].name = 'par_counts_output'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol photons m-2 s-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'SPKIR' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/3D-SPKIRA102/streamed/spkir_data_record'
var_list[0].name = 'time'
var_list[1].name = 'spkir_downwelling_vector'
var_list[2].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uW cm-2 nm-1'
var_list[2].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'NUTNR' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/4A-NUTNRA102/streamed/nutnr_a_sample'
var_list[0].name = 'time'
var_list[1].name = 'nitrate_concentration'
var_list[2].name = 'salinity_corrected_nitrate'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/L'
var_list[2].units = 'umol/L'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/4F-PCO2WA102/streamed/pco2w_a_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[3].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
var_list[3].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PROFILER' and instrument_class == 'VELPT' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/SF01B/4B-VELPTD106/streamed/velpt_velocity_data'
var_list[0].name = 'time'
var_list[1].name = 'velpt_d_eastward_velocity'
var_list[2].name = 'velpt_d_northward_velocity'
var_list[3].name = 'velpt_d_upward_velocity'
var_list[4].name = 'heading_decidegree'
var_list[5].name = 'roll_decidegree'
var_list[6].name = 'pitch_decidegree'
var_list[7].name = 'temperature_centidegree'
var_list[8].name = 'pressure_mbar'
var_list[9].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'deci-degrees'
var_list[5].units = 'deci-degrees'
var_list[6].units = 'deci-degrees'
var_list[7].units = '0.01degC'
var_list[8].units = '0.001dbar'
var_list[9].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredInst':
uframe_dataset_name = 'CE04OSPD/DP01B/02-VEL3DA105/recovered_inst/dpc_acm_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_a_eastward_velocity'
var_list[2].name = 'vel3d_a_northward_velocity'
var_list[3].name = 'vel3d_a_upward_velocity_ascending'
var_list[4].name = 'vel3d_a_upward_velocity_descending'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'm/s'
var_list[5].units = 'dbar'
elif platform_name == 'CE04OSPD' and node == 'PROFILER' and instrument_class == 'VEL3D' and method == 'RecoveredWFP':
uframe_dataset_name = 'CE04OSPD/DP01B/02-VEL3DA105/recovered_wfp/dpc_acm_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'vel3d_a_eastward_velocity'
var_list[2].name = 'vel3d_a_northward_velocity'
var_list[3].name = 'vel3d_a_upward_velocity_ascending'
var_list[4].name = 'vel3d_a_upward_velocity_descending'
var_list[5].name = 'int_ctd_pressure'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'm/s'
var_list[2].units = 'm/s'
var_list[3].units = 'm/s'
var_list[4].units = 'm/s'
var_list[5].units = 'dbar'
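    #CE04OSPS 200m Platform Data Streams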
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'CTD' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/PC01B/4A-CTDPFA109/streamed/ctdpf_optode_sample'
var_list[0].name = 'time'
var_list[1].name = 'seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'seawater_pressure'
var_list[5].name = 'seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'DOSTA' and method == 'Streamed':
#uframe_dataset_name = 'CE04OSPS/PC01B/4A-DOSTAD109/streamed/ctdpf_optode_sample'
uframe_dataset_name = 'CE04OSPS/PC01B/4A-CTDPFA109/streamed/ctdpf_optode_sample'
var_list[0].name = 'time'
var_list[1].name = 'dissolved_oxygen'
var_list[2].name = 'seawater_pressure' #also use this for the '4A-DOSTAD109/streamed/ctdpf_optode_sample' stream
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'umol/kg'
var_list[2].units = 'dbar'
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'PHSEN' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/PC01B/4B-PHSENA106/streamed/phsen_data_record'
var_list[0].name = 'time'
var_list[1].name = 'phsen_thermistor_temperature'
var_list[2].name = 'ph_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
elif platform_name == 'CE04OSPS' and node == 'PLATFORM200M' and instrument_class == 'PCO2W' and method == 'Streamed':
uframe_dataset_name = 'CE04OSPS/PC01B/4D-PCO2WA105/streamed/pco2w_a_sami_data_record'
var_list[0].name = 'time'
var_list[1].name = 'pco2w_thermistor_temperature'
var_list[2].name = 'pco2_seawater'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'uatm'
#Coastal Pioneer CSM Data Streams
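    #METBK Data Streams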
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/telemetered/metbk_a_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/recovered_host/metbk_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'sea_surface_temperature'
var_list[2].name = 'sea_surface_conductivity'
var_list[3].name = 'met_salsurf'
var_list[4].name = 'met_windavg_mag_corr_east'
var_list[5].name = 'met_windavg_mag_corr_north'
var_list[6].name = 'barometric_pressure'
var_list[7].name = 'air_temperature'
var_list[8].name = 'relative_humidity'
var_list[9].name = 'longwave_irradiance'
var_list[10].name = 'shortwave_irradiance'
var_list[11].name = 'precipitation'
var_list[12].name = 'met_heatflx_minute'
var_list[13].name = 'met_latnflx_minute'
var_list[14].name = 'met_netlirr_minute'
var_list[15].name = 'met_sensflx_minute'
var_list[16].name = 'eastward_velocity'
var_list[17].name = 'northward_velocity'
var_list[18].name = 'met_spechum'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[17].data = np.array([])
var_list[18].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'S/m'
var_list[3].units = 'unitless'
var_list[4].units = 'm/s'
var_list[5].units = 'm/s'
var_list[6].units = 'mbar'
var_list[7].units = 'degC'
var_list[8].units = '#'
var_list[9].units = 'W/m'
var_list[10].units = 'W/m'
var_list[11].units = 'mm'
var_list[12].units = 'W/m'
var_list[13].units = 'W/m'
var_list[14].units = 'W/m'
var_list[15].units = 'W/m'
var_list[16].units = 'm/s'
var_list[17].units = 'm/s'
var_list[18].units = 'g/kg'
#WAVSS
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_statistics'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Stats' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_statistics_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_zero_crossings'
var_list[2].name = 'average_wave_height'
var_list[3].name = 'mean_spectral_period'
var_list[4].name = 'max_wave_height'
var_list[5].name = 'significant_wave_height'
var_list[6].name = 'significant_period'
var_list[7].name = 'wave_height_10'
var_list[8].name = 'wave_period_10'
var_list[9].name = 'mean_wave_period'
var_list[10].name = 'peak_wave_period'
var_list[11].name = 'wave_period_tp5'
var_list[12].name = 'wave_height_hmo'
var_list[13].name = 'mean_direction'
var_list[14].name = 'mean_spread'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'counts'
var_list[2].units = 'm'
var_list[3].units = 'sec'
var_list[4].units = 'm'
var_list[5].units = 'm'
var_list[6].units = 'sec'
var_list[7].units = 'm'
var_list[8].units = 'sec'
var_list[9].units = 'sec'
var_list[10].units = 'sec'
var_list[11].units = 'sec'
var_list[12].units = 'm'
var_list[13].units = 'degrees'
var_list[14].units = 'degrees'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_mean_directional'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_MeanDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_mean_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'mean_direction'
var_list[2].name = 'number_bands'
var_list[3].name = 'initial_frequency'
var_list[4].name = 'frequency_spacing'
var_list[5].name = 'psd_mean_directional'
var_list[6].name = 'mean_direction_array'
var_list[7].name = 'directional_spread_array'
var_list[8].name = 'spread_direction'
var_list[9].name = 'wavss_a_directional_frequency'
var_list[10].name = 'wavss_a_corrected_mean_wave_direction'
var_list[11].name = 'wavss_a_corrected_directional_wave_direction'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degrees'
var_list[2].units = '1'
var_list[3].units = 'Hz'
var_list[4].units = 'Hz'
var_list[5].units = 'm2 Hz-1'
var_list[6].units = 'degrees'
var_list[7].units = 'degrees'
var_list[8].units = 'degrees'
var_list[9].units = 'Hz'
var_list[10].units = 'deg'
var_list[11].units = 'deg'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_non_directional'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_NonDir' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_non_directional_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'psd_non_directional'
var_list[5].name = 'wavss_a_non_directional_frequency'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = 'm2 Hz-1'
var_list[5].units = 'Hz'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_motion'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Motion' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_motion_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_time_samples'
var_list[2].name = 'initial_time'
var_list[3].name = 'time_spacing'
var_list[4].name = 'solution_found'
var_list[5].name = 'heave_offset_array'
var_list[6].name = 'north_offset_array'
var_list[7].name = 'east_offset_array'
var_list[8].name = 'wavss_a_buoymotion_time'
var_list[9].name = 'wavss_a_magcor_buoymotion_x'
var_list[10].name = 'wavss_a_magcor_buoymotion_y'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'sec'
var_list[3].units = 'sec'
var_list[4].units = '1'
var_list[5].units = 'm'
var_list[6].units = 'm'
var_list[7].units = 'm'
var_list[8].units = 'seconds since 1900-01-01'
var_list[9].units = 'm'
var_list[10].units = 'm'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/telemetered/wavss_a_dcl_fourier'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'WAVSS_Fourier' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/05-WAVSSA000/recovered_host/wavss_a_dcl_fourier_recovered'
var_list[0].name = 'time'
var_list[1].name = 'number_bands'
var_list[2].name = 'initial_frequency'
var_list[3].name = 'frequency_spacing'
var_list[4].name = 'number_directional_bands'
var_list[5].name = 'initial_directional_frequency'
var_list[6].name = 'directional_frequency_spacing'
var_list[7].name = 'fourier_coefficient_2d_array'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = '1'
var_list[2].units = 'Hz'
var_list[3].units = 'Hz'
var_list[4].units = '1'
var_list[5].units = 'Hz'
var_list[6].units = 'Hz'
var_list[7].units = '1'
    #PCO2A - Telemetered
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/SBD12/04-PCO2AA000/telemetered/pco2a_a_dcl_instrument_water'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
#PCO2A
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'PCO2A' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/SBD12/04-PCO2AA000/recovered_host/pco2a_a_dcl_instrument_water_recovered'
var_list[0].name = 'time'
var_list[1].name = 'partial_pressure_co2_ssw'
var_list[2].name = 'partial_pressure_co2_atm'
var_list[3].name = 'pco2_co2flux'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'uatm'
var_list[2].units = 'uatm'
var_list[3].units = 'mol m-2 s-1'
#FDCHP
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/SBD12/08-FDCHPA000/recovered_inst/fdchp_a_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/08-FDCHPA000/telemetered/fdchp_a_dcl_instrument'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'FDCHP' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/08-FDCHPA000/recovered_host/fdchp_a_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[0].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
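#METBK1-hr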
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP03ISSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP04OSSM' and node == 'BUOY' and instrument_class == 'METBK1-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/SBD11/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
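#METBK2-hr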
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2-hr' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/telemetered/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
elif platform_name == 'CP01CNSM' and node == 'BUOY' and instrument_class == 'METBK2-hr' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/SBD12/06-METBKA000/recovered_host/metbk_hourly'
var_list[0].name = 'met_timeflx'
var_list[1].name = 'met_rainrte'
var_list[2].name = 'met_buoyfls'
var_list[3].name = 'met_buoyflx'
var_list[4].name = 'met_frshflx'
var_list[5].name = 'met_heatflx'
var_list[6].name = 'met_latnflx'
var_list[7].name = 'met_mommflx'
var_list[8].name = 'met_netlirr'
var_list[9].name = 'met_rainflx'
var_list[10].name = 'met_sensflx'
var_list[11].name = 'met_sphum2m'
var_list[12].name = 'met_stablty'
var_list[13].name = 'met_tempa2m'
var_list[14].name = 'met_tempskn'
var_list[15].name = 'met_wind10m'
var_list[16].name = 'met_netsirr_hourly'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[6].data = np.array([])
var_list[7].data = np.array([])
var_list[8].data = np.array([])
var_list[9].data = np.array([])
var_list[10].data = np.array([])
var_list[11].data = np.array([])
var_list[12].data = np.array([])
var_list[13].data = np.array([])
var_list[14].data = np.array([])
var_list[15].data = np.array([])
var_list[16].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'mm/hr'
var_list[2].units = 'W/m2'
var_list[3].units = 'W/m2'
var_list[4].units = 'mm/hr'
var_list[5].units = 'W/m2'
var_list[6].units = 'W/m2'
var_list[7].units = 'N/m2'
var_list[8].units = 'W/m2'
var_list[9].units = 'W/m2'
var_list[10].units = 'W/m2'
var_list[11].units = 'g/kg'
var_list[12].units = 'unitless'
var_list[13].units = 'degC'
var_list[14].units = 'degC'
var_list[15].units = 'm/s'
var_list[16].units = 'W/m2'
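#CTD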
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP01CNSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP01CNSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP01CNSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP01CNSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP03ISSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP03ISSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP03ISSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP03ISSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/RID27/03-CTDBPC000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/RID27/03-CTDBPC000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'NSIF' and instrument_class == 'CTD' and method == 'RecoveredInst':
uframe_dataset_name = 'CP04OSSM/RID27/03-CTDBPC000/recovered_inst/ctdbp_cdef_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'ctdbp_seawater_temperature'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'ctdbp_seawater_pressure'
var_list[5].name = 'ctdbp_seawater_conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'Telemetered':
uframe_dataset_name = 'CP04OSSM/MFD37/03-CTDBPE000/telemetered/ctdbp_cdef_dcl_instrument'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
var_list[4].data = np.array([])
var_list[5].data = np.array([])
var_list[0].units = 'seconds since 1900-01-01'
var_list[1].units = 'degC'
var_list[2].units = 'unitless'
var_list[3].units = 'kg/m3'
var_list[4].units = 'dbar'
var_list[5].units = 'S/m'
elif platform_name == 'CP04OSSM' and node == 'MFN' and instrument_class == 'CTD' and method == 'RecoveredHost':
uframe_dataset_name = 'CP04OSSM/MFD37/03-CTDBPE000/recovered_host/ctdbp_cdef_dcl_instrument_recovered'
var_list[0].name = 'time'
var_list[1].name = 'temp'
var_list[2].name = 'practical_salinity'
var_list[3].name = 'density'
var_list[4].name = 'pressure'
var_list[5].name = 'conductivity'
var_list[0].data = np.array([])
var_list[1].data = np.array([])
var_list[2].data = np.array([])
var_list[3].data = np.array([])
# -*- coding: utf-8 -*-
""" Interface to numerical ODE solvers.
"""
import sys
# Compatibility - timer functions
# In Python 3, the more accurate `time.process_time()` method is available. But
# for legacy support, can default instead to `time.clock()`
import time
import warnings
from abc import ABCMeta, abstractmethod
import numpy as np
if sys.version_info[0] < 3:
timer = time.clock
else:
timer = time.process_time
from . import constants as c
available_integrators = ["odeint"]
try:
from odespy.odepack import Lsode, Lsoda
from odespy import Vode
available_integrators.extend(["lsode", "lsoda", "vode"])
except ImportError:
warnings.warn(
"Could not import odespy package; "
"invoking the 'lsoda' or 'lsode' options will fail!"
)
try:
from assimulo.problem import Explicit_Problem
from assimulo.solvers.sundials import CVode, CVodeError
# from assimulo.solvers.odepack import LSODAR
from assimulo.exception import TimeLimitExceeded
available_integrators.extend(["cvode", "lsodar"])
except ImportError:
warnings.warn(
"Could not import Assimulo; " "invoking the CVode solver will fail!"
)
__all__ = ["Integrator"]
state_atol = [1e-4, 1e-4, 1e-4, 1e-10, 1e-10, 1e-4, 1e-8]
state_rtol = 1e-7
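# Default solver tolerances for the parcel-model state vector: per-component
# absolute tolerances (state_atol) and a single relative tolerance (state_rtol).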
class Integrator(metaclass=ABCMeta):
"""
Container class for the various integrators to use in the parcel model.
All defined integrators should return a tuple whose first value ``x`` is either
``None`` or a vector containing the parcel state at all requested timestamps, and
whose second value is a boolean indicating whether the model run was successful.
"""
def __init__(
self, rhs, output_dt, solver_dt, y0, args, t0=0.0, console=False
):
self.output_dt = output_dt
self.solver_dt = solver_dt
self.y0 = y0
self.t0 = t0
self.console = console
self.args = args
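# Adapt the user-supplied RHS, written scipy-style as f(y, t, *args), to the
# (t, y) call signature expected by the solver backends.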
def _user_rhs(t, y):
dode_dt = rhs(y, t, *self.args)
return dode_dt
self.rhs = _user_rhs
@abstractmethod
def integrate(self, t_end, **kwargs):
pass
@abstractmethod
def __repr__(self):
pass
@staticmethod
def solver(method):
""" Maps a solver name to a function.
"""
solvers = {
# # SciPy interfaces
# 'odeint': Integrator._solve_odeint,
# # ODESPY interfaces
# 'lsoda': partial(Integrator._solve_with_odespy, method='lsoda'),
# 'lsode': partial(Integrator._solve_with_odespy, method='lsode'),
# 'vode': partial(Integrator._solve_with_odespy, method='vode'),
# # Assimulo interfaces
# 'cvode': partial(Integrator._solve_with_assimulo, method='cvode'),
# 'lsodar': partial(Integrator._solve_with_assimulo, method='lsodar'),
"cvode": CVODEIntegrator
}
if method in available_integrators:
return solvers[method]
else:
# solver name is not defined, or the module containing
# it is unavailable
raise ValueError("integrator for %s is not available" % method)
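# Minimal usage sketch (hypothetical RHS function, state vector and timings;
# assumes the CVode backend is importable):
#     IntegratorCls = Integrator.solver("cvode")
#     integ = IntegratorCls(rhs, output_dt=1.0, solver_dt=10.0, y0=y0, args=rhs_args)
#     x, success = integ.integrate(t_end=1800.0)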
class ExtendedProblem(Explicit_Problem):
""" This extension of the Assimulo 'Explicit_Problem' class
encodes some of the logic particular to the parcel model simulation,
specifically rules for terminating the simulation and detecting
events such as the maximum supersaturation occurring """
name = "Parcel model ODEs"
sw0 = [True, False]  # [normal-integration switch, past-cutoff switch]
t_cutoff = 1e5
dS_dt = 1.0
def __init__(self, rhs_fcn, rhs_args, terminate_depth, *args, **kwargs):
self.rhs_fcn = rhs_fcn
self.rhs_args = rhs_args
self.V = rhs_args[3]
self.terminate_time = terminate_depth / self.V
super(Explicit_Problem, self).__init__(*args, **kwargs)
def rhs(self, t, y, sw):
if not sw[1]: # Normal integration before cutoff
dode_dt = self.rhs_fcn(t, y) # FROM THE CVODEINTEGRATOR
self.dS_dt = dode_dt[c.N_STATE_VARS - 1]
else:
# There may be a bug here. I can't recall when this branch is ever run; it
# seems to zero out the state derivative, but to construct that array it
# should be looking at self.rhs_args, not self.args (which isn't saved).
# I'm going to comment out this line which I think is broken and replace it
# with the correct one for now, but leave a record of this change
# <NAME> <<EMAIL>> - 2/15/2016
# dode_dt = np.zeros(c.N_STATE_VARS + self.args[0]) # FROM INIT ARGS
dode_dt = np.zeros(c.N_STATE_VARS + self.rhs_args[0])
# -*- coding: utf-8 -*-
"""
Authors
-------
<NAME> <<EMAIL>>
About
-----
Functions to handle command-line input
Known Issues
------------
None
"""
# General imports
import os
import sys
import time
from functools import partial
import shutil
if os.path.exists(os.path.join(os.getcwd(), 'config')): # You're 1 up from config?
sys.path.insert(0, os.path.join(os.getcwd(), 'config'))
else: # You're working from a directory parallel with config?
sys.path.insert(0, os.path.abspath(os.path.join(os.getcwd(), '../config')))
import pickle
# Tractor imports
from tractor import NCircularGaussianPSF, PixelizedPSF, Image, Tractor, FluxesPhotoCal, NullWCS, ConstantSky, EllipseESoft, Fluxes, PixPos
from tractor.galaxy import ExpGalaxy, DevGalaxy, FixedCompositeGalaxy, SoftenedFracDev, GalaxyShape
from tractor.sersic import SersicIndex, SersicGalaxy
from tractor.sercore import SersicCoreGalaxy
from tractor.pointsource import PointSource
from tractor.psfex import PixelizedPsfEx, PsfExModel
from tractor.psf import HybridPixelizedPSF
# Miscellaneous science imports
from astropy.io import fits, ascii
from astropy.table import Table, Column, vstack, join
from astropy.wcs import WCS
import astropy.units as u
import numpy as np
from functools import partial
import matplotlib.pyplot as plt
import weakref
from scipy import stats
import pathos as pa
from astropy.coordinates import SkyCoord
# import sfdmap
# Local imports
from .brick import Brick
from .mosaic import Mosaic
from .utils import header_from_dict, SimpleGalaxy
from .visualization import plot_background, plot_blob, plot_blobmap, plot_brick, plot_mask
try:
import config as conf
except:
raise RuntimeError('Cannot find configuration file!')
# m = sfdmap.SFDMap(conf.SFDMAP_DIR)
# Make sure no interactive plotting is going on.
plt.ioff()
import warnings
warnings.filterwarnings("ignore")
print(
f"""
====================================================================
________ _ _______ ____ ____ ________ _______
|_ __ | / \ |_ __ \ |_ \ / _||_ __ ||_ __ \
| |_ \_| / _ \ | |__) | | \/ | | |_ \_| | |__) |
| _| / ___ \ | __ / | |\ /| | | _| _ | __ /
_| |_ _/ / \ \_ _| | \ \_ _| |_\/_| |_ _| |__/ | _| | \ \_
|_____||____| |____||____| |___||_____||_____||________||____| |___|
--------------------------------------------------------------------
M O D E L P H O T O M E T R Y W I T H T H E T R A C T O R
--------------------------------------------------------------------
(C) 2020 -- <NAME> (DAWN, University of Copenhagen)
====================================================================
CONSOLE_LOGGING_LEVEL ..... {conf.CONSOLE_LOGGING_LEVEL}
LOGFILE_LOGGING_LEVEL ..... {conf.LOGFILE_LOGGING_LEVEL}
PLOT ...................... {conf.PLOT}
NTHREADS .................. {conf.NTHREADS}
OVERWRITE ................. {conf.OVERWRITE}
"""
)
print('Starting up logging system...')
# Start the logging
import logging.config
logger = logging.getLogger('farmer')
if not len(logger.handlers):
if conf.LOGFILE_LOGGING_LEVEL is not None:
logging_level = logging.getLevelName(conf.LOGFILE_LOGGING_LEVEL)
else:
logging_level = logging.DEBUG
logger.setLevel(logging_level)
formatter = logging.Formatter('[%(asctime)s] %(name)s :: %(levelname)s - %(message)s', '%H:%M:%S')
# Logging to the console at logging level
ch = logging.StreamHandler()
ch.setLevel(logging.getLevelName(conf.CONSOLE_LOGGING_LEVEL))
ch.setFormatter(formatter)
logger.addHandler(ch)
if (conf.LOGFILE_LOGGING_LEVEL is None) | (not os.path.exists(conf.LOGGING_DIR)):
print('Logging information will stream only to console.\n')
else:
# create file handler which logs even debug messages
logging_path = os.path.join(conf.LOGGING_DIR, 'logfile.log')
print(f'Logging information will stream to console and {logging_path}\n')
# If overwrite is on, remove old logger
if conf.OVERWRITE & os.path.exists(logging_path):
print('WARNING -- Existing logfile will be overwritten.')
os.remove(logging_path)
fh = logging.FileHandler(logging_path)
fh.setLevel(logging.getLevelName(conf.LOGFILE_LOGGING_LEVEL))
fh.setFormatter(formatter)
logger.addHandler(fh)
# When a user invokes the interface, first check the translation file
# Optionally, tell the user.
# Try to import the translate file from its usual spot first.
try:
from translate import translate
logger.info(f'interface.translation :: Imported translate file with {len(translate.keys())} entries.')
if len(conf.BANDS) != len(translate.keys()):
logger.warning(f'Configuration file only includes {len(conf.BANDS)} entries!')
# I have nicknames in the config, I need the raw names for file I/O
mask = np.ones_like(conf.BANDS, dtype=bool)
for i, band in enumerate(conf.BANDS):
if band not in translate.keys():
logger.warning(f'Could not find {band} in translate file!')
mask[i] = False
# Re-assign bands and rawbands in config object
logger.debug(f'Assigning nicknames to raw image names:')
conf.BANDS = list(np.array(conf.BANDS)[mask])
conf.RAWBANDS = conf.BANDS.copy()
for i, band in enumerate(conf.RAWBANDS):
conf.RAWBANDS[i] = translate[band]
logger.debug(f' {i+1} :: {conf.RAWBANDS[i]} --> {conf.BANDS[i]}')
# The translation file could not be found, so make a scene.
except:
logger.warning('interface.translation :: WARNING - Could not import translate file! Will use config instead.')
logger.info('interface.translation :: Image names must be < 50 characters (FITS standard) - checking...')
# I have raw names, I need shortened raw names (i.e. nicknames)
conf.RAWBANDS = conf.BANDS.copy()
count_short = 0
for i, band in enumerate(conf.RAWBANDS):
if len(band) > 50:
conf.BANDS[i] = band[:50]
logger.debug(f' {i+1} :: {band} --> {conf.BANDS[i]}')
count_short += 1
logger.info(f'interface.translation :: Done checking. Shortened {count_short} image names.')
def make_directories():
"""Uses the existing config file to set up the directories. Must call from config.py directory!
"""
import pathlib
logger.info('Making directories!')
dir_dict = {'IMAGE_DIR': conf.IMAGE_DIR,
'PSF_DIR': conf.PSF_DIR,
'BRICK_DIR': conf.BRICK_DIR,
'INTERIM_DIR': conf.INTERIM_DIR,
'PLOT_DIR': conf.PLOT_DIR,
'CATALOG_DIR': conf.CATALOG_DIR,
'LOGGING_DIR': conf.LOGGING_DIR
}
for key in dir_dict.keys():
path = dir_dict[key]
if os.path.exists(path): # too important to allow overwrite...
logger.warning(f'{key} already exists under {path}!')
for i in dir_dict.keys():
if path == dir_dict[i]:
logger.info(f'{key} was already created for {i}...OK')
break
else:
logger.info(f'{key} --> {path}')
pathlib.Path(path).mkdir(parents=True, exist_ok=True)
def make_psf(image_type=conf.MULTIBAND_NICKNAME, band=None, sextractor_only=False, psfex_only=False, override=conf.OVERWRITE):
""" This is where we automatically construct the PSFs for Farmer.
Step 1. Run sextractor_only=True to obtain the PSF candidates
Step 2. Using the output plot, determine the selection box for the stars
Step 3. Run psfex_only=True to construct the PSF.
See config file to set box dimensions, psf spatial sampling, etc.
"""
# If the user asked to make a PSF for the detection image, tell them we don't do that
if image_type is conf.DETECTION_NICKNAME:
raise ValueError('Farmer does not use a PSF to perform detection!')
# Else if the user asks for a PSF to be made for the modeling band
elif image_type is conf.MODELING_NICKNAME:
# Make the Mosaic
logger.info(f'Making PSF for {conf.MODELING_NICKNAME}')
modmosaic = Mosaic(conf.MODELING_NICKNAME, modeling=True, mag_zeropoint=conf.MODELING_ZPT, skip_build=True)
# Make the PSF
logger.info(f'Mosaic loaded for {conf.MODELING_NICKNAME}')
modmosaic._make_psf(xlims=conf.MOD_REFF_LIMITS, ylims=conf.MOD_VAL_LIMITS, override=override, sextractor_only=sextractor_only, psfex_only=psfex_only)
logger.info(f'PSF made successfully for {conf.MODELING_NICKNAME}')
# Else if the user asks for a PSF in one of the bands
elif image_type is conf.MULTIBAND_NICKNAME:
# Sanity check
if band not in conf.BANDS:
raise ValueError(f'{band} is not a valid band nickname!')
# Use all bands or just one?
if band is not None:
sbands = [band,]
else:
sbands = conf.BANDS
# Loop over bands
for i, band in enumerate(sbands):
# Figure out PS selection box position and zeropoint
idx_band = np.array(conf.BANDS) == band
multi_xlims = np.array(conf.MULTIBAND_REFF_LIMITS)[idx_band][0]
multi_ylims = np.array(conf.MULTIBAND_VAL_LIMITS)[idx_band][0]
mag_zpt = np.array(conf.MULTIBAND_ZPT)[idx_band][0]
# Make the Mosaic
logger.info(f'Making PSF for {band}')
bandmosaic = Mosaic(band, mag_zeropoint = mag_zpt, skip_build=True)
# Make the PSF
logger.info(f'Mosaic loaded for {band}')
bandmosaic._make_psf(xlims=multi_xlims, ylims=multi_ylims, override=override, sextractor_only=sextractor_only, psfex_only=psfex_only)
if not sextractor_only:
logger.info(f'PSF made successfully for {band}')
else:
logger.info(f'interface.make_psf :: SExtraction complete for {band}')
return
def make_bricks(image_type=conf.MULTIBAND_NICKNAME, band=None, brick_id=None, insert=False, skip_psf=True, max_bricks=None, make_new_bricks=False):
""" Stage 1. Here we collect the detection, modelling, and multiband images for processing. We may also cut them up!
NB: PSFs can be automatically made at this stage too, assuming you've determined your PSF selection a priori.
"""
# Make bricks for the detection image
if (image_type==conf.DETECTION_NICKNAME) | (image_type is None):
# Detection
logger.info('Making mosaic for detection...')
detmosaic = Mosaic(conf.DETECTION_NICKNAME, detection=True)
if conf.NTHREADS > 1:
logger.warning('Parallelization of brick making is currently not supported. Continuing anyways...')
# BUGGY DUE TO MEM ALLOC
# logger.info('Making bricks for detection (in parallel)')
# pool = mp.ProcessingPool(processes=conf.NTHREADS)
# pool.map(partial(detmosaic._make_brick, detection=True, overwrite=True), np.arange(0, detmosaic.n_bricks()))
logger.info('Making bricks for detection (in serial)')
for bid in np.arange(1, detmosaic.n_bricks()+1):
detmosaic._make_brick(bid, detection=True, overwrite=True)
# Make bricks for the modeling image
elif (image_type==conf.MODELING_NICKNAME) | (image_type is None):
# Modeling
logger.info('Making mosaic for modeling...')
modmosaic = Mosaic(conf.MODELING_NICKNAME, modeling=True)
# The user wants PSFs on the fly
if not skip_psf:
mod_xlims = np.array(conf.MOD_REFF_LIMITS)
mod_ylims = np.array(conf.MOD_VAL_LIMITS)
modmosaic._make_psf(xlims=mod_xlims, ylims=mod_ylims)
# Make bricks in parallel
if (conf.NTHREADS > 1) & (brick_id is None):
logger.warning('Parallelization of brick making is currently not supported. Continuing anyways...')
# BUGGY DUE TO MEM ALLOC
# if conf.VERBOSE: print('Making bricks for detection (in parallel)')
# pool = mp.ProcessingPool(processes=conf.NTHREADS)
# pool.map(partial(modmosaic._make_brick, detection=True, overwrite=True), np.arange(0, modmosaic.n_bricks()))
# # Make bricks in serial
# else:
if brick_id is not None:
logger.info(f'Making brick #{brick_id} for modeling (in serial)')
modmosaic._make_brick(brick_id, modeling=True, overwrite=True)
else:
logger.info('Making bricks for modeling (in serial)')
if max_bricks is None:
max_bricks = modmosaic.n_bricks()
for bid in np.arange(1, max_bricks+1):
modmosaic._make_brick(bid, modeling=True, overwrite=True)
# Make bricks for one or more multiband images
elif (image_type==conf.MULTIBAND_NICKNAME) | (image_type is None):
# One variable list
if band is not None:
try:
if len(band) > 0:
sbands = band
else:
sbands = conf.BANDS
except:
sbands = [band,]
else:
sbands = conf.BANDS
# In serial, loop over images
for i, sband in enumerate(sbands):
# Assume we can overwrite files unless insertion is explicit
# First image w/o insertion will make new file anyways
if make_new_bricks:
overwrite = True
if insert | (i > 0):
overwrite = False
else:
overwrite=False
# Build the mosaic
logger.info(f'Making mosaic for image {sband}...')
bandmosaic = Mosaic(sband)
# The user wants PSFs made on the fly
if not skip_psf:
idx_band = np.array(conf.BANDS) == sband
multi_xlims = np.array(conf.MULTIBAND_REFF_LIMITS)[idx_band][0]
multi_ylims = np.array(conf.MULTIBAND_VAL_LIMITS)[idx_band][0]
bandmosaic._make_psf(xlims=multi_xlims, ylims=multi_ylims)
# Make bricks in parallel
if (conf.NTHREADS > 1) & (brick_id is None):
logger.warning('Parallelization of brick making is currently not supported. Continuing anyways...')
# logger.info(f'Making bricks for band {sband} (in parallel)')
# with pa.pools.ProcessPool(ncpus=conf.NTHREADS) as pool:
# logger.info(f'Parallel processing pool initialized with {conf.NTHREADS} threads.')
# pool.uimap(partial(bandmosaic._make_brick, detection=False, overwrite=overwrite), np.arange(0, bandmosaic.n_bricks()))
# logger.info('Parallel processing complete.')
# Make bricks in serial
# else:
if brick_id is not None:
logger.info(f'Making brick #{brick_id} for multiband (in serial)')
bandmosaic._make_brick(brick_id, detection=False, overwrite=overwrite)
else:
logger.info(f'Making bricks for band {sband} (in serial)')
if max_bricks is None:
max_bricks = bandmosaic.n_bricks()
for bid in np.arange(1, max_bricks+1):
bandmosaic._make_brick(bid, detection=False, overwrite=overwrite)
# image type is invalid
else:
raise RuntimeError(f'{image_type} is an unrecognized nickname (see {conf.DETECTION_NICKNAME}, {conf.MODELING_NICKNAME}, {conf.MULTIBAND_NICKNAME})')
return
def runblob(blob_id, blobs, modeling=None, catalog=None, plotting=0, source_id=None, source_only=False, blob_only=False):
""" Essentially a private function. Runs each individual blob and handles the bulk of the work. """
# if conf.NTHREADS != 0:
# fh = logging.FileHandler(f'B{blob_id}.log')
# fh.setLevel(logging.getLevelName(conf.LOGFILE_LOGGING_LEVEL))
# formatter = logging.Formatter('[%(asctime)s] %(name)s :: %(levelname)s - %(message)s', '%H:%M:%S')
# fh.setFormatter(formatter)
# logger = pathos.logger(level=logging.getLevelName(conf.LOGFILE_LOGGING_LEVEL), handler=fh)
logger = logging.getLogger(f'farmer.blob.{blob_id}')
logger.info(f'Starting on Blob #{blob_id}')
modblob = None
fblob = None
tstart = time.time()
logger.debug('Making weakref proxies of blobs')
if modeling is None:
modblob, fblob = weakref.proxy(blobs[0]), weakref.proxy(blobs[1])
elif modeling:
modblob = weakref.proxy(blobs)
else:
fblob = weakref.proxy(blobs)
logger.debug(f'Weakref made ({time.time() - tstart:3.3f})s')
# Make blob with modeling image
if modblob is not None:
logger.debug(f'Making blob with {conf.MODELING_NICKNAME}')
modblob.logger = logger
if modblob.rejected:
logger.info('Blob has been rejected!')
# if conf.NTHREADS != 0:
# logger.removeHandler(fh)
catout = modblob.bcatalog.copy()
del modblob
return catout
# If the user wants to just model a specific source...
if source_only & (source_id is not None):
logger.info(f'Preparing to model single source: {source_id}')
sid = modblob.bcatalog['source_id']
modblob.bcatalog = modblob.bcatalog[sid == source_id]
modblob.n_sources = len(modblob.bcatalog)
modblob.mids = np.ones(modblob.n_sources, dtype=int)
modblob.model_catalog = np.zeros(modblob.n_sources, dtype=object)
modblob.solution_catalog = np.zeros(modblob.n_sources, dtype=object)
modblob.solved_chisq = np.zeros(modblob.n_sources)
modblob.solved_bic = np.zeros(modblob.n_sources)
modblob.solution_chisq = np.zeros(modblob.n_sources)
modblob.tr_catalogs = np.zeros((modblob.n_sources, 3, 2), dtype=object)
modblob.chisq = np.zeros((modblob.n_sources, 3, 2))
modblob.rchisq = np.zeros((modblob.n_sources, 3, 2))
modblob.bic = np.zeros((modblob.n_sources, 3, 2))
assert(len(modblob.bcatalog) > 0)
if not blob_only:
if (conf.MODEL_PHOT_MAX_NBLOB > 0) & (modblob.n_sources > conf.MODEL_PHOT_MAX_NBLOB):
logger.info('Number of sources exceeds set limit. Skipping!')
# if conf.NTHREADS != 0:
# logger.removeHandler(fh)
catout = modblob.bcatalog.copy()
catout['x'] += modblob.subvector[1]
catout['y'] += modblob.subvector[0]
del modblob
return catout
# Run models
if conf.ITERATIVE_SUBTRACTION_THRESH is None:
iter_thresh = 1E31
else:
iter_thresh = conf.ITERATIVE_SUBTRACTION_THRESH
if (conf.ITERATIVE_SUBTRACTION_THRESH is not None) & (modblob.n_sources >= iter_thresh):
logger.debug(f'Performing iterative subtraction for {conf.MODELING_NICKNAME}')
astart = time.time()
for i, band in enumerate(modblob.bands):
band_name = band[len(conf.MODELING_NICKNAME)+1:]
zpt = conf.MULTIBAND_ZPT[modblob._band2idx(band_name)]
# sorting order
avg_flux = np.zeros(modblob.n_sources)
for i, item in enumerate(modblob.bcatalog):
rawfluxes = np.array([np.sum(img[modblob.segmap == item['source_id']]) for img in modblob.images])
fluxes = rawfluxes * 10**(-0.4 * (zpt - 23.9))
avg_flux[i] = np.mean(fluxes, 0)
index = np.argsort(avg_flux)[::-1] # sort by brightness
copy_images = modblob.images.copy()
import copy
modblob.solution_model_images = np.zeros_like(modblob.images)
for i, idx in enumerate(index):
logger.debug(f" ({i+1}/{modblob.n_sources}) Attemping to model source #{item['source_id']}")
itemblob = copy.deepcopy(modblob)
itemblob.bcatalog = Table(modblob.bcatalog[idx])
itemblob.n_sources = 1
itemblob.mids = np.ones(itemblob.n_sources, dtype=int)
itemblob.model_catalog = np.zeros(itemblob.n_sources, dtype=object)
itemblob.solution_catalog = np.zeros(itemblob.n_sources, dtype=object)
itemblob.solved_chisq = np.zeros(itemblob.n_sources)
itemblob.solved_bic = np.zeros(itemblob.n_sources)
itemblob.solution_chisq = np.zeros(itemblob.n_sources)
itemblob.tr_catalogs = np.zeros((itemblob.n_sources, 3, 2), dtype=object)
itemblob.chisq = np.zeros((itemblob.n_sources, 3, 2))
itemblob.rchisq = np.zeros((itemblob.n_sources, 3, 2))
itemblob.bic = np.zeros((itemblob.n_sources, 3, 2))
itemblob.images = copy_images
itemblob._is_itemblob = True
logger.debug(f'Staging images for {conf.MODELING_NICKNAME} -- blob #{modblob.blob_id}')
itemblob.stage_images()
logger.debug(f'Images staged. ({time.time() - astart:3.3f})s')
astart = time.time()
logger.debug(f'Modeling images for {conf.MODELING_NICKNAME} -- blob #{modblob.blob_id}')
status = itemblob.tractor_phot()
if status:
logger.debug(f'Morphology determined. ({time.time() - astart:3.3f})s')
logger.debug(f'Transferring results back to parent blob...')
#transfer back
modblob.bcatalog[idx] = itemblob.bcatalog[0]
modblob.solution_model_images += itemblob.solution_model_images
# subtract model from image
copy_images -= itemblob.solution_model_images
else:
logger.warning(f'Morphology failed! ({time.time() - astart:3.3f})s')
# # if conf.NTHREADS != 0:
# # logger.removeHandler(fh)
# catout = modblob.bcatalog.copy()
# catout['x'] += modblob.subvector[1]
# catout['y'] += modblob.subvector[0]
# del modblob
# return catout
else:
astart = time.time()
logger.debug(f'Staging images for {conf.MODELING_NICKNAME}')
modblob.stage_images()
logger.debug(f'Images staged. ({time.time() - astart:3.3f})s')
astart = time.time()
logger.debug(f'Modeling images for {conf.MODELING_NICKNAME}')
status = modblob.tractor_phot()
if not status:
logger.warning(f'Morphology failed! ({time.time() - astart:3.3f})s')
# if conf.NTHREADS != 0:
# logger.removeHandler(fh)
catout = modblob.bcatalog.copy()
catout['x'] += modblob.subvector[1]
catout['y'] += modblob.subvector[0]
del modblob
return catout
logger.debug(f'Morphology determined. ({time.time() - astart:3.3f})s')
# Run follow-up phot
if conf.DO_APPHOT:
for img_type in ('image', 'model', 'isomodel', 'residual', 'weight', 'chisq',):
for band in modblob.bands:
if True: #try:
modblob.aperture_phot(band, img_type, sub_background=conf.SUBTRACT_BACKGROUND)
else:
logger.warning(f'Aperture photometry FAILED for {band} {img_type}. Likely a bad blob.')
if conf.DO_SEPHOT:
for img_type in ('image', 'model', 'isomodel', 'residual'):
for band in modblob.bands:
try:
modblob.sep_phot(band, img_type, centroid='MODEL', sub_background=conf.SUBTRACT_BACKGROUND)
modblob.sep_phot(band, img_type, centroid='DETECTION', sub_background=conf.SUBTRACT_BACKGROUND)
except:
logger.warning(f'SEP photometry FAILED for {band} {img_type}. Likely a bad blob.')
if conf.DO_SEXPHOT:
for band in modblob.bands:
try:
modblob.residual_phot(band, sub_background=conf.SUBTRACT_BACKGROUND)
except:
logger.warning(f'SEP residual photometry FAILED. Likely a bad blob.')
duration = time.time() - tstart
logger.info(f'Solution for Blob #{modblob.blob_id} (N={modblob.n_sources}) arrived at in {duration:3.3f}s ({duration/modblob.n_sources:2.2f}s per src)')
catout = modblob.bcatalog.copy()
del modblob
#################### FORCED PHOTOMETRY ################################
if fblob is not None:
# make new blob with band information
logger.debug(f'Making blob with {conf.MULTIBAND_NICKNAME}')
fblob.logger = logger
if fblob.rejected:
logger.info('Blob has been rejected!')
# if conf.NTHREADS != 0:
# logger.removeHandler(fh)
catout = fblob.bcatalog.copy()
del fblob
return catout
astart = time.time()
status = fblob.stage_images()
if not status:
# if conf.NTHREADS != 0:
# logger.removeHandler(fh)
catout = fblob.bcatalog.copy()
del fblob
return catout
logger.info(f'{len(fblob.bands)} images staged. ({time.time() - astart:3.3f})s')
astart = time.time()
if modblob is not None:
fblob.model_catalog = modblob.solution_catalog.copy()
fblob.position_variance = modblob.position_variance.copy()
fblob.parameter_variance = modblob.parameter_variance.copy()
logger.info(f'Solution parameters transferred. ({time.time() - astart:3.3f})s')
else:
if catalog is None:
raise ValueError('Input catalog not supplied!')
else:
blobmask = np.ones(len(catalog))
if source_id is not None:
# If the user wants to just model a specific source...
logger.info(f'Preparing to force single source: {source_id}')
sid = catalog['source_id']
bid = catalog['blob_id']
fblob.bcatalog = catalog[(sid == source_id) & (bid == blob_id)]
fblob.n_sources = len(fblob.bcatalog)
fblob.mids = np.ones(fblob.n_sources, dtype=int)
fblob.model_catalog = np.zeros(fblob.n_sources, dtype=object)
fblob.solution_catalog = np.zeros(fblob.n_sources, dtype=object)
fblob.solved_chisq = np.zeros(fblob.n_sources)
fblob.solved_bic = np.zeros(fblob.n_sources)
fblob.solution_chisq = np.zeros(fblob.n_sources)
fblob.tr_catalogs = np.zeros((fblob.n_sources, 3, 2), dtype=object)
fblob.chisq = np.zeros((fblob.n_sources, 3, 2))
fblob.rchisq = np.zeros((fblob.n_sources, 3, 2))
"""Defines the POVM class"""
#***************************************************************************************************
# Copyright 2015, 2019 National Technology & Engineering Solutions of Sandia, LLC (NTESS).
# Under the terms of Contract DE-NA0003525 with NTESS, the U.S. Government retains certain rights
# in this software.
# Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
# in compliance with the License. You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 or in the LICENSE file in the root pyGSTi directory.
#***************************************************************************************************
import collections as _collections
import itertools as _itertools
import numpy as _np
import warnings as _warnings
import functools as _functools
#from . import labeldicts as _ld
from . import modelmember as _gm
from . import spamvec as _sv
from . import operation as _op
from . import labeldicts as _ld
from .label import Label as _Label
from ..tools import matrixtools as _mt
from ..tools import basistools as _bt
from ..tools import optools as _gt
#Thoughts:
# what are POVM objs needed for?
# - construction of Effect vectors: allocating a pool of
# shared parameters that multiple SPAMVecs use
# - how should Model add items?
# "default allocator" inserts new params into _paramvec when gpindices is None
# (or is made None b/c parent is different) and sets gpindices accordingly
# Could an alternate allocator allocate a POVM, which asks for/presensts a
# block of indices, and after receiving this block adds effect vec to Model
# which use the indices in this block? - maybe when Model inserts a POVM
# it rebuilds paramvec as usual but doesn't insert it's effects into Model
# (maybe not really inserting but "allocating/integrating" it - meaning its
# gpindices is set) until after the POVM's block of indices is allocated?
# - maybe concept of "allocation" is a good one - meaning when an objects
# gpindices and parent are set, and there's room in the Model's _paramvec
# for the parameters.
# - currently, a gates are "allocated" by _rebuild_paramvec when their
# gpindices is None (if gpindices is not None, the indices can get
# "shifted" but not "allocated" (check this!)
# - maybe good to alert an object when it has be "allocated" to a Model;
# a LinearOperator may do nothing, but a POVM might then allocate its member effects.
# E.G: POVM created = creates objects all with None gpindices
# POVM assigned to a Model => Model allocates POVM & calls POVM.allocated_callback()
# POVM.allocated_callback() allocates (on behalf of Model b/c POVM owns those indices?) its member effects -
# maybe needs to add them to Model.effects so they're accounted for later & calls
# SPAMVec.allocated_callback()
# SPAMVec.allocated_callback() does nothing.
# - it seems good for Model to keep track directly of allocated preps, gates, & effects OR else it will need to alert
# objects when they're allocated indices shift so they can shift their member's
# indices... (POVM.shifted_callback())
# - at this point, could just add set_gpindices and shift_gpindices members to ModelMember, though not all indices
# necessarily shift by same amt...
# - grouping a set of effect vectors together for iterating
# over (just holding the names seems sufficient)
# Conclusions/philosophy: 12/8/2017
# - povms and instruments will hold their members, but member SPAMVec or LinearOperator objects
# will have the Model as their parent, and have gpindices which reference the Model.
# - it is the parent object's (e.g. a Model, POVM, or Instrument) which is responsible
# for setting the gpindices of its members. The gpindices is set via a property or method
# call, and parent objects will thereby set the gpindices of their contained elements.
#
def convert(povm, toType, basis, extra=None):
"""
TODO: update toType options
Convert POVM to a new type of parameterization, potentially
creating a new object. Raises ValueError for invalid conversions.
Parameters
----------
povm : POVM
POVM to convert
toType : {"full","TP","static","static unitary","H+S terms",
"H+S clifford terms","clifford"}
        The type of parameterization to convert to. See
:method:`Model.set_all_parameterizations` for more details.
basis : {'std', 'gm', 'pp', 'qt'} or Basis object
The basis for `povm`. Allowed values are Matrix-unit (std),
Gell-Mann (gm), Pauli-product (pp), and Qutrit (qt)
(or a custom basis object).
Returns
-------
POVM
        The converted POVM, usually a distinct
        object from the one passed as input.
"""
if toType in ("full", "static", "static unitary"):
converted_effects = [(lbl, _sv.convert(vec, toType, basis))
for lbl, vec in povm.items()]
return UnconstrainedPOVM(converted_effects)
elif toType == "TP":
if isinstance(povm, TPPOVM):
return povm # no conversion necessary
else:
converted_effects = [(lbl, _sv.convert(vec, "full", basis))
for lbl, vec in povm.items()]
return TPPOVM(converted_effects)
elif _gt.is_valid_lindblad_paramtype(toType):
# A LindbladPOVM needs a *static* base/reference POVM
# with the appropriate evotype. If we can convert `povm` to such a
# thing we win. (the error generator is initialized as just the identity below)
        nQubits = int(round(_np.log2(povm.dim) / 2.0))  # Lindblad ops always work on density-matrices, never states
bQubits = bool(_np.isclose(nQubits, _np.log2(povm.dim) / 2.0)) # integer # of qubits?
proj_basis = "pp" if (basis == "pp" or bQubits) else basis
_, evotype = _gt.split_lindblad_paramtype(toType)
if isinstance(povm, ComputationalBasisPOVM): # special easy case
assert(povm.nqubits == nQubits)
base_povm = ComputationalBasisPOVM(nQubits, evotype)
else:
base_items = [(lbl, _sv._convert_to_lindblad_base(Evec, "effect", evotype, basis))
for lbl, Evec in povm.items()]
base_povm = UnconstrainedPOVM(base_items)
# purevecs = extra if (extra is not None) else None # UNUSED
cls = _op.LindbladDenseOp if (povm.dim <= 64 and evotype == "densitymx") \
else _op.LindbladOp
povmNoiseMap = cls.from_operation_obj(_np.identity(povm.dim, 'd'), toType,
None, proj_basis, basis, truncate=True)
return LindbladPOVM(povmNoiseMap, base_povm, basis)
elif toType == "clifford":
if isinstance(povm, ComputationalBasisPOVM) and povm._evotype == "stabilizer":
return povm
#OLD
##Try to figure out whether this POVM acts on states or density matrices
#if any([ (isinstance(Evec,DenseSPAMVec) and _np.iscomplexobj(Evec.base)) # PURE STATE?
# for Evec in povm.values()]):
# nqubits = int(round(_np.log2(povm.dim)))
#else:
# nqubits = int(round(_np.log2(povm.dim))) // 2
#Assume `povm` already represents state-vec ops, since otherwise we'd
# need to change dimension
nqubits = int(round(_np.log2(povm.dim)))
#Check if `povm` happens to be a Z-basis POVM on `nqubits`
        v = (_np.array([1, 0], 'd'), _np.array([0, 1], 'd'))  # (v0, v1) single-qubit Z-basis effect vectors
"""
Test Surrogates Overview
========================
"""
# Author: <NAME> <<EMAIL>>
# License: new BSD
from PIL import Image
import numpy as np
import scripts.surrogates_overview as exo
import scripts.image_classifier as imgclf
import sklearn.datasets
import sklearn.linear_model
SAMPLES = 10
BATCH = 50
SAMPLE_IRIS = False
IRIS_SAMPLES = 50000
def test_bilmey_image():
"""Tests surrogate image bLIMEy."""
# Load the image
doggo_img = Image.open('surrogates_overview/img/doggo.jpg')
doggo_array = np.array(doggo_img)
# Load the classifier
clf = imgclf.ImageClassifier()
explain_classes = [('tennis ball', 852),
('golden retriever', 207),
('Labrador retriever', 208)]
    # Options for the occlusion colour and segmentation granularity (the classes
    # to explain are listed above)
colour_selection = {
i: i for i in ['mean', 'black', 'white', 'randomise-patch', 'green']
}
granularity_selection = {'low': 13, 'medium': 30, 'high': 50}
# Generate explanations
blimey_image_collection = {}
for gran_name, gran_number in granularity_selection.items():
blimey_image_collection[gran_name] = {}
for col_name in colour_selection:
blimey_image_collection[gran_name][col_name] = \
exo.build_image_blimey(
doggo_array,
clf.predict_proba,
explain_classes,
explanation_size=5,
segments_number=gran_number,
occlusion_colour=col_name,
samples_number=SAMPLES,
batch_size=BATCH,
random_seed=42)
exp = []
for gran_ in blimey_image_collection:
for col_ in blimey_image_collection[gran_]:
exp.append(blimey_image_collection[gran_][col_]['surrogates'])
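    # One surrogate explanation is collected per (granularity, colour) pair:
    # 3 granularities x 5 colours = 15 entries, compared against EXP_IMG below.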
assert len(exp) == len(EXP_IMG)
for e, E in zip(exp, EXP_IMG):
assert sorted(list(e.keys())) == sorted(list(E.keys()))
for key in e.keys():
assert e[key]['name'] == E[key]['name']
assert len(e[key]['explanation']) == len(E[key]['explanation'])
for e_, E_ in zip(e[key]['explanation'], E[key]['explanation']):
assert e_[0] == E_[0]
assert np.allclose(e_[1], E_[1], atol=.001, equal_nan=True)
def test_bilmey_tabular():
"""Tests surrogate tabular bLIMEy."""
# Load the iris data set
iris = sklearn.datasets.load_iris()
iris_X = iris.data # [:, :2] # take the first two features only
iris_y = iris.target
iris_labels = iris.target_names
iris_feature_names = iris.feature_names
label2class = {lab: i for i, lab in enumerate(iris_labels)}
# Fit the classifier
logreg = sklearn.linear_model.LogisticRegression(C=1e5)
logreg.fit(iris_X, iris_y)
    # Instances to explain, one per iris class
_dtype = iris_X.dtype
explained_instances = {
'setosa': np.array([5, 3.5, 1.5, 0.25]).astype(_dtype),
'versicolor': np.array([5.5, 2.75, 4.5, 1.25]).astype(_dtype),
'virginica': np.array([7, 3, 5.5, 2.25]).astype(_dtype)
}
petal_length_idx = iris_feature_names.index('petal length (cm)')
petal_length_bins = [1, 2, 3, 4, 5, 6, 7]
petal_width_idx = iris_feature_names.index('petal width (cm)')
petal_width_bins = [0, .5, 1, 1.5, 2, 2.5]
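    # Build every axis-aligned discretisation rectangle: each ordered pair of
    # petal-length bin edges crossed with each ordered pair of petal-width bin edges.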
discs_ = []
for i, ix in enumerate(petal_length_bins): # X-axis
for iix in petal_length_bins[i + 1:]:
for j, jy in enumerate(petal_width_bins): # Y-axis
for jjy in petal_width_bins[j + 1:]:
discs_.append({
petal_length_idx: [ix, iix],
petal_width_idx: [jy, jjy]
})
for inst_i in explained_instances:
for cls_i in iris_labels:
for disc_i, disc in enumerate(discs_):
inst = explained_instances[inst_i]
cls = label2class[cls_i]
exp = exo.build_tabular_blimey(
inst, cls, iris_X, iris_y, logreg.predict_proba, disc,
IRIS_SAMPLES, SAMPLE_IRIS, 42)
key = '{}&{}&{}'.format(inst_i, cls, disc_i)
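                # EXP_TAB keys follow the '<instance>&<class index>&<discretisation index>'
                # pattern, e.g. 'setosa&0&0'.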
exp_ = EXP_TAB[key]
assert exp['explanation'].shape[0] == exp_.shape[0]
assert np.allclose(
exp['explanation'], exp_, atol=.001, equal_nan=True)
EXP_IMG = [
{207: {'explanation': [(13, -0.24406872165780585),
(11, -0.20456180387430317),
(9, -0.1866779131424261),
(4, 0.15001224157793785),
(3, 0.11589480417160983)],
'name': 'golden retriever'},
208: {'explanation': [(13, -0.08395966359346249),
(0, -0.0644986107387837),
(9, 0.05845584633658977),
(1, 0.04369763085720947),
(11, -0.035958188394941866)],
'name': '<NAME>'},
852: {'explanation': [(13, 0.3463529698715463),
(11, 0.2678050131923326),
(4, -0.10639863421417416),
(6, 0.08345792378117327),
(9, 0.07366945242386444)],
'name': '<NAME>'}},
{207: {'explanation': [(13, -0.0624167912596456),
(7, 0.06083359545295548),
(3, 0.0495953943686462),
(11, -0.04819787147412231),
(2, -0.03858823761391199)],
'name': '<NAME>'},
208: {'explanation': [(13, -0.08408428146916162),
(7, 0.07704235920590158),
(3, 0.06646468388122273),
(11, -0.0638326572126609),
(2, -0.052621478002380796)],
'name': '<NAME>'},
852: {'explanation': [(11, 0.35248212611685886),
(13, 0.2516925608037859),
(2, 0.13682853028454384),
(9, 0.12930134856644754),
(6, 0.1257747954095489)],
'name': '<NAME>'}},
{207: {'explanation': [(3, 0.21351937934930917),
(10, 0.16933456312772083),
(11, -0.13447244552856766),
(8, 0.11058919217055371),
(2, -0.06269239798368743)],
'name': '<NAME>'},
208: {'explanation': [(8, 0.05995551486884414),
(9, -0.05375302972380482),
(11, -0.051997353324246445),
(6, 0.04213181405953071),
(2, -0.039169895361928275)],
'name': '<NAME>'},
852: {'explanation': [(7, 0.31382219776986503),
(11, 0.24126214884275987),
(13, 0.21075924370226598),
(2, 0.11937652039885377),
(8, -0.11911265319329697)],
'name': '<NAME>'}},
{207: {'explanation': [(3, 0.39254403293049134),
(9, 0.19357165018747347),
(6, 0.16592079671652987),
(0, 0.14042059731407297),
(1, 0.09793027079765507)],
'name': '<NAME>'},
208: {'explanation': [(9, -0.19351859273276703),
(1, -0.15262967987262344),
(3, 0.12205127112235375),
(2, 0.11352141032313934),
(6, -0.11164209893429898)],
'name': '<NAME>'},
852: {'explanation': [(7, 0.17213007100844877),
(0, -0.1583030948868859),
(3, -0.13748574615069775),
(5, 0.13273283867075436),
(11, 0.12309551170070354)],
'name': '<NAME>'}},
{207: {'explanation': [(3, 0.4073533182995105),
(10, 0.20711667988142463),
(8, 0.15360813290032324),
(6, 0.1405424759832785),
(1, 0.1332920685413575)],
'name': '<NAME>'},
208: {'explanation': [(9, -0.14747910525112617),
(1, -0.13977061235228924),
(2, 0.10526833898161611),
(6, -0.10416022118399552),
(3, 0.09555992655161764)],
'name': '<NAME>'},
852: {'explanation': [(11, 0.2232260929107954),
(7, 0.21638443149433054),
(5, 0.21100464215582274),
(13, 0.145614853795006),
(1, -0.11416523431311262)],
'name': '<NAME>'}},
{207: {'explanation': [(1, 0.14700178977744183),
(0, 0.10346667279328238),
(2, 0.10346667279328238),
(7, 0.10346667279328238),
(8, 0.10162900633690726)],
'name': '<NAME>'},
208: {'explanation': [(10, -0.10845134816658476),
(8, -0.1026920429226184),
(6, -0.10238154733842847),
(18, 0.10094164937411244),
(16, 0.08646888450232793)],
'name': '<NAME>'},
852: {'explanation': [(18, -0.20542297091894474),
(13, 0.2012751176130666),
(8, -0.19194747162742365),
(20, 0.14686930696710473),
(15, 0.11796990086271067)],
'name': '<NAME>'}},
{207: {'explanation': [(13, 0.12446259821701779),
(17, 0.11859084421095789),
(15, 0.09690553833007137),
(12, -0.08869743701731962),
(4, 0.08124900427893789)],
'name': '<NAME>'},
208: {'explanation': [(10, -0.09478194981909983),
(20, -0.09173392507039077),
(9, 0.08768898801254493),
(17, -0.07553994244536394),
(4, 0.07422905503397653)],
'name': '<NAME>'},
852: {'explanation': [(21, 0.1327882942965061),
(1, 0.1238236573086363),
(18, -0.10911712271717902),
(19, 0.09707191051320978),
(6, 0.08593672504338913)],
'name': '<NAME>'}},
{207: {'explanation': [(6, 0.14931728779865114),
(14, 0.14092073957103526),
(1, 0.11071480021464616),
(4, 0.10655287976934531),
(8, 0.08705404649152573)],
'name': '<NAME>'},
208: {'explanation': [(8, -0.12242580400886727),
(9, 0.12142729544158742),
(14, -0.1148252787068248),
(16, -0.09562322208795092),
(4, 0.09350160975513132)],
'name': '<NAME>'},
852: {'explanation': [(6, 0.04227675072263027),
(9, -0.03107924340879173),
(14, 0.028007115650713045),
(13, 0.02771190348545554),
(19, 0.02640441416071482)],
'name': '<NAME>'}},
{207: {'explanation': [(19, 0.14313680656283245),
(18, 0.12866508562342843),
(8, 0.11809779264185447),
(0, 0.11286255403442104),
(2, 0.11286255403442104)],
'name': '<NAME>'},
208: {'explanation': [(9, 0.2397917428082761),
(14, -0.19435572812170654),
(6, -0.1760894833446507),
(18, -0.12243333818399058),
(15, 0.10986343675377105)],
'name': '<NAME>'},
852: {'explanation': [(14, 0.15378038774613365),
(9, -0.14245940635481966),
(6, 0.10213601012183973),
(20, 0.1009180838986786),
(3, 0.09780065767815548)],
'name': '<NAME>'}},
{207: {'explanation': [(15, 0.06525850448807077),
(9, 0.06286791243851698),
(19, 0.055189970374185854),
(8, 0.05499197604401475),
(13, 0.04748220842936177)],
'name': '<NAME>'},
208: {'explanation': [(6, -0.31549091899770765),
(5, 0.1862302670824446),
(8, -0.17381478451341995),
(10, -0.17353516098662508),
(14, -0.13591542421754205)],
'name': '<NAME>'},
852: {'explanation': [(14, 0.2163853942943355),
(6, 0.17565046338282214),
(1, 0.12446193028474549),
(9, -0.11365789839746396),
(10, 0.09239073691962967)],
'name': '<NAME>'}},
{207: {'explanation': [(19, 0.1141207265647932),
(36, -0.08861425922625768),
(30, 0.07219209872026074),
(9, -0.07150939547859836),
(38, -0.06988288637544438)],
'name': '<NAME>'},
208: {'explanation': [(29, 0.10531073909547647),
(13, 0.08279642208039652),
(34, -0.0817952443980797),
(33, -0.08086848205765082),
(12, 0.08086848205765082)],
'name': '<NAME>'},
852: {'explanation': [(13, -0.1330452414595897),
(4, 0.09942366413042845),
(12, -0.09881995683190645),
(33, 0.09881995683190645),
(19, -0.09596925317560831)],
'name': '<NAME>'}},
{207: {'explanation': [(37, 0.08193926967758253),
(35, 0.06804043021426347),
(15, 0.06396269230810163),
(11, 0.062255657227065296),
(8, 0.05529200233091672)],
'name': '<NAME>'},
208: {'explanation': [(19, 0.05711957286614678),
(27, -0.050230108135410824),
(16, -0.04743034616549999),
(5, -0.046717346734255705),
(9, -0.04419100026638039)],
'name': '<NAME>'},
852: {'explanation': [(3, -0.08390967998497496),
(30, -0.07037680222442452),
(22, 0.07029819368543713),
(8, -0.06861396187180349),
(37, -0.06662511956402824)],
'name': '<NAME>'}},
{207: {'explanation': [(19, 0.048418845359024805),
(9, -0.0423869575883795),
(30, 0.04012650790044438),
(36, -0.03787242980067195),
(10, 0.036557999380695635)],
'name': '<NAME>'},
208: {'explanation': [(10, 0.12120686823129677),
(17, 0.10196564232230493),
(7, 0.09495133975425854),
(25, -0.0759657891182803),
(2, -0.07035244568286837)],
'name': '<NAME>'},
852: {'explanation': [(3, -0.0770578003457272),
(28, 0.0769372258280398),
(6, -0.06044725989272927),
(22, 0.05550155775286349),
(31, -0.05399028046597057)],
'name': '<NAME>'}},
{207: {'explanation': [(14, 0.05371383110181226),
(0, -0.04442539316084218),
(18, 0.042589475382826494),
(19, 0.04227647855354252),
(17, 0.041685661662754295)],
'name': '<NAME>'},
208: {'explanation': [(29, 0.14419601354489464),
(17, 0.11785174500536676),
(36, 0.1000501679652906),
(10, 0.09679790134851017),
(35, 0.08710376081189208)],
'name': '<NAME>'},
852: {'explanation': [(8, -0.02486237985832769),
(3, -0.022559886154747102),
(11, -0.021878686669239856),
(36, 0.021847953817988534),
(19, -0.018317598300716522)],
'name': '<NAME>'}},
{207: {'explanation': [(37, 0.08098729255605368),
(35, 0.06639102704982619),
(15, 0.06033721190370432),
(34, 0.05826267856117829),
(28, 0.05549505160798173)],
'name': '<NAME>'},
208: {'explanation': [(17, 0.13839012042250542),
(10, 0.11312187488346881),
(7, 0.10729071207480922),
(25, -0.09529127965797404),
(11, -0.09279834572979286)],
'name': '<NAME>'},
852: {'explanation': [(3, -0.028385651836694076),
(22, 0.023364702783498722),
(8, -0.023097812578270233),
(30, -0.022931236620034406),
(37, -0.022040170736525342)],
'name': '<NAME>'}}
]
EXP_TAB = {
'setosa&0&0': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&1': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&2': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&3': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&4': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&5': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&6': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&7': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&8': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&9': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&10': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&11': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&12': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&13': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&14': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&15': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&16': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&17': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&18': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&19': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&20': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&21': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&22': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&23': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&24': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&25': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&26': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&27': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&28': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&29': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&30': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&31': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&32': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&33': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&34': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&35': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&36': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&37': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&38': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&39': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&40': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&41': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&42': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&43': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&44': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&45': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&46': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&47': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&48': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&49': np.array([0.4656481363306145, 0.007982539480288167]),
'setosa&0&50': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&51': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&52': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&53': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&54': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&55': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&56': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&57': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&58': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&59': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&60': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&61': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&62': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&63': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&0&64': np.array([0.3094460464703627, 0.11400643817329122]),
'setosa&0&65': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&66': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&67': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&68': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&69': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&70': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&71': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&72': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&73': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&74': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&0&75': np.array([0.0, 0.95124502153736]),
'setosa&0&76': np.array([0.0, 0.9708703761803881]),
'setosa&0&77': np.array([0.0, 0.5659706098422994]),
'setosa&0&78': np.array([0.0, 0.3962828716108186]),
'setosa&0&79': np.array([0.0, 0.2538069363248767]),
'setosa&0&80': np.array([0.0, 0.95124502153736]),
'setosa&0&81': np.array([0.0, 0.95124502153736]),
'setosa&0&82': np.array([0.0, 0.95124502153736]),
'setosa&0&83': np.array([0.0, 0.95124502153736]),
'setosa&0&84': np.array([0.0, 0.9708703761803881]),
'setosa&0&85': np.array([0.0, 0.9708703761803881]),
'setosa&0&86': np.array([0.0, 0.9708703761803881]),
'setosa&0&87': np.array([0.0, 0.5659706098422994]),
'setosa&0&88': np.array([0.0, 0.5659706098422994]),
'setosa&0&89': np.array([0.0, 0.3962828716108186]),
'setosa&0&90': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&91': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&92': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&93': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&94': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&95': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&96': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&97': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&98': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&99': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&100': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&101': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&102': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&103': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&104': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&105': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&106': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&107': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&108': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&109': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&110': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&111': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&112': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&113': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&114': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&115': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&116': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&117': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&118': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&119': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&120': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&121': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&122': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&123': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&124': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&125': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&126': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&127': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&128': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&129': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&130': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&131': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&132': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&133': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&134': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&135': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&136': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&137': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&138': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&139': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&140': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&141': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&142': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&143': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&144': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&145': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&146': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&147': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&148': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&149': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&150': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&151': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&152': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&153': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&154': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&155': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&156': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&157': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&158': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&159': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&160': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&161': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&162': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&163': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&164': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&165': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&166': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&167': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&168': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&169': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&170': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&171': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&172': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&173': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&174': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&175': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&176': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&177': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&178': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&179': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&180': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&181': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&182': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&183': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&184': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&185': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&186': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&187': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&188': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&189': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&190': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&191': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&192': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&193': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&194': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&195': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&196': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&197': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&198': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&199': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&200': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&201': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&202': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&203': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&204': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&205': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&206': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&207': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&208': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&209': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&210': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&211': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&212': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&213': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&214': np.array([0.9706534384443797, 0.007448195602953232]),
'setosa&0&215': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&216': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&217': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&218': np.array([0.7431524521056113, 0.24432235603856345]),
'setosa&0&219': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&220': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&221': np.array([0.4926091071260067, 0.49260910712601286]),
'setosa&0&222': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&223': np.array([0.9550700362273441, 0.025428672111930138]),
'setosa&0&224': np.array([0.9672121512728677, 0.012993005706020341]),
'setosa&0&225': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&226': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&227': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&228': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&229': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&230': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&231': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&232': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&233': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&234': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&235': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&236': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&237': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&238': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&239': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&240': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&241': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&242': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&243': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&244': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&245': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&246': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&247': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&248': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&249': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&250': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&251': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&252': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&253': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&254': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&255': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&256': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&257': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&258': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&259': np.array([0.7974072911132786, 0.006894018772033576]),
'setosa&0&260': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&261': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&262': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&263': np.array([0.19685199412911678, 0.7845879230594391]),
'setosa&0&264': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&265': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&266': np.array([0.07476043598366156, 0.9062715528547001]),
'setosa&0&267': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&268': np.array([0.7770298852793471, 0.0294434304771479]),
'setosa&0&269': np.array([0.7936433456054741, 0.01258375207649658]),
'setosa&0&270': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&271': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&272': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&273': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&274': np.array([0.4656481363306145, 0.007982539480288167]),
'setosa&0&275': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&276': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&277': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&278': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&279': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&280': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&281': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&282': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&283': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&284': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&285': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&286': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&287': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&288': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&289': np.array([0.4656481363306145, 0.007982539480288167]),
'setosa&0&290': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&291': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&292': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&293': np.array([0.050316962184345455, 0.9292276112117481]),
'setosa&0&294': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&295': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&296': np.array([0.0171486447659196, 0.9632117581295891]),
'setosa&0&297': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&298': np.array([0.06151571389390039, 0.524561199322281]),
'setosa&0&299': np.array([0.4329463382004908, 0.057167210150691136]),
'setosa&0&300': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&301': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&302': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&303': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&0&304': np.array([0.3094460464703627, 0.11400643817329122]),
'setosa&0&305': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&306': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&307': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&308': np.array([0.029402442458921055, 0.9481684282717416]),
'setosa&0&309': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&310': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&311': np.array([0.00988785935411159, 0.9698143912008228]),
'setosa&0&312': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&313': np.array([0.009595083643662688, 0.5643652067423869]),
'setosa&0&314': np.array([0.13694026920485936, 0.36331091829858003]),
'setosa&1&0': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&1': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&2': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&3': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&4': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&5': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&6': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&7': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&8': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&9': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&10': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&11': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&12': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&13': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&14': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&15': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&16': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&17': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&18': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&19': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&20': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&21': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&22': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&23': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&24': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&25': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&26': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&27': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&28': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&29': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&30': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&31': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&32': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&33': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&34': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&35': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&36': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&37': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&38': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&39': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&40': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&41': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&42': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&43': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&44': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&45': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&46': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&47': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&48': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&49': np.array([0.4079256832347186, 0.038455640985860955]),
'setosa&1&50': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&51': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&52': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&53': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&54': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&55': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&56': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&57': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&58': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&59': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&60': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&61': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&62': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&63': np.array([0.13717260713320106, 0.3627779907901665]),
'setosa&1&64': np.array([0.3093950298647913, 0.1140298206733954]),
'setosa&1&65': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&66': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&67': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&68': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&69': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&70': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&71': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&72': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&73': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&74': np.array([0.13717260713320106, 0.3627779907901665]),
'setosa&1&75': np.array([0.0, -0.4756207622944677]),
'setosa&1&76': np.array([0.0, -0.4854334805210761]),
'setosa&1&77': np.array([0.0, 0.16885577975809635]),
'setosa&1&78': np.array([0.0, 0.395805885538554]),
'setosa&1&79': np.array([0.0, 0.2538072707138344]),
'setosa&1&80': np.array([0.0, -0.4756207622944677]),
'setosa&1&81': np.array([0.0, -0.4756207622944677]),
'setosa&1&82': np.array([0.0, -0.4756207622944677]),
'setosa&1&83': np.array([0.0, -0.4756207622944677]),
'setosa&1&84': np.array([0.0, -0.4854334805210761]),
'setosa&1&85': np.array([0.0, -0.4854334805210761]),
'setosa&1&86': np.array([0.0, -0.4854334805210761]),
'setosa&1&87': np.array([0.0, 0.16885577975809635]),
'setosa&1&88': np.array([0.0, 0.16885577975809635]),
'setosa&1&89': np.array([0.0, 0.395805885538554]),
'setosa&1&90': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&91': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&92': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&93': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&94': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&95': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&96': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&97': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&98': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&99': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&100': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&101': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&102': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&103': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&104': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&105': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&106': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&107': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&108': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&109': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&110': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&111': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&112': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&113': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&114': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&115': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&116': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&117': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&118': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&119': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&120': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&121': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&122': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&123': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&124': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&125': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&126': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&127': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&128': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&129': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&130': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&131': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&132': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&133': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&134': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&135': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&136': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&137': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&138': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&139': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&140': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&141': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&142': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&143': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&144': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&145': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&146': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&147': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&148': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&149': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&150': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&151': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&152': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&153': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&154': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&155': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&156': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&157': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&158': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&159': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&160': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&161': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&162': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&163': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&164': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&165': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&166': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&167': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&168': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&169': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&170': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&171': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&172': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&173': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&174': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&175': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&176': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&177': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&178': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&179': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&180': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&181': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&182': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&183': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&184': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&185': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&186': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&187': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&188': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&189': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&190': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&191': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&192': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&193': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&194': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&195': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&196': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&197': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&198': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&199': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&200': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&201': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&202': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&203': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&204': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&205': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&206': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&207': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&208': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&209': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&210': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&211': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&212': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&213': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&214': np.array([-0.4964962439921071, 0.3798215458387346]),
'setosa&1&215': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&216': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&217': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&218': np.array([-0.37157553889555184, -0.1221600832023858]),
'setosa&1&219': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&220': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&221': np.array([-0.2463036871609408, -0.24630368716093934]),
'setosa&1&222': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&223': np.array([-0.9105775730167809, 0.6842162738602727]),
'setosa&1&224': np.array([-0.6718337295341267, 0.6620422637360075]),
'setosa&1&225': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&226': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&227': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&228': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&229': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&230': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&231': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&232': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&233': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&234': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&235': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&236': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&237': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&238': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&239': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&240': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&241': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&242': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&243': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&244': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&245': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&246': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&247': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&248': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&249': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&250': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&251': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&252': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&253': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&254': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&255': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&256': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&257': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&258': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&259': np.array([-0.26192650167775977, 0.33491141590339474]),
'setosa&1&260': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&261': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&262': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&263': np.array([0.32199975656257585, -0.748229355246375]),
'setosa&1&264': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&265': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&266': np.array([0.43843349141088417, -0.8642740701867918]),
'setosa&1&267': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&268': np.array([-0.7141739659554724, 0.6619819140152877]),
'setosa&1&269': np.array([-0.4446001433508151, 0.6107546840046902]),
'setosa&1&270': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&271': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&272': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&273': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&274': np.array([0.4079256832347186, 0.038455640985860955]),
'setosa&1&275': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&276': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&277': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&278': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&279': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&280': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&281': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&282': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&283': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&284': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&285': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&286': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&287': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&288': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&289': np.array([0.4079256832347186, 0.038455640985860955]),
'setosa&1&290': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&291': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&292': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&293': np.array([0.7749499208750121, -0.814718944080443]),
'setosa&1&294': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&295': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&296': np.array([0.80403091954169, -0.844515250413482]),
'setosa&1&297': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&298': np.array([0.5826506963750848, -0.22335655671229107]),
'setosa&1&299': np.array([0.33108168891715983, 0.13647816746351163]),
'setosa&1&300': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&301': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&302': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&303': np.array([0.13717260713320106, 0.3627779907901665]),
'setosa&1&304': np.array([0.3093950298647913, 0.1140298206733954]),
'setosa&1&305': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&306': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&307': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&308': np.array([0.4933316375690333, -0.5272416708629277]),
'setosa&1&309': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&310': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&311': np.array([0.5041830043657418, -0.5392782673950876]),
'setosa&1&312': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&313': np.array([0.25657760110071476, 0.12592645350389123]),
'setosa&1&314': np.array([0.13717260713320106, 0.3627779907901665]),
'setosa&2&0': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&1': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&2': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&3': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&4': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&5': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&6': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&7': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&8': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&9': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&10': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&11': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&12': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&13': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&14': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&15': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&16': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&17': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&18': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&19': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&20': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&21': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&22': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&23': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&24': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&25': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&26': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&27': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&28': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&29': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&30': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&31': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&32': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&33': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&34': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&35': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&36': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&37': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&38': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&39': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&40': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&41': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&42': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&43': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&44': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&45': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&46': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&47': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&48': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&49': np.array([-0.8735738195653328, -0.046438180466149094]),
'setosa&2&50': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&51': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&52': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&53': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&54': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&55': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&56': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&57': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&58': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&59': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&60': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&61': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&62': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&63': np.array([-0.2741128763380603, -0.7260889090887469]),
'setosa&2&64': np.array([-0.6188410763351541, -0.22803625884668638]),
'setosa&2&65': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&66': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&67': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&68': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&69': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&70': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&71': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&72': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&73': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&74': np.array([-0.2741128763380603, -0.7260889090887469]),
'setosa&2&75': np.array([0.0, -0.47562425924289314]),
'setosa&2&76': np.array([0.0, -0.48543689565931186]),
'setosa&2&77': np.array([0.0, -0.7348263896003956]),
'setosa&2&78': np.array([0.0, -0.7920887571493729]),
'setosa&2&79': np.array([0.0, -0.507614207038711]),
'setosa&2&80': np.array([0.0, -0.47562425924289314]),
'setosa&2&81': np.array([0.0, -0.47562425924289314]),
'setosa&2&82': np.array([0.0, -0.47562425924289314]),
'setosa&2&83': np.array([0.0, -0.47562425924289314]),
'setosa&2&84': np.array([0.0, -0.48543689565931186]),
'setosa&2&85': np.array([0.0, -0.48543689565931186]),
'setosa&2&86': np.array([0.0, -0.48543689565931186]),
'setosa&2&87': np.array([0.0, -0.7348263896003956]),
'setosa&2&88': np.array([0.0, -0.7348263896003956]),
'setosa&2&89': np.array([0.0, -0.7920887571493729]),
'setosa&2&90': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&91': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&92': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&93': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&94': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&95': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&96': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&97': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&98': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&99': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&100': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&101': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&102': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&103': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&104': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&105': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&106': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&107': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&108': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&109': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&110': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&111': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&112': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&113': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&114': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&115': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&116': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&117': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&118': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&119': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&120': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&121': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&122': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&123': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&124': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&125': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&126': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&127': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&128': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&129': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&130': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&131': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&132': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&133': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&134': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&135': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&136': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&137': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&138': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&139': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&140': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&141': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&142': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&143': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&144': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&145': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&146': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&147': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&148': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&149': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&150': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&151': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&152': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&153': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&154': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&155': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&156': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&157': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&158': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&159': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&160': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&161': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&162': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&163': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&164': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&165': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&166': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&167': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&168': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&169': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&170': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&171': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&172': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&173': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&174': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&175': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&176': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&177': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&178': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&179': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&180': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&181': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&182': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&183': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&184': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&185': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&186': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&187': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&188': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&189': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&190': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&191': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&192': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&193': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&194': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&195': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&196': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&197': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&198': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&199': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&200': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&201': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&202': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&203': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&204': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&205': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&206': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&207': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&208': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&209': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&210': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&211': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&212': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&213': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&214': np.array([-0.47415719445227245, -0.38726974144168774]),
'setosa&2&215': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&216': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&217': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&218': np.array([-0.3715769132100501, -0.12216227283618744]),
'setosa&2&219': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&220': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&221': np.array([-0.24630541996506924, -0.24630541996506994]),
'setosa&2&222': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&223': np.array([-0.044492463210563125, -0.7096449459722027]),
'setosa&2&224': np.array([-0.29537842173874096, -0.6750352694420283]),
'setosa&2&225': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&226': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&227': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&228': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&229': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&230': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&231': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&232': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&233': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&234': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&235': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&236': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&237': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&238': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&239': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&240': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&241': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&242': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&243': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&244': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&245': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&246': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&247': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&248': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&249': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&250': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&251': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&252': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&253': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&254': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&255': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&256': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&257': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&258': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&259': np.array([-0.5354807894355184, -0.3418054346754283]),
'setosa&2&260': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&261': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&262': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&263': np.array([-0.5188517506916893, -0.036358567813067795]),
'setosa&2&264': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&265': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&266': np.array([-0.513193927394545, -0.041997482667908786]),
'setosa&2&267': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&268': np.array([-0.06285591932387405, -0.6914253444924359]),
'setosa&2&269': np.array([-0.34904320225465857, -0.6233384360811872]),
'setosa&2&270': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&271': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&272': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&273': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&274': np.array([-0.8735738195653328, -0.046438180466149094]),
'setosa&2&275': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&276': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&277': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&278': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&279': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&280': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&281': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&282': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&283': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&284': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&285': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&286': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&287': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&288': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&289': np.array([-0.8735738195653328, -0.046438180466149094]),
'setosa&2&290': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&291': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&292': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&293': np.array([-0.8252668830593567, -0.11450866713130638]),
'setosa&2&294': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&295': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&296': np.array([-0.8211795643076093, -0.1186965077161071]),
'setosa&2&297': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&298': np.array([-0.6441664102689847, -0.3012046426099901]),
'setosa&2&299': np.array([-0.7640280271176497, -0.19364537761420375]),
'setosa&2&300': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&301': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&302': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&303': np.array([-0.2741128763380603, -0.7260889090887469]),
'setosa&2&304': np.array([-0.6188410763351541, -0.22803625884668638]),
'setosa&2&305': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&306': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&307': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&308': np.array([-0.5227340800279542, -0.42092675740881474]),
'setosa&2&309': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&310': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&311': np.array([-0.5140708637198534, -0.43053612380573514]),
'setosa&2&312': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&313': np.array([-0.2661726847443776, -0.6902916602462779]),
'setosa&2&314': np.array([-0.2741128763380603, -0.7260889090887469]),
'versicolor&0&0': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&1': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&2': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&3': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&4': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&5': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&6': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&7': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&8': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&9': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&10': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&11': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&12': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&13': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&14': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&15': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&16': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&17': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&18': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&19': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&20': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&21': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&22': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&23': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&24': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&25': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&26': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&27': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&28': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&29': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&30': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&31': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&32': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&33': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&34': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&35': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&36': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&37': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&38': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&39': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&40': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&41': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&42': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&43': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&44': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&45': np.array([0.05031696218434577, -0.929227611211748]),
'versicolor&0&46': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&47': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&48': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&49': np.array([0.4656481363306145, 0.007982539480288167]),
'versicolor&0&50': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&51': np.array([0.6614632074748169, -0.6030419328583525]),
'versicolor&0&52': np.array([0.5519595359123358, -0.6434192906054143]),
'versicolor&0&53': np.array([0.14241819268815753, -0.8424615476000691]),
'versicolor&0&54': np.array([0.667423576348749, -0.6594086777766442]),
'versicolor&0&55': np.array([0.5429872243487625, -0.6697888833280774]),
'versicolor&0&56': np.array([0.1140907502997574, -0.8737800276630269]),
'versicolor&0&57': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&58': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&59': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&60': np.array([0.029402442458921384, -0.9481684282717414]),
'versicolor&0&61': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&62': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&63': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&0&64': np.array([0.3094460464703627, 0.11400643817329122]),
'versicolor&0&65': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&66': np.array([0.42809266524335826, -0.40375108595117376]),
'versicolor&0&67': np.array([0.45547700380103057, -0.6083463409799501]),
'versicolor&0&68': np.array([0.19002455311770447, -0.8848597943731074]),
'versicolor&0&69': np.array([0.436966114193701, -0.4638042290788281]),
'versicolor&0&70': np.array([0.45424510803217066, -0.6425314361631614]),
'versicolor&0&71': np.array([0.1746467870122951, -0.9073062742839755]),
'versicolor&0&72': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&73': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&74': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&0&75': np.array([0.0, -0.95124502153736]),
'versicolor&0&76': np.array([0.0, -0.9708703761803881]),
'versicolor&0&77': np.array([0.0, 0.5659706098422994]),
'versicolor&0&78': np.array([0.0, 0.3962828716108186]),
'versicolor&0&79': np.array([0.0, 0.2538069363248767]),
'versicolor&0&80': np.array([0.0, -0.9708703761803881]),
'versicolor&0&81': np.array([0.0, -0.3631376646911367]),
'versicolor&0&82': np.array([0.0, -0.5804857652839247]),
'versicolor&0&83': np.array([0.0, -0.8943993997517804]),
'versicolor&0&84': np.array([0.0, -0.4231275527222919]),
'versicolor&0&85': np.array([0.0, -0.6164235822373675]),
'versicolor&0&86': np.array([0.0, -0.9166476163222441]),
'versicolor&0&87': np.array([0.0, 0.5659706098422994]),
'versicolor&0&88': np.array([0.0, 0.5659706098422994]),
'versicolor&0&89': np.array([0.0, 0.3962828716108186]),
'versicolor&0&90': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&91': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&92': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&93': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&94': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&95': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&96': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&97': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&98': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&99': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&100': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&101': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&102': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&103': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&104': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&105': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&106': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&107': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&108': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&109': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&110': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&111': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&112': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&113': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&114': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&115': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&116': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&117': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&118': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&119': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&120': np.array([-0.05855179950109871, -0.9211684729232403]),
'versicolor&0&121': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&122': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&123': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&124': np.array([-0.5182062652425321, 0.3958533237517639]),
'versicolor&0&125': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&126': np.array([-0.5107107533700952, 0.0075507123577884866]),
'versicolor&0&127': np.array([-0.1464063320531759, -0.4788055402156298]),
'versicolor&0&128': np.array([-0.061109248092233844, -0.8620287767000373]),
'versicolor&0&129': np.array([-0.4706137753079746, -0.057389625790424635]),
'versicolor&0&130': np.array([-0.06804620923037683, -0.5677904519730453]),
'versicolor&0&131': np.array([-0.020216773196675246, -0.9057119888626176]),
'versicolor&0&132': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&133': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&134': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&135': np.array([-0.19684482070614498, -0.7845939961595055]),
'versicolor&0&136': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&137': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&138': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&139': np.array([-0.8063011502229427, 0.4134300066735808]),
'versicolor&0&140': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&141': np.array([-0.7985789197998611, 0.0026209054759345337]),
'versicolor&0&142': np.array([-0.7182275903095532, -0.11963032135457498]),
'versicolor&0&143': np.array([-0.2798927835773098, -0.6581136857450849]),
'versicolor&0&144': np.array([-0.7920119433269182, -0.0142751249964083]),
'versicolor&0&145': np.array([-0.6943081428778407, -0.14852813120265815]),
'versicolor&0&146': np.array([-0.16106555563262584, -0.777621649099753]),
'versicolor&0&147': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&148': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&149': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&150': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&151': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&152': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&153': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&154': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&155': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&156': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&157': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&158': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&159': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&160': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&161': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&162': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&163': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&164': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&165': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&166': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&167': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&168': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&169': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&170': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&171': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&172': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&173': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&174': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&175': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&176': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&177': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&178': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&179': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&180': np.array([-0.05855179950109871, -0.9211684729232403]),
'versicolor&0&181': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&182': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&183': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&184': np.array([-0.5182062652425321, 0.3958533237517639]),
'versicolor&0&185': np.array([-0.020067537725011863, -0.960349531159508]),
'versicolor&0&186': np.array([-0.5107107533700952, 0.0075507123577884866]),
'versicolor&0&187': np.array([-0.1464063320531759, -0.4788055402156298]),
'versicolor&0&188': np.array([-0.061109248092233844, -0.8620287767000373]),
'versicolor&0&189': np.array([-0.4706137753079746, -0.057389625790424635]),
'versicolor&0&190': np.array([-0.06804620923037683, -0.5677904519730453]),
'versicolor&0&191': np.array([-0.020216773196675246, -0.9057119888626176]),
'versicolor&0&192': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&193': np.array([-0.5775164514598086, 0.6278692602817483]),
'versicolor&0&194': np.array([-0.6813845327458135, 0.6599725404733693]),
'versicolor&0&195': np.array([-0.19684482070614498, -0.7845939961595055]),
'versicolor&0&196': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&197': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&198': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&199': np.array([-0.8063011502229427, 0.4134300066735808]),
'versicolor&0&200': np.array([-0.07475231751447156, -0.9062785678426409]),
'versicolor&0&201': np.array([-0.7985789197998611, 0.0026209054759345337]),
'versicolor&0&202': np.array([-0.7182275903095532, -0.11963032135457498]),
'versicolor&0&203': np.array([-0.2798927835773098, -0.6581136857450849]),
'versicolor&0&204': np.array([-0.7920119433269182, -0.0142751249964083]),
'versicolor&0&205': np.array([-0.6943081428778407, -0.14852813120265815]),
'versicolor&0&206': np.array([-0.16106555563262584, -0.777621649099753]),
'versicolor&0&207': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&208': np.array([-0.6782037543706109, 0.2956007367698983]),
'versicolor&0&209': np.array([-0.7694171988675237, 0.276633135028249]),
'versicolor&0&210': np.array([-0.7431524521056113, -0.24432235603856345]),
'versicolor&0&211': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&212': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&213': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&214': np.array([-0.9706534384443797, 0.007448195602953232]),
'versicolor&0&215': np.array([-0.4926091071260067, -0.49260910712601286]),
'versicolor&0&216': np.array([-0.967167257194905, -0.011919414234523772]),
'versicolor&0&217': np.array([-0.953200964337313, -0.027163424176667752]),
'versicolor&0&218': np.array([-0.8486399726113752, -0.13537345771621853]),
'versicolor&0&219': np.array([-0.9658161779555727, -0.01446062269877741]),
'versicolor&0&220': np.array([-0.9493506964095418, -0.0312186903717912]),
'versicolor&0&221': np.array([-0.7870031444780577, -0.1952404625292782]),
'versicolor&0&222': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&223': np.array([-0.9550700362273441, 0.025428672111930138]),
'versicolor&0&224': np.array([-0.9672121512728677, 0.012993005706020341]),
'versicolor&0&225': np.array([-0.04777085826693217, -0.931704979630315]),
'versicolor&0&226': np.array([-0.016252316132452975, -0.9640854286687816]),
'versicolor&0&227': np.array([-0.44101924439572626, 0.5583264842761904]),
'versicolor&0&228': np.array([-0.5844994389588399, 0.5715208832363579]),
'versicolor&0&229': np.array([-0.46216647196120714, 0.35468591243823655]),
'versicolor&0&230': np.array([-0.016252316132452975, -0.9640854286687816]),
'versicolor&0&231': np.array([-0.3707180757031537, -0.1977196581472426]),
'versicolor&0&232': np.array([-0.1043459833293615, -0.5233314327065356]),
'versicolor&0&233': np.array([-0.049289647556763364, -0.8736084405111605]),
'versicolor&0&234': np.array([-0.34078174031874375, -0.25874482325965437]),
'versicolor&0&235': np.array([-0.050841051273783675, -0.5877587283589205]),
'versicolor&0&236': np.array([-0.0161720977425142, -0.9096817855236822]),
'versicolor&0&237': np.array([-0.44101924439572626, 0.5583264842761904]),
'versicolor&0&238': np.array([-0.44101924439572626, 0.5583264842761904]),
'versicolor&0&239': np.array([-0.5844994389588399, 0.5715208832363579]),
'versicolor&0&240': np.array([-0.11329659732608087, -0.8671819100849522]),
'versicolor&0&241': np.array([-0.040390637135858574, -0.9402832917474078]),
'versicolor&0&242': np.array([-0.5276460255602035, 0.28992233541586077]),
'versicolor&0&243': np.array([-0.6392402874163683, 0.24114611970435948]),
'versicolor&0&244': np.array([-0.6814868825686854, 0.35066801608083215]),
'versicolor&0&245': np.array([-0.040390637135858574, -0.9402832917474078]),
'versicolor&0&246': np.array([-0.6425009695928476, -0.24851992476830956]),
'versicolor&0&247': np.array([-0.5151243662384031, -0.3255567772442641]),
'versicolor&0&248': np.array([-0.16157511199607094, -0.7754323813403634]),
'versicolor&0&249': np.array([-0.6300442788906601, -0.28361140069713875]),
'versicolor&0&250': np.array([-0.4875864856121089, -0.3614122096616301]),
'versicolor&0&251': np.array([-0.08968204532514226, -0.8491191210330045]),
'versicolor&0&252': np.array([-0.5276460255602035, 0.28992233541586077]),
'versicolor&0&253': np.array([-0.5276460255602035, 0.28992233541586077]),
'versicolor&0&254': np.array([-0.6392402874163683, 0.24114611970435948]),
'versicolor&0&255': np.array([-0.19685199412911655, -0.7845879230594393]),
'versicolor&0&256': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&257': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&258': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&259': np.array([-0.7974072911132788, 0.006894018772033604]),
'versicolor&0&260': np.array([-0.07476043598366228, -0.9062715528546994]),
'versicolor&0&261': np.array([-0.7779663027946229, -0.2981599980028888]),
'versicolor&0&262': np.array([-0.6669876551417979, -0.2911996622134135]),
'versicolor&0&263': np.array([-0.3355030348883163, -0.6305271339971502]),
'versicolor&0&264': np.array([-0.7658431164447598, -0.3248317507526541]),
'versicolor&0&265': np.array([-0.6459073168288453, -0.31573292128613833]),
'versicolor&0&266': np.array([-0.2519677855687844, -0.7134447168661863]),
'versicolor&0&267': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&268': np.array([-0.7770298852793476, 0.029443430477147536]),
'versicolor&0&269': np.array([-0.7936433456054744, 0.012583752076496493]),
'versicolor&0&270': np.array([0.05031696218434577, -0.929227611211748]),
'versicolor&0&271': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&272': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&273': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&274': np.array([0.4656481363306145, 0.007982539480288167]),
'versicolor&0&275': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&276': np.array([0.6614632074748169, -0.6030419328583525]),
'versicolor&0&277': np.array([0.5519595359123358, -0.6434192906054143]),
'versicolor&0&278': np.array([0.14241819268815753, -0.8424615476000691]),
'versicolor&0&279': np.array([0.667423576348749, -0.6594086777766442]),
'versicolor&0&280': np.array([0.5429872243487625, -0.6697888833280774]),
'versicolor&0&281': np.array([0.1140907502997574, -0.8737800276630269]),
'versicolor&0&282': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&283': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&284': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&285': np.array([0.05031696218434577, -0.929227611211748]),
'versicolor&0&286': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&287': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&288': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&289': np.array([0.4656481363306145, 0.007982539480288167]),
'versicolor&0&290': np.array([0.017148644765919676, -0.9632117581295891]),
'versicolor&0&291': np.array([0.6614632074748169, -0.6030419328583525]),
'versicolor&0&292': np.array([0.5519595359123358, -0.6434192906054143]),
'versicolor&0&293': np.array([0.14241819268815753, -0.8424615476000691]),
'versicolor&0&294': np.array([0.667423576348749, -0.6594086777766442]),
'versicolor&0&295': np.array([0.5429872243487625, -0.6697888833280774]),
'versicolor&0&296': np.array([0.1140907502997574, -0.8737800276630269]),
'versicolor&0&297': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&298': np.array([0.06151571389390039, 0.524561199322281]),
'versicolor&0&299': np.array([0.4329463382004908, 0.057167210150691136]),
'versicolor&0&300': np.array([0.029402442458921384, -0.9481684282717414]),
'versicolor&0&301': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&302': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&303': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&0&304': np.array([0.3094460464703627, 0.11400643817329122]),
'versicolor&0&305': np.array([0.009887859354111524, -0.9698143912008228]),
'versicolor&0&306': np.array([0.42809266524335826, -0.40375108595117376]),
'versicolor&0&307': np.array([0.45547700380103057, -0.6083463409799501]),
'versicolor&0&308': np.array([0.19002455311770447, -0.8848597943731074]),
'versicolor&0&309': np.array([0.436966114193701, -0.4638042290788281]),
'versicolor&0&310': np.array([0.45424510803217066, -0.6425314361631614]),
'versicolor&0&311': np.array([0.1746467870122951, -0.9073062742839755]),
'versicolor&0&312': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&313': np.array([0.009595083643662688, 0.5643652067423869]),
'versicolor&0&314': np.array([0.13694026920485936, 0.36331091829858003]),
'versicolor&1&0': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&1': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&2': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&3': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&4': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&5': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&6': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&7': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&8': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&9': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&10': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&11': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&12': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&13': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&14': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&15': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&16': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&17': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&18': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&19': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&20': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&21': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&22': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&23': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&24': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&25': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&26': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&27': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&28': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&29': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&30': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&31': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&32': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&33': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&34': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&35': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&36': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&37': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&38': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&39': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&40': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&41': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&42': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&43': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&44': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&45': np.array([0.7749499208750119, 0.8147189440804429]),
'versicolor&1&46': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&47': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&48': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&49': np.array([0.4079256832347186, 0.038455640985860955]),
'versicolor&1&50': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&51': np.array([0.18555813792691386, 0.6940923833143309]),
'versicolor&1&52': np.array([0.32639262064172164, 0.6296083447134281]),
'versicolor&1&53': np.array([0.6964303997553315, 0.7444536452136676]),
'versicolor&1&54': np.array([0.18216358701833335, 0.747615101407194]),
'versicolor&1&55': np.array([0.33549445287370383, 0.6526039763053625]),
'versicolor&1&56': np.array([0.7213651642695392, 0.7718874443854203]),
'versicolor&1&57': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&58': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&59': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&60': np.array([0.4933316375690332, 0.5272416708629276]),
'versicolor&1&61': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&62': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&63': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&1&64': np.array([0.3093950298647913, 0.1140298206733954]),
'versicolor&1&65': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&66': np.array([0.1413116283690917, 0.7479856297394165]),
'versicolor&1&67': np.array([0.189773257421942, 0.6552150653012478]),
'versicolor&1&68': np.array([0.40694846236352233, 0.5109051764198169]),
'versicolor&1&69': np.array([0.1390424906594644, 0.7991613016301518]),
'versicolor&1&70': np.array([0.1945777487290197, 0.6743932844312892]),
'versicolor&1&71': np.array([0.415695226122737, 0.5230815102377903]),
'versicolor&1&72': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&73': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&74': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&1&75': np.array([0.0, 0.4756207622944677]),
'versicolor&1&76': np.array([0.0, 0.4854334805210761]),
'versicolor&1&77': np.array([0.0, 0.16885577975809635]),
'versicolor&1&78': np.array([0.0, 0.395805885538554]),
'versicolor&1&79': np.array([0.0, 0.2538072707138344]),
'versicolor&1&80': np.array([0.0, 0.4854334805210761]),
'versicolor&1&81': np.array([0.0, 0.7613919530844643]),
'versicolor&1&82': np.array([0.0, 0.6668230985485095]),
'versicolor&1&83': np.array([0.0, 0.4904755652105692]),
'versicolor&1&84': np.array([0.0, 0.8121046082359693]),
'versicolor&1&85': np.array([0.0, 0.6855766903749089]),
'versicolor&1&86': np.array([0.0, 0.5008471974438506]),
'versicolor&1&87': np.array([0.0, 0.16885577975809635]),
'versicolor&1&88': np.array([0.0, 0.16885577975809635]),
'versicolor&1&89': np.array([0.0, 0.395805885538554]),
'versicolor&1&90': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&91': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&92': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&93': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&94': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&95': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&96': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&97': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&98': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&99': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&100': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&101': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&102': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&103': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&104': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&105': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&106': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&107': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&108': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&109': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&110': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&111': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&112': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&113': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&114': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&115': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&116': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&117': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&118': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&119': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&120': np.array([0.8224435822504677, 0.05315271528828394]),
'versicolor&1&121': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&122': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&123': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&124': np.array([0.8476206690613984, 0.02146454924522743]),
'versicolor&1&125': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&126': np.array([0.69362517791403, 0.2579390890424607]),
'versicolor&1&127': np.array([0.7261791877801502, 0.16248655642013624]),
'versicolor&1&128': np.array([0.8190416077589757, 0.05661509439536992]),
'versicolor&1&129': np.array([0.6654762076749751, 0.2949291633432878]),
'versicolor&1&130': np.array([0.7118161070185614, 0.17683644094125878]),
'versicolor&1&131': np.array([0.8165214253946836, 0.059175619390630096]),
'versicolor&1&132': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&133': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&134': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&135': np.array([0.5188109114552927, 0.03638964581864269]),
'versicolor&1&136': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&137': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&138': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&139': np.array([0.5436097000280874, 0.1461891067488832]),
'versicolor&1&140': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&141': np.array([0.32513442685780247, 0.6124765483184536]),
'versicolor&1&142': np.array([0.1812883360919208, 0.5504982486874137]),
'versicolor&1&143': np.array([0.4788153032824012, 0.08625929936974323]),
'versicolor&1&144': np.array([0.28490718210609345, 0.6650298146522879]),
'versicolor&1&145': np.array([0.1313204067730033, 0.597079642504441]),
'versicolor&1&146': np.array([0.46583127837967303, 0.09875847161509169]),
'versicolor&1&147': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&148': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&149': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&150': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&151': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&152': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&153': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&154': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&155': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&156': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&157': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&158': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&159': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&160': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&161': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&162': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&163': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&164': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&165': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&166': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&167': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&168': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&169': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&170': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&171': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&172': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&173': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&174': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&175': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&176': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&177': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&178': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&179': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&180': np.array([0.8224435822504677, 0.05315271528828394]),
'versicolor&1&181': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&182': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&183': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&184': np.array([0.8476206690613984, 0.02146454924522743]),
'versicolor&1&185': np.array([0.820222886307464, 0.055413714884152906]),
'versicolor&1&186': np.array([0.69362517791403, 0.2579390890424607]),
'versicolor&1&187': np.array([0.7261791877801502, 0.16248655642013624]),
'versicolor&1&188': np.array([0.8190416077589757, 0.05661509439536992]),
'versicolor&1&189': np.array([0.6654762076749751, 0.2949291633432878]),
'versicolor&1&190': np.array([0.7118161070185614, 0.17683644094125878]),
'versicolor&1&191': np.array([0.8165214253946836, 0.059175619390630096]),
'versicolor&1&192': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&193': np.array([0.8393089066702096, 0.0788980157959197]),
'versicolor&1&194': np.array([0.8282924295054531, 0.0752641855714259]),
'versicolor&1&195': np.array([0.5188109114552927, 0.03638964581864269]),
'versicolor&1&196': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&197': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&198': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&199': np.array([0.5436097000280874, 0.1461891067488832]),
'versicolor&1&200': np.array([0.5131478569192371, 0.04203387599862816]),
'versicolor&1&201': np.array([0.32513442685780247, 0.6124765483184536]),
'versicolor&1&202': np.array([0.1812883360919208, 0.5504982486874137]),
'versicolor&1&203': np.array([0.4788153032824012, 0.08625929936974323]),
'versicolor&1&204': np.array([0.28490718210609345, 0.6650298146522879]),
'versicolor&1&205': np.array([0.1313204067730033, 0.597079642504441]),
'versicolor&1&206': np.array([0.46583127837967303, 0.09875847161509169]),
'versicolor&1&207': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&208': np.array([0.73294627367007, 0.4610490766898855]),
'versicolor&1&209': np.array([0.5965042032375719, 0.48856644624972617]),
'versicolor&1&210': np.array([0.37157553889555184, 0.1221600832023858]),
'versicolor&1&211': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&212': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&213': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&214': np.array([0.4964962439921071, 0.3798215458387346]),
'versicolor&1&215': np.array([0.2463036871609408, 0.24630368716093934]),
'versicolor&1&216': np.array([0.2805345936193346, 0.6595182922149835]),
'versicolor&1&217': np.array([0.08302493125394889, 0.6186280682763334]),
'versicolor&1&218': np.array([0.22125635302655813, 0.2925832702358638]),
'versicolor&1&219': np.array([0.2365788606456636, 0.7120007179768731]),
'versicolor&1&220': np.array([0.022347126801293967, 0.6718013300441928]),
'versicolor&1&221': np.array([0.10063786451829529, 0.4085974066833644]),
'versicolor&1&222': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&223': np.array([0.9105775730167809, 0.6842162738602727]),
'versicolor&1&224': np.array([0.6718337295341267, 0.6620422637360075]),
'versicolor&1&225': np.array([0.6253337666017573, 0.21983620140147825]),
'versicolor&1&226': np.array([0.6178968870349187, 0.22747652768125623]),
'versicolor&1&227': np.array([0.7245803616608639, 0.18141483095066183]),
'versicolor&1&228': np.array([0.6762617119303499, 0.19305674697949574]),
'versicolor&1&229': np.array([0.7182033715159247, 0.0970420677941148]),
'versicolor&1&230': np.array([0.6178968870349187, 0.22747652768125623]),
'versicolor&1&231': np.array([0.4976586558055923, 0.5393318265947251]),
'versicolor&1&232': np.array([0.4361093214026388, 0.4279491486345008]),
'versicolor&1&233': np.array([0.613985959011319, 0.23148898930908424]),
'versicolor&1&234': np.array([0.46747697713468217, 0.586607956360002]),
'versicolor&1&235': np.array([0.41044950174869577, 0.45415985894965977]),
'versicolor&1&236': np.array([0.6057447478066579, 0.23993389556303918]),
'versicolor&1&237': np.array([0.7245803616608639, 0.18141483095066183]),
'versicolor&1&238': np.array([0.7245803616608639, 0.18141483095066183]),
'versicolor&1&239': np.array([0.6762617119303499, 0.19305674697949574]),
'versicolor&1&240': np.array([0.056623968925773045, 0.43360725859686644]),
'versicolor&1&241': np.array([0.020169511418752378, 0.47015948158260334]),
'versicolor&1&242': np.array([0.5806365328450954, 0.47262706807712623]),
'versicolor&1&243': np.array([0.4146290154471569, 0.4964318942067898]),
'versicolor&1&244': np.array([0.3351719071445682, 0.20616862401308342]),
'versicolor&1&245': np.array([0.020169511418752378, 0.47015948158260334]),
'versicolor&1&246': np.array([0.24022705822940116, 0.7185371033867092]),
'versicolor&1&247': np.array([0.010447231513465048, 0.6616528865917504]),
'versicolor&1&248': np.array([0.024556360933646205, 0.4723948285969902]),
'versicolor&1&249': np.array([0.21321406009810842, 0.7648907754638917]),
'versicolor&1&250': np.array([-0.027450681014480036, 0.6999336015080245]),
'versicolor&1&251': np.array([-0.0164329511444131, 0.5132208276383963]),
'versicolor&1&252': np.array([0.5806365328450954, 0.47262706807712623]),
'versicolor&1&253': np.array([0.5806365328450954, 0.47262706807712623]),
'versicolor&1&254': np.array([0.4146290154471569, 0.4964318942067898]),
'versicolor&1&255': np.array([-0.32199975656257646, 0.7482293552463756]),
'versicolor&1&256': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&257': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&258': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&259': np.array([0.2619265016777598, 0.33491141590339474]),
'versicolor&1&260': np.array([-0.43843349141088417, 0.8642740701867917]),
'versicolor&1&261': np.array([0.20183015430619713, 0.7445346002055082]),
'versicolor&1&262': np.array([-0.05987874887638573, 0.6927937290176818]),
'versicolor&1&263': np.array([-0.2562642052727569, 0.6920266972283227]),
'versicolor&1&264': np.array([0.1736438124560164, 0.7898174616442941]),
'versicolor&1&265': np.array([-0.10114089899940126, 0.7326610366533243]),
'versicolor&1&266': np.array([-0.34479806250338163, 0.7789143553916729]),
'versicolor&1&267': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&268': np.array([0.7141739659554727, 0.6619819140152878]),
'versicolor&1&269': np.array([0.44460014335081516, 0.6107546840046902]),
'versicolor&1&270': np.array([0.7749499208750119, 0.8147189440804429]),
'versicolor&1&271': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&272': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&273': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&274': np.array([0.4079256832347186, 0.038455640985860955]),
'versicolor&1&275': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&276': np.array([0.18555813792691386, 0.6940923833143309]),
'versicolor&1&277': np.array([0.32639262064172164, 0.6296083447134281]),
'versicolor&1&278': np.array([0.6964303997553315, 0.7444536452136676]),
'versicolor&1&279': np.array([0.18216358701833335, 0.747615101407194]),
'versicolor&1&280': np.array([0.33549445287370383, 0.6526039763053625]),
'versicolor&1&281': np.array([0.7213651642695392, 0.7718874443854203]),
'versicolor&1&282': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&283': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&284': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&285': np.array([0.7749499208750119, 0.8147189440804429]),
'versicolor&1&286': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&287': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&288': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&289': np.array([0.4079256832347186, 0.038455640985860955]),
'versicolor&1&290': np.array([0.8040309195416899, 0.8445152504134819]),
'versicolor&1&291': np.array([0.18555813792691386, 0.6940923833143309]),
'versicolor&1&292': np.array([0.32639262064172164, 0.6296083447134281]),
'versicolor&1&293': np.array([0.6964303997553315, 0.7444536452136676]),
'versicolor&1&294': np.array([0.18216358701833335, 0.747615101407194]),
'versicolor&1&295': np.array([0.33549445287370383, 0.6526039763053625]),
'versicolor&1&296': np.array([0.7213651642695392, 0.7718874443854203]),
'versicolor&1&297': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&298': np.array([0.5826506963750848, -0.22335655671229107]),
'versicolor&1&299': np.array([0.33108168891715983, 0.13647816746351163]),
'versicolor&1&300': np.array([0.4933316375690332, 0.5272416708629276]),
'versicolor&1&301': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&302': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&303': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&1&304': np.array([0.3093950298647913, 0.1140298206733954]),
'versicolor&1&305': np.array([0.5041830043657418, 0.5392782673950876]),
'versicolor&1&306': np.array([0.1413116283690917, 0.7479856297394165]),
'versicolor&1&307': np.array([0.189773257421942, 0.6552150653012478]),
'versicolor&1&308': np.array([0.40694846236352233, 0.5109051764198169]),
'versicolor&1&309': np.array([0.1390424906594644, 0.7991613016301518]),
'versicolor&1&310': np.array([0.1945777487290197, 0.6743932844312892]),
'versicolor&1&311': np.array([0.415695226122737, 0.5230815102377903]),
'versicolor&1&312': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&313': np.array([0.25657760110071476, 0.12592645350389123]),
'versicolor&1&314': np.array([0.13717260713320106, 0.3627779907901665]),
'versicolor&2&0': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&1': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&2': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&3': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&4': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&5': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&6': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&7': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&8': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&9': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&10': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&11': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&12': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&13': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&14': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&15': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&16': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&17': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&18': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&19': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&20': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&21': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&22': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&23': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&24': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&25': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&26': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&27': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&28': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&29': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&30': np.array([0.5188517506916897, 0.036358567813067386]),
'versicolor&2&31': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&32': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&33': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&34': np.array([0.5354807894355184, -0.3418054346754283]),
'versicolor&2&35': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&36': np.array([0.5761361484884252, -0.44637460220261904]),
'versicolor&2&37': np.array([0.7268664040181829, -0.40159406680426807]),
'versicolor&2&38': np.array([0.5917672401610737, -0.061499563231173816]),
'versicolor&2&39': np.array([0.5921993039887428, -0.46498571089163954]),
'versicolor&2&40': np.array([0.7470482158282458, -0.4169281153671854]),
'versicolor&2&41': np.array([0.5967658480721675, -0.06546963852548916]),
'versicolor&2&42': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&43': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&44': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&45': np.array([-0.8252668830593566, 0.11450866713130668]),
'versicolor&2&46': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&47': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&48': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&49': np.array([-0.8735738195653328, -0.046438180466149094]),
'versicolor&2&50': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&51': np.array([-0.8470213454017305, -0.0910504504559782]),
'versicolor&2&52': np.array([-0.8783521565540571, 0.01381094589198601]),
'versicolor&2&53': np.array([-0.8388485924434891, 0.09800790238640067]),
'versicolor&2&54': np.array([-0.8495871633670822, -0.08820642363054954]),
'versicolor&2&55': np.array([-0.8784816772224661, 0.017184907022714958]),
'versicolor&2&56': np.array([-0.835455914569297, 0.10189258327760495]),
'versicolor&2&57': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&58': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&59': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&60': np.array([-0.5227340800279543, 0.4209267574088147]),
'versicolor&2&61': np.array([-0.5140708637198534, 0.4305361238057349]),
'versicolor&2&62': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&63': np.array([-0.2741128763380603, -0.7260889090887469]),
'versicolor&2&64': np.array([-0.6188410763351541, -0.22803625884668638]),
'versicolor&2&65': np.array([-0.5140708637198534, 0.4305361238057349]),
'versicolor&2&66': np.array([-0.56940429361245, -0.3442345437882425]),
'versicolor&2&67': np.array([-0.6452502612229726, -0.04686872432129788]),
'versicolor&2&68': np.array([-0.596973015481227, 0.37395461795328944]),
'versicolor&2&69': np.array([-0.5760086048531655, -0.3353570725513232]),
'versicolor&2&70': np.array([-0.6488228567611906, -0.03186184826812757]),
'versicolor&2&71': np.array([-0.5903420131350324, 0.384224764046184]),
'versicolor&2&72': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&73': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&74': np.array([-0.2741128763380603, -0.7260889090887469]),
'versicolor&2&75': np.array([0.0, 0.47562425924289314]),
'versicolor&2&76': np.array([0.0, 0.4854368956593117]),
'versicolor&2&77': np.array([0.0, -0.7348263896003956]),
'versicolor&2&78': np.array([0.0, -0.7920887571493729]),
'versicolor&2&79': np.array([0.0, -0.507614207038711]),
'versicolor&2&80': np.array([0.0, 0.4854368956593117]),
'versicolor&2&81': np.array([0.0, -0.3982542883933272]),
'versicolor&2&82': np.array([0.0, -0.08633733326458487]),
'versicolor&2&83': np.array([0.0, 0.4039238345412103]),
'versicolor&2&84': np.array([0.0, -0.38897705551367706]),
'versicolor&2&85': np.array([0.0, -0.06915310813754129]),
'versicolor&2&86': np.array([0.0, 0.41580041887839214]),
'versicolor&2&87': np.array([0.0, -0.7348263896003956]),
'versicolor&2&88': np.array([0.0, -0.7348263896003956]),
'versicolor&2&89': np.array([0.0, -0.7920887571493729]),
'versicolor&2&90': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&91': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&92': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&93': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&94': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&95': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&96': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&97': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&98': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&99': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&100': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&101': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&102': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&103': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&104': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&105': np.array([0.5188517506916897, 0.036358567813067386]),
'versicolor&2&106': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&107': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&108': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&109': np.array([0.5354807894355184, -0.3418054346754283]),
'versicolor&2&110': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&111': np.array([0.5761361484884252, -0.44637460220261904]),
'versicolor&2&112': np.array([0.7268664040181829, -0.40159406680426807]),
'versicolor&2&113': np.array([0.5917672401610737, -0.061499563231173816]),
'versicolor&2&114': np.array([0.5921993039887428, -0.46498571089163954]),
'versicolor&2&115': np.array([0.7470482158282458, -0.4169281153671854]),
'versicolor&2&116': np.array([0.5967658480721675, -0.06546963852548916]),
'versicolor&2&117': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&118': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&119': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&120': np.array([-0.7638917827493686, 0.868015757634957]),
'versicolor&2&121': np.array([-0.8001553485824509, 0.9049358162753539]),
'versicolor&2&122': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&123': np.array([-0.14690789675963867, -0.7352367260447958]),
'versicolor&2&124': np.array([-0.32941440381886555, -0.4173178729969913]),
'versicolor&2&125': np.array([-0.8001553485824509, 0.9049358162753539]),
'versicolor&2&126': np.array([-0.18291442454393395, -0.2654898014002494]),
'versicolor&2&127': np.array([-0.5797728557269727, 0.3163189837954924]),
'versicolor&2&128': np.array([-0.7579323596667402, 0.8054136823046655]),
'versicolor&2&129': np.array([-0.1948624323669993, -0.23753953755286383]),
'versicolor&2&130': np.array([-0.6437698977881832, 0.3909540110317858]),
'versicolor&2&131': np.array([-0.7963046521980063, 0.846536369471985]),
'versicolor&2&132': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&133': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&134': np.array([-0.14690789675963867, -0.7352367260447958]),
'versicolor&2&135': np.array([-0.3219660907491514, 0.7482043503408669]),
'versicolor&2&136': np.array([-0.43839553940476644, 0.8642446918440131]),
'versicolor&2&137': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&138': np.array([0.17291299562995102, -0.7651995812779756]),
'versicolor&2&139': np.array([0.2626914501948546, -0.5596191134224637]),
'versicolor&2&140': np.array([-0.43839553940476644, 0.8642446918440131]),
'versicolor&2&141': np.array([0.4734444929420575, -0.6150974537943872]),
'versicolor&2&142': np.array([0.5369392542176313, -0.430867927332838]),
'versicolor&2&143': np.array([-0.19892251970509112, 0.5718543863753405]),
'versicolor&2&144': np.array([0.5071047612208237, -0.6507546896558788]),
'versicolor&2&145': np.array([0.5629877361048359, -0.4485515113017818]),
'versicolor&2&146': np.array([-0.3047657227470458, 0.6788631774846587]),
'versicolor&2&147': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&148': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&149': np.array([0.17291299562995102, -0.7651995812779756]),
'versicolor&2&150': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&151': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&152': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&153': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&154': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&155': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&156': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&157': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&158': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&159': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&160': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&161': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&162': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&163': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&164': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&165': np.array([0.5188517506916897, 0.036358567813067386]),
'versicolor&2&166': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&167': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&168': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&169': np.array([0.5354807894355184, -0.3418054346754283]),
'versicolor&2&170': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&171': np.array([0.5761361484884252, -0.44637460220261904]),
'versicolor&2&172': np.array([0.7268664040181829, -0.40159406680426807]),
'versicolor&2&173': np.array([0.5917672401610737, -0.061499563231173816]),
'versicolor&2&174': np.array([0.5921993039887428, -0.46498571089163954]),
'versicolor&2&175': np.array([0.7470482158282458, -0.4169281153671854]),
'versicolor&2&176': np.array([0.5967658480721675, -0.06546963852548916]),
'versicolor&2&177': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&178': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&179': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&180': np.array([-0.7638917827493686, 0.868015757634957]),
'versicolor&2&181': np.array([-0.8001553485824509, 0.9049358162753539]),
'versicolor&2&182': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&183': np.array([-0.14690789675963867, -0.7352367260447958]),
'versicolor&2&184': np.array([-0.32941440381886555, -0.4173178729969913]),
'versicolor&2&185': np.array([-0.8001553485824509, 0.9049358162753539]),
'versicolor&2&186': np.array([-0.18291442454393395, -0.2654898014002494]),
'versicolor&2&187': np.array([-0.5797728557269727, 0.3163189837954924]),
'versicolor&2&188': np.array([-0.7579323596667402, 0.8054136823046655]),
'versicolor&2&189': np.array([-0.1948624323669993, -0.23753953755286383]),
'versicolor&2&190': np.array([-0.6437698977881832, 0.3909540110317858]),
'versicolor&2&191': np.array([-0.7963046521980063, 0.846536369471985]),
'versicolor&2&192': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&193': np.array([-0.26179245521040034, -0.7067672760776678]),
'versicolor&2&194': np.array([-0.14690789675963867, -0.7352367260447958]),
'versicolor&2&195': np.array([-0.3219660907491514, 0.7482043503408669]),
'versicolor&2&196': np.array([-0.43839553940476644, 0.8642446918440131]),
'versicolor&2&197': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&198': np.array([0.17291299562995102, -0.7651995812779756]),
'versicolor&2&199': np.array([0.2626914501948546, -0.5596191134224637]),
'versicolor&2&200': np.array([-0.43839553940476644, 0.8642446918440131]),
'versicolor&2&201': np.array([0.4734444929420575, -0.6150974537943872]),
'versicolor&2&202': np.array([0.5369392542176313, -0.430867927332838]),
'versicolor&2&203': np.array([-0.19892251970509112, 0.5718543863753405]),
'versicolor&2&204': np.array([0.5071047612208237, -0.6507546896558788]),
'versicolor&2&205': np.array([0.5629877361048359, -0.4485515113017818]),
'versicolor&2&206': np.array([-0.3047657227470458, 0.6788631774846587]),
'versicolor&2&207': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&208': np.array([-0.05474251929945989, -0.7566498134597841]),
'versicolor&2&209': np.array([0.17291299562995102, -0.7651995812779756]),
'versicolor&2&210': np.array([0.37157691321004915, 0.12216227283618836]),
'versicolor&2&211': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&212': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&213': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&214': np.array([0.4741571944522723, -0.3872697414416878]),
'versicolor&2&215': np.array([0.24630541996506908, 0.24630541996506994]),
'versicolor&2&216': np.array([0.68663266357557, -0.6475988779804592]),
'versicolor&2&217': np.array([0.8701760330833639, -0.5914646440996656]),
'versicolor&2&218': np.array([0.6273836195848199, -0.15720981251964872]),
'versicolor&2&219': np.array([0.7292373173099087, -0.6975400952780954]),
'versicolor&2&220': np.array([0.9270035696082471, -0.640582639672401]),
'versicolor&2&221': np.array([0.6863652799597699, -0.21335694415409426]),
'versicolor&2&222': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&223': np.array([0.04449246321056282, -0.709644945972203]),
'versicolor&2&224': np.array([0.2953784217387408, -0.6750352694420283]),
'versicolor&2&225': np.array([-0.5775629083348267, 0.7118687782288384]),
'versicolor&2&226': np.array([-0.6016445709024666, 0.7366089009875252]),
'versicolor&2&227': np.array([-0.28356111726513855, -0.739741315226852]),
'versicolor&2&228': np.array([-0.0917622729715107, -0.7645776302158537]),
'versicolor&2&229': np.array([-0.25603689955471853, -0.451727980232351]),
'versicolor&2&230': np.array([-0.6016445709024666, 0.7366089009875252]),
'versicolor&2&231': np.array([-0.1269405801024398, -0.34161216844748166]),
'versicolor&2&232': np.array([-0.33176333807327857, 0.09538228407203546]),
'versicolor&2&233': np.array([-0.564696311454556, 0.6421194512020755]),
'versicolor&2&234': np.array([-0.12669523681593967, -0.32786313310034665]),
'versicolor&2&235': np.array([-0.35960845047491363, 0.1335988694092619]),
'versicolor&2&236': np.array([-0.589572650064144, 0.6697478899606418]),
'versicolor&2&237': np.array([-0.28356111726513855, -0.739741315226852]),
'versicolor&2&238': np.array([-0.28356111726513855, -0.739741315226852]),
'versicolor&2&239': np.array([-0.0917622729715107, -0.7645776302158537]),
'versicolor&2&240': np.array([0.05667262840030629, 0.4335746514880877]),
'versicolor&2&241': np.array([0.0202211257171063, 0.470123810164804]),
'versicolor&2&242': np.array([-0.052990507284891984, -0.7625494034929868]),
'versicolor&2&243': np.array([0.22461127196921116, -0.7375780139111495]),
'versicolor&2&244': np.array([0.3463149754241171, -0.5568366400939154]),
'versicolor&2&245': np.array([0.0202211257171063, 0.470123810164804]),
'versicolor&2&246': np.array([0.4022739113634462, -0.4700171786183992]),
'versicolor&2&247': np.array([0.5046771347249378, -0.33609610934748635]),
'versicolor&2&248': np.array([0.1370187510624256, 0.30303755274337163]),
'versicolor&2&249': np.array([0.41683021879255133, -0.4812793747667524]),
'versicolor&2&250': np.array([0.5150371666265885, -0.33852139184639396]),
'versicolor&2&251': np.array([0.10611499646955676, 0.33589829339460586]),
'versicolor&2&252': np.array([-0.052990507284891984, -0.7625494034929868]),
'versicolor&2&253': np.array([-0.052990507284891984, -0.7625494034929868]),
'versicolor&2&254': np.array([0.22461127196921116, -0.7375780139111495]),
'versicolor&2&255': np.array([0.5188517506916897, 0.036358567813067386]),
'versicolor&2&256': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&257': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&258': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&259': np.array([0.5354807894355184, -0.3418054346754283]),
'versicolor&2&260': np.array([0.5131939273945454, 0.04199748266790813]),
'versicolor&2&261': np.array([0.5761361484884252, -0.44637460220261904]),
'versicolor&2&262': np.array([0.7268664040181829, -0.40159406680426807]),
'versicolor&2&263': np.array([0.5917672401610737, -0.061499563231173816]),
'versicolor&2&264': np.array([0.5921993039887428, -0.46498571089163954]),
'versicolor&2&265': np.array([0.7470482158282458, -0.4169281153671854]),
'versicolor&2&266': np.array([0.5967658480721675, -0.06546963852548916]),
'versicolor&2&267': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&268': np.array([0.06285591932387405, -0.6914253444924359]),
'versicolor&2&269': np.array([0.34904320225465857, -0.6233384360811872]),
'versicolor&2&270': np.array([-0.8252668830593566, 0.11450866713130668]),
'versicolor&2&271': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&272': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&273': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&274': np.array([-0.8735738195653328, -0.046438180466149094]),
'versicolor&2&275': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&276': np.array([-0.8470213454017305, -0.0910504504559782]),
'versicolor&2&277': np.array([-0.8783521565540571, 0.01381094589198601]),
'versicolor&2&278': np.array([-0.8388485924434891, 0.09800790238640067]),
'versicolor&2&279': np.array([-0.8495871633670822, -0.08820642363054954]),
'versicolor&2&280': np.array([-0.8784816772224661, 0.017184907022714958]),
'versicolor&2&281': np.array([-0.835455914569297, 0.10189258327760495]),
'versicolor&2&282': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&283': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&284': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&285': np.array([-0.8252668830593566, 0.11450866713130668]),
'versicolor&2&286': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&287': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&288': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&289': np.array([-0.8735738195653328, -0.046438180466149094]),
'versicolor&2&290': np.array([-0.8211795643076095, 0.11869650771610692]),
'versicolor&2&291': np.array([-0.8470213454017305, -0.0910504504559782]),
'versicolor&2&292': np.array([-0.8783521565540571, 0.01381094589198601]),
'versicolor&2&293': np.array([-0.8388485924434891, 0.09800790238640067]),
'versicolor&2&294': np.array([-0.8495871633670822, -0.08820642363054954]),
'versicolor&2&295': np.array([-0.8784816772224661, 0.017184907022714958]),
'versicolor&2&296': np.array([-0.835455914569297, 0.10189258327760495]),
'versicolor&2&297': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&298': np.array([-0.6441664102689847, -0.3012046426099901]),
'versicolor&2&299': np.array([-0.7640280271176497, -0.19364537761420375]),
'versicolor&2&300': np.array([-0.5227340800279543, 0.4209267574088147]),
'versicolor&2&301': np.array([-0.5140708637198534, 0.4305361238057349]),
'versicolor&2&302': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&303': np.array([-0.2741128763380603, -0.7260889090887469]),
'versicolor&2&304': np.array([-0.6188410763351541, -0.22803625884668638]),
'versicolor&2&305': np.array([-0.5140708637198534, 0.4305361238057349]),
'versicolor&2&306': np.array([-0.56940429361245, -0.3442345437882425]),
'versicolor&2&307': np.array([-0.6452502612229726, -0.04686872432129788]),
'versicolor&2&308': np.array([-0.596973015481227, 0.37395461795328944]),
'versicolor&2&309': np.array([-0.5760086048531655, -0.3353570725513232]),
'versicolor&2&310': np.array([-0.6488228567611906, -0.03186184826812757]),
'versicolor&2&311': np.array([-0.5903420131350324, 0.384224764046184]),
'versicolor&2&312': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&313': np.array([-0.2661726847443776, -0.6902916602462779]),
'versicolor&2&314': np.array([-0.2741128763380603, -0.7260889090887469]),
'virginica&0&0': np.array([-0.7431524521056113, -0.24432235603856345]),
'virginica&0&1': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&2': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&3': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&4': np.array([-0.9706534384443797, 0.007448195602953232]),
'virginica&0&5': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&6': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&7': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&8': np.array([-0.8486399726113752, -0.13537345771621853]),
'virginica&0&9': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&10': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&11': np.array([-0.7870031444780577, -0.1952404625292782]),
'virginica&0&12': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&13': np.array([-0.9569238464170641, -0.02354905845282574]),
'virginica&0&14': np.array([-0.9677320606992984, -0.012432557482778654]),
'virginica&0&15': np.array([-0.7431524521056113, -0.24432235603856345]),
'virginica&0&16': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&17': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&18': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&19': np.array([-0.9706534384443797, 0.007448195602953232]),
'virginica&0&20': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&21': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&22': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&23': np.array([-0.8486399726113752, -0.13537345771621853]),
'virginica&0&24': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&25': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&26': np.array([-0.7870031444780577, -0.1952404625292782]),
'virginica&0&27': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&28': np.array([-0.9569238464170641, -0.02354905845282574]),
'virginica&0&29': np.array([-0.9677320606992984, -0.012432557482778654]),
'virginica&0&30': np.array([-0.19685199412911655, -0.7845879230594393]),
'virginica&0&31': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&32': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&33': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&34': np.array([-0.7974072911132788, 0.006894018772033604]),
'virginica&0&35': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&36': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&37': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&38': np.array([-0.3355030348883163, -0.6305271339971502]),
'virginica&0&39': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&40': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&41': np.array([-0.2519677855687844, -0.7134447168661863]),
'virginica&0&42': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&43': np.array([-0.7799744386472778, -0.026476616324402506]),
'virginica&0&44': np.array([-0.7942342242967624, -0.0119572163963601]),
'virginica&0&45': np.array([-0.05031696218434577, -0.929227611211748]),
'virginica&0&46': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&47': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&48': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&49': np.array([-0.4656481363306145, 0.007982539480288167]),
'virginica&0&50': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&51': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&52': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&53': np.array([-0.14241819268815753, -0.8424615476000691]),
'virginica&0&54': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&55': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&56': np.array([-0.1140907502997574, -0.8737800276630269]),
'virginica&0&57': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&58': np.array([-0.14198277461566922, -0.4577720226157396]),
'virginica&0&59': np.array([-0.4385442121294165, -0.05333645823279597]),
'virginica&0&60': np.array([0.029402442458921384, -0.9481684282717414]),
'virginica&0&61': np.array([0.009887859354111524, -0.9698143912008228]),
'virginica&0&62': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&63': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&64': np.array([0.3094460464703627, 0.11400643817329122]),
'virginica&0&65': np.array([0.009887859354111524, -0.9698143912008228]),
'virginica&0&66': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&67': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&68': np.array([0.19002455311770447, -0.8848597943731074]),
'virginica&0&69': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&70': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&71': np.array([0.1746467870122951, -0.9073062742839755]),
'virginica&0&72': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&73': np.array([0.11200181312407695, -0.5330612470996793]),
'virginica&0&74': np.array([0.19998284600732558, -0.3489062419702088]),
'virginica&0&75': np.array([0.0, -0.95124502153736]),
'virginica&0&76': np.array([0.0, -0.9708703761803881]),
'virginica&0&77': np.array([0.0, -0.5659706098422994]),
'virginica&0&78': np.array([0.0, -0.3962828716108186]),
'virginica&0&79': np.array([0.0, 0.2538069363248767]),
'virginica&0&80': np.array([0.0, -0.9708703761803881]),
'virginica&0&81': np.array([0.0, -0.5659706098422994]),
'virginica&0&82': np.array([0.0, -0.3962828716108186]),
'virginica&0&83': np.array([0.0, -0.8943993997517804]),
'virginica&0&84': np.array([0.0, -0.5659706098422994]),
'virginica&0&85': np.array([0.0, -0.3962828716108186]),
'virginica&0&86': np.array([0.0, -0.9166476163222441]),
'virginica&0&87': np.array([0.0, -0.3962828716108186]),
'virginica&0&88': np.array([0.0, -0.5466925844560601]),
'virginica&0&89': np.array([0.0, -0.38529908946531777]),
'virginica&0&90': np.array([-0.7431524521056113, -0.24432235603856345]),
'virginica&0&91': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&92': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&93': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&94': np.array([-0.9706534384443797, 0.007448195602953232]),
'virginica&0&95': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&96': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&97': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&98': np.array([-0.8486399726113752, -0.13537345771621853]),
'virginica&0&99': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&100': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&101': np.array([-0.7870031444780577, -0.1952404625292782]),
'virginica&0&102': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&103': np.array([-0.9569238464170641, -0.02354905845282574]),
'virginica&0&104': np.array([-0.9677320606992984, -0.012432557482778654]),
'virginica&0&105': np.array([-0.19685199412911655, -0.7845879230594393]),
'virginica&0&106': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&107': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&108': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&109': np.array([-0.7974072911132788, 0.006894018772033604]),
'virginica&0&110': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&111': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&112': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&113': np.array([-0.3355030348883163, -0.6305271339971502]),
'virginica&0&114': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&115': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&116': np.array([-0.2519677855687844, -0.7134447168661863]),
'virginica&0&117': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&118': np.array([-0.7799744386472778, -0.026476616324402506]),
'virginica&0&119': np.array([-0.7942342242967624, -0.0119572163963601]),
'virginica&0&120': np.array([-0.05031696218434577, -0.929227611211748]),
'virginica&0&121': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&122': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&123': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&124': np.array([-0.4656481363306145, 0.007982539480288167]),
'virginica&0&125': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&126': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&127': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&128': np.array([-0.14241819268815753, -0.8424615476000691]),
'virginica&0&129': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&130': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&131': np.array([-0.1140907502997574, -0.8737800276630269]),
'virginica&0&132': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&133': np.array([-0.14198277461566922, -0.4577720226157396]),
'virginica&0&134': np.array([-0.4385442121294165, -0.05333645823279597]),
'virginica&0&135': np.array([-0.19684482070614498, -0.7845939961595055]),
'virginica&0&136': np.array([-0.07475231751447156, -0.9062785678426409]),
'virginica&0&137': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&138': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&139': np.array([-0.8063011502229427, 0.4134300066735808]),
'virginica&0&140': np.array([-0.07475231751447156, -0.9062785678426409]),
'virginica&0&141': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&142': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&143': np.array([-0.2798927835773098, -0.6581136857450849]),
'virginica&0&144': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&145': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&146': np.array([-0.16106555563262584, -0.777621649099753]),
'virginica&0&147': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&148': np.array([-0.6898990333725056, -0.2534947697713122]),
'virginica&0&149': np.array([-0.769491694075929, -0.22884642137519118]),
'virginica&0&150': np.array([-0.7431524521056113, -0.24432235603856345]),
'virginica&0&151': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&152': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&153': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&154': np.array([-0.9706534384443797, 0.007448195602953232]),
'virginica&0&155': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&156': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&157': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&158': np.array([-0.8486399726113752, -0.13537345771621853]),
'virginica&0&159': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&160': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&161': np.array([-0.7870031444780577, -0.1952404625292782]),
'virginica&0&162': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&163': np.array([-0.9569238464170641, -0.02354905845282574]),
'virginica&0&164': np.array([-0.9677320606992984, -0.012432557482778654]),
'virginica&0&165': np.array([-0.19685199412911655, -0.7845879230594393]),
'virginica&0&166': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&167': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&168': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&169': np.array([-0.7974072911132788, 0.006894018772033604]),
'virginica&0&170': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&171': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&172': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&173': np.array([-0.3355030348883163, -0.6305271339971502]),
'virginica&0&174': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&175': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&176': np.array([-0.2519677855687844, -0.7134447168661863]),
'virginica&0&177': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&178': np.array([-0.7799744386472778, -0.026476616324402506]),
'virginica&0&179': np.array([-0.7942342242967624, -0.0119572163963601]),
'virginica&0&180': np.array([-0.05031696218434577, -0.929227611211748]),
'virginica&0&181': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&182': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&183': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&184': np.array([-0.4656481363306145, 0.007982539480288167]),
'virginica&0&185': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&186': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&187': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&188': np.array([-0.14241819268815753, -0.8424615476000691]),
'virginica&0&189': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&190': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&191': np.array([-0.1140907502997574, -0.8737800276630269]),
'virginica&0&192': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&193': np.array([-0.14198277461566922, -0.4577720226157396]),
'virginica&0&194': np.array([-0.4385442121294165, -0.05333645823279597]),
'virginica&0&195': np.array([-0.19684482070614498, -0.7845939961595055]),
'virginica&0&196': np.array([-0.07475231751447156, -0.9062785678426409]),
'virginica&0&197': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&198': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&199': np.array([-0.8063011502229427, 0.4134300066735808]),
'virginica&0&200': np.array([-0.07475231751447156, -0.9062785678426409]),
'virginica&0&201': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&202': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&203': np.array([-0.2798927835773098, -0.6581136857450849]),
'virginica&0&204': np.array([-0.6782037543706109, -0.29560073676989834]),
'virginica&0&205': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&206': np.array([-0.16106555563262584, -0.777621649099753]),
'virginica&0&207': np.array([-0.7694171988675237, -0.276633135028249]),
'virginica&0&208': np.array([-0.6898990333725056, -0.2534947697713122]),
'virginica&0&209': np.array([-0.769491694075929, -0.22884642137519118]),
'virginica&0&210': np.array([-0.7431524521056113, -0.24432235603856345]),
'virginica&0&211': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&212': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&213': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&214': np.array([-0.9706534384443797, 0.007448195602953232]),
'virginica&0&215': np.array([-0.4926091071260067, -0.49260910712601286]),
'virginica&0&216': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&217': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&218': np.array([-0.8486399726113752, -0.13537345771621853]),
'virginica&0&219': np.array([-0.9550700362273441, -0.025428672111930138]),
'virginica&0&220': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&221': np.array([-0.7870031444780577, -0.1952404625292782]),
'virginica&0&222': np.array([-0.9672121512728677, -0.012993005706020504]),
'virginica&0&223': np.array([-0.9569238464170641, -0.02354905845282574]),
'virginica&0&224': np.array([-0.9677320606992984, -0.012432557482778654]),
'virginica&0&225': np.array([-0.05031696218434577, -0.929227611211748]),
'virginica&0&226': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&227': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&228': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&229': np.array([-0.4656481363306145, 0.007982539480288167]),
'virginica&0&230': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&231': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&232': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&233': np.array([-0.14241819268815753, -0.8424615476000691]),
'virginica&0&234': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&235': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&236': np.array([-0.1140907502997574, -0.8737800276630269]),
'virginica&0&237': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&238': np.array([-0.14198277461566922, -0.4577720226157396]),
'virginica&0&239': np.array([-0.4385442121294165, -0.05333645823279597]),
'virginica&0&240': np.array([-0.11329659732608087, -0.8671819100849522]),
'virginica&0&241': np.array([-0.040390637135858574, -0.9402832917474078]),
'virginica&0&242': np.array([-0.5276460255602035, -0.28992233541586077]),
'virginica&0&243': np.array([-0.6392402874163683, -0.24114611970435948]),
'virginica&0&244': np.array([-0.6814868825686854, 0.35066801608083215]),
'virginica&0&245': np.array([-0.040390637135858574, -0.9402832917474078]),
'virginica&0&246': np.array([-0.5276460255602035, -0.28992233541586077]),
'virginica&0&247': np.array([-0.6392402874163683, -0.24114611970435948]),
'virginica&0&248': np.array([-0.16157511199607094, -0.7754323813403634]),
'virginica&0&249': np.array([-0.5276460255602035, -0.28992233541586077]),
'virginica&0&250': np.array([-0.6392402874163683, -0.24114611970435948]),
'virginica&0&251': np.array([-0.08968204532514226, -0.8491191210330045]),
'virginica&0&252': np.array([-0.6392402874163683, -0.24114611970435948]),
'virginica&0&253': np.array([-0.544626974647221, -0.24972982107967573]),
'virginica&0&254': np.array([-0.6426355680762406, -0.20016519137103667]),
'virginica&0&255': np.array([-0.19685199412911655, -0.7845879230594393]),
'virginica&0&256': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&257': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&258': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&259': np.array([-0.7974072911132788, 0.006894018772033604]),
'virginica&0&260': np.array([-0.07476043598366228, -0.9062715528546994]),
'virginica&0&261': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&262': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&263': np.array([-0.3355030348883163, -0.6305271339971502]),
'virginica&0&264': np.array([-0.7770298852793477, -0.029443430477147373]),
'virginica&0&265': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&266': np.array([-0.2519677855687844, -0.7134447168661863]),
'virginica&0&267': np.array([-0.7936433456054744, -0.012583752076496493]),
'virginica&0&268': np.array([-0.7799744386472778, -0.026476616324402506]),
'virginica&0&269': np.array([-0.7942342242967624, -0.0119572163963601]),
'virginica&0&270': np.array([-0.04201361383207032, -0.9372571358382161]),
'virginica&0&271': np.array([-0.014237661899709955, -0.9660323357290304]),
'virginica&0&272': np.array([-0.04813346258022244, -0.5416229439456887]),
'virginica&0&273': np.array([-0.3109532939139045, -0.22759134703604383]),
'virginica&0&274': np.array([-0.4167677904879879, 0.22207334821665425]),
'virginica&0&275': np.array([-0.014237661899709955, -0.9660323357290304]),
'virginica&0&276': np.array([-0.04813346258022244, -0.5416229439456887]),
'virginica&0&277': np.array([-0.3109532939139045, -0.22759134703604383]),
'virginica&0&278': np.array([-0.07857689135903215, -0.8696882596532965]),
'virginica&0&279': np.array([-0.04813346258022244, -0.5416229439456887]),
'virginica&0&280': np.array([-0.3109532939139045, -0.22759134703604383]),
'virginica&0&281': np.array([-0.05160969201296555, -0.9000166344885441]),
'virginica&0&282': np.array([-0.3109532939139045, -0.22759134703604383]),
'virginica&0&283': np.array([-0.0766197045034485, -0.5080325256323984]),
'virginica&0&284': np.array([-0.32767091750230254, -0.19689316772421933]),
'virginica&0&285': np.array([-0.05031696218434577, -0.929227611211748]),
'virginica&0&286': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&287': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&288': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&289': np.array([-0.4656481363306145, 0.007982539480288167]),
'virginica&0&290': np.array([-0.017148644765919676, -0.9632117581295891]),
'virginica&0&291': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&292': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&293': np.array([-0.14241819268815753, -0.8424615476000691]),
'virginica&0&294': np.array([-0.061515713893900315, -0.524561199322281]),
'virginica&0&295': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&296': np.array([-0.1140907502997574, -0.8737800276630269]),
'virginica&0&297': np.array([-0.4329463382004908, -0.057167210150691136]),
'virginica&0&298': np.array([-0.14198277461566922, -0.4577720226157396]),
'virginica&0&299': np.array([-0.4385442121294165, -0.05333645823279597]),
'virginica&0&300': np.array([0.029402442458921384, -0.9481684282717414]),
'virginica&0&301': np.array([0.009887859354111524, -0.9698143912008228]),
'virginica&0&302': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&303': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&304': np.array([0.3094460464703627, 0.11400643817329122]),
'virginica&0&305': np.array([0.009887859354111524, -0.9698143912008228]),
'virginica&0&306': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&307': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&308': np.array([0.19002455311770447, -0.8848597943731074]),
'virginica&0&309': np.array([0.009595083643662688, -0.5643652067423869]),
'virginica&0&310': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&311': np.array([0.1746467870122951, -0.9073062742839755]),
'virginica&0&312': np.array([0.13694026920485936, -0.36331091829858003]),
'virginica&0&313': np.array([0.11200181312407695, -0.5330612470996793]),
'virginica&0&314': np.array([0.19998284600732558, -0.3489062419702088]),
'virginica&1&0': np.array([0.37157553889555184, 0.1221600832023858]),
'virginica&1&1': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&2': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&3': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&4': np.array([0.4964962439921071, 0.3798215458387346]),
'virginica&1&5': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&6': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&7': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&8': np.array([0.22125635302655813, 0.2925832702358638]),
'virginica&1&9': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&10': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&11': np.array([0.10063786451829529, 0.4085974066833644]),
'virginica&1&12': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&13': np.array([0.8441748651745272, -0.6057436494968107]),
'virginica&1&14': np.array([0.6453274192140858, -0.6334259878992301]),
'virginica&1&15': np.array([0.37157553889555184, 0.1221600832023858]),
'virginica&1&16': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&17': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&18': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&19': np.array([0.4964962439921071, 0.3798215458387346]),
'virginica&1&20': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&21': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&22': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&23': np.array([0.22125635302655813, 0.2925832702358638]),
'virginica&1&24': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&25': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&26': np.array([0.10063786451829529, 0.4085974066833644]),
'virginica&1&27': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&28': np.array([0.8441748651745272, -0.6057436494968107]),
'virginica&1&29': np.array([0.6453274192140858, -0.6334259878992301]),
'virginica&1&30': np.array([-0.32199975656257646, 0.7482293552463756]),
'virginica&1&31': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&32': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&33': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&34': np.array([0.2619265016777598, 0.33491141590339474]),
'virginica&1&35': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&36': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&37': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&38': np.array([-0.2562642052727569, 0.6920266972283227]),
'virginica&1&39': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&40': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&41': np.array([-0.34479806250338163, 0.7789143553916729]),
'virginica&1&42': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&43': np.array([0.6253066100206679, -0.5612970743228719]),
'virginica&1&44': np.array([0.4159041613345079, -0.5802838287107943]),
'virginica&1&45': np.array([-0.7749499208750119, 0.8147189440804429]),
'virginica&1&46': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&47': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&48': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&49': np.array([-0.4079256832347186, 0.038455640985860955]),
'virginica&1&50': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&51': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&52': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&53': np.array([-0.6964303997553315, 0.7444536452136676]),
'virginica&1&54': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&55': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&56': np.array([-0.7213651642695392, 0.7718874443854203]),
'virginica&1&57': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&58': np.array([-0.5538416840542331, 0.2026191723113616]),
'virginica&1&59': np.array([-0.3472412936248763, -0.1219322389673262]),
'virginica&1&60': np.array([0.4933316375690332, 0.5272416708629276]),
'virginica&1&61': np.array([0.5041830043657418, 0.5392782673950876]),
'virginica&1&62': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&63': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&64': np.array([0.3093950298647913, 0.1140298206733954]),
'virginica&1&65': np.array([0.5041830043657418, 0.5392782673950876]),
'virginica&1&66': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&67': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&68': np.array([0.40694846236352233, 0.5109051764198169]),
'virginica&1&69': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&70': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&71': np.array([0.415695226122737, 0.5230815102377903]),
'virginica&1&72': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&73': np.array([0.28313251310829024, -0.10978015869508362]),
'virginica&1&74': np.array([0.20013484983664692, -0.3483612449300506]),
'virginica&1&75': np.array([0.0, 0.4756207622944677]),
'virginica&1&76': np.array([0.0, 0.4854334805210761]),
'virginica&1&77': np.array([0.0, -0.16885577975809632]),
'virginica&1&78': np.array([0.0, -0.39580588553855395]),
'virginica&1&79': np.array([0.0, 0.2538072707138344]),
'virginica&1&80': np.array([0.0, 0.4854334805210761]),
'virginica&1&81': np.array([0.0, -0.16885577975809632]),
'virginica&1&82': np.array([0.0, -0.39580588553855395]),
'virginica&1&83': np.array([0.0, 0.4904755652105692]),
'virginica&1&84': np.array([0.0, -0.16885577975809632]),
'virginica&1&85': np.array([0.0, -0.39580588553855395]),
'virginica&1&86': np.array([0.0, 0.5008471974438506]),
'virginica&1&87': np.array([0.0, -0.39580588553855395]),
'virginica&1&88': np.array([0.0, -0.14423919730424817]),
'virginica&1&89': np.array([0.0, -0.3847817540585927]),
'virginica&1&90': np.array([0.37157553889555184, 0.1221600832023858]),
'virginica&1&91': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&92': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&93': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&94': np.array([0.4964962439921071, 0.3798215458387346]),
'virginica&1&95': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&96': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&97': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&98': np.array([0.22125635302655813, 0.2925832702358638]),
'virginica&1&99': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&100': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&101': np.array([0.10063786451829529, 0.4085974066833644]),
'virginica&1&102': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&103': np.array([0.8441748651745272, -0.6057436494968107]),
'virginica&1&104': np.array([0.6453274192140858, -0.6334259878992301]),
'virginica&1&105': np.array([-0.32199975656257646, 0.7482293552463756]),
'virginica&1&106': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&107': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&108': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&109': np.array([0.2619265016777598, 0.33491141590339474]),
'virginica&1&110': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&111': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&112': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&113': np.array([-0.2562642052727569, 0.6920266972283227]),
'virginica&1&114': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&115': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&116': np.array([-0.34479806250338163, 0.7789143553916729]),
'virginica&1&117': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&118': np.array([0.6253066100206679, -0.5612970743228719]),
'virginica&1&119': np.array([0.4159041613345079, -0.5802838287107943]),
'virginica&1&120': np.array([-0.7749499208750119, 0.8147189440804429]),
'virginica&1&121': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&122': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&123': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&124': np.array([-0.4079256832347186, 0.038455640985860955]),
'virginica&1&125': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&126': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&127': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&128': np.array([-0.6964303997553315, 0.7444536452136676]),
'virginica&1&129': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&130': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&131': np.array([-0.7213651642695392, 0.7718874443854203]),
'virginica&1&132': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&133': np.array([-0.5538416840542331, 0.2026191723113616]),
'virginica&1&134': np.array([-0.3472412936248763, -0.1219322389673262]),
'virginica&1&135': np.array([0.5188109114552927, 0.03638964581864269]),
'virginica&1&136': np.array([0.5131478569192371, 0.04203387599862816]),
'virginica&1&137': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&138': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&139': np.array([0.5436097000280874, 0.1461891067488832]),
'virginica&1&140': np.array([0.5131478569192371, 0.04203387599862816]),
'virginica&1&141': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&142': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&143': np.array([0.4788153032824012, 0.08625929936974323]),
'virginica&1&144': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&145': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&146': np.array([0.46583127837967303, 0.09875847161509169]),
'virginica&1&147': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&148': np.array([0.7419884013108898, -0.4595742931114029]),
'virginica&1&149': np.array([0.6092194175719845, -0.5086479426935605]),
'virginica&1&150': np.array([0.37157553889555184, 0.1221600832023858]),
'virginica&1&151': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&152': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&153': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&154': np.array([0.4964962439921071, 0.3798215458387346]),
'virginica&1&155': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&156': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&157': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&158': np.array([0.22125635302655813, 0.2925832702358638]),
'virginica&1&159': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&160': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&161': np.array([0.10063786451829529, 0.4085974066833644]),
'virginica&1&162': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&163': np.array([0.8441748651745272, -0.6057436494968107]),
'virginica&1&164': np.array([0.6453274192140858, -0.6334259878992301]),
'virginica&1&165': np.array([-0.32199975656257646, 0.7482293552463756]),
'virginica&1&166': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&167': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&168': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&169': np.array([0.2619265016777598, 0.33491141590339474]),
'virginica&1&170': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&171': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&172': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&173': np.array([-0.2562642052727569, 0.6920266972283227]),
'virginica&1&174': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&175': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&176': np.array([-0.34479806250338163, 0.7789143553916729]),
'virginica&1&177': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&178': np.array([0.6253066100206679, -0.5612970743228719]),
'virginica&1&179': np.array([0.4159041613345079, -0.5802838287107943]),
'virginica&1&180': np.array([-0.7749499208750119, 0.8147189440804429]),
'virginica&1&181': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&182': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&183': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&184': np.array([-0.4079256832347186, 0.038455640985860955]),
'virginica&1&185': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&186': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&187': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&188': np.array([-0.6964303997553315, 0.7444536452136676]),
'virginica&1&189': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&190': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&191': np.array([-0.7213651642695392, 0.7718874443854203]),
'virginica&1&192': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&193': np.array([-0.5538416840542331, 0.2026191723113616]),
'virginica&1&194': np.array([-0.3472412936248763, -0.1219322389673262]),
'virginica&1&195': np.array([0.5188109114552927, 0.03638964581864269]),
'virginica&1&196': np.array([0.5131478569192371, 0.04203387599862816]),
'virginica&1&197': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&198': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&199': np.array([0.5436097000280874, 0.1461891067488832]),
'virginica&1&200': np.array([0.5131478569192371, 0.04203387599862816]),
'virginica&1&201': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&202': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&203': np.array([0.4788153032824012, 0.08625929936974323]),
'virginica&1&204': np.array([0.7329462736700701, -0.4610490766898857]),
'virginica&1&205': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&206': np.array([0.46583127837967303, 0.09875847161509169]),
'virginica&1&207': np.array([0.5965042032375719, -0.48856644624972617]),
'virginica&1&208': np.array([0.7419884013108898, -0.4595742931114029]),
'virginica&1&209': np.array([0.6092194175719845, -0.5086479426935605]),
'virginica&1&210': np.array([0.37157553889555184, 0.1221600832023858]),
'virginica&1&211': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&212': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&213': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&214': np.array([0.4964962439921071, 0.3798215458387346]),
'virginica&1&215': np.array([0.2463036871609408, 0.24630368716093934]),
'virginica&1&216': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&217': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&218': np.array([0.22125635302655813, 0.2925832702358638]),
'virginica&1&219': np.array([0.9105775730167809, -0.6842162738602727]),
'virginica&1&220': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&221': np.array([0.10063786451829529, 0.4085974066833644]),
'virginica&1&222': np.array([0.6718337295341265, -0.6620422637360074]),
'virginica&1&223': np.array([0.8441748651745272, -0.6057436494968107]),
'virginica&1&224': np.array([0.6453274192140858, -0.6334259878992301]),
'virginica&1&225': np.array([-0.7749499208750119, 0.8147189440804429]),
'virginica&1&226': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&227': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&228': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&229': np.array([-0.4079256832347186, 0.038455640985860955]),
'virginica&1&230': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&231': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&232': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&233': np.array([-0.6964303997553315, 0.7444536452136676]),
'virginica&1&234': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&235': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&236': np.array([-0.7213651642695392, 0.7718874443854203]),
'virginica&1&237': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&238': np.array([-0.5538416840542331, 0.2026191723113616]),
'virginica&1&239': np.array([-0.3472412936248763, -0.1219322389673262]),
'virginica&1&240': np.array([0.056623968925773045, 0.43360725859686644]),
'virginica&1&241': np.array([0.020169511418752378, 0.47015948158260334]),
'virginica&1&242': np.array([0.5806365328450952, -0.4726270680771261]),
'virginica&1&243': np.array([0.41462901544715686, -0.4964318942067897]),
'virginica&1&244': np.array([0.3351719071445682, 0.20616862401308342]),
'virginica&1&245': np.array([0.020169511418752378, 0.47015948158260334]),
'virginica&1&246': np.array([0.5806365328450952, -0.4726270680771261]),
'virginica&1&247': np.array([0.41462901544715686, -0.4964318942067897]),
'virginica&1&248': np.array([0.024556360933646205, 0.4723948285969902]),
'virginica&1&249': np.array([0.5806365328450952, -0.4726270680771261]),
'virginica&1&250': np.array([0.41462901544715686, -0.4964318942067897]),
'virginica&1&251': np.array([-0.0164329511444131, 0.5132208276383963]),
'virginica&1&252': np.array([0.41462901544715686, -0.4964318942067897]),
'virginica&1&253': np.array([0.581569928198426, -0.46134543884925855]),
'virginica&1&254': np.array([0.42361197252581306, -0.5068181610814407]),
'virginica&1&255': np.array([-0.32199975656257646, 0.7482293552463756]),
'virginica&1&256': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&257': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&258': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&259': np.array([0.2619265016777598, 0.33491141590339474]),
'virginica&1&260': np.array([-0.43843349141088417, 0.8642740701867917]),
'virginica&1&261': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&262': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&263': np.array([-0.2562642052727569, 0.6920266972283227]),
'virginica&1&264': np.array([0.7141739659554729, -0.661981914015288]),
'virginica&1&265': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&266': np.array([-0.34479806250338163, 0.7789143553916729]),
'virginica&1&267': np.array([0.4446001433508151, -0.6107546840046901]),
'virginica&1&268': np.array([0.6253066100206679, -0.5612970743228719]),
'virginica&1&269': np.array([0.4159041613345079, -0.5802838287107943]),
'virginica&1&270': np.array([-0.6288817118959938, 0.6849987400957501]),
'virginica&1&271': np.array([-0.6491819158994796, 0.7060292771859485]),
'virginica&1&272': np.array([-0.36354251586275393, 0.01503732165107865]),
'virginica&1&273': np.array([-0.2224264339516076, -0.2751400010362469]),
'virginica&1&274': np.array([-0.3507937472799825, 0.22709708691079003]),
'virginica&1&275': np.array([-0.6491819158994796, 0.7060292771859485]),
'virginica&1&276': np.array([-0.36354251586275393, 0.01503732165107865]),
'virginica&1&277': np.array([-0.2224264339516076, -0.2751400010362469]),
'virginica&1&278': np.array([-0.6219129029345898, 0.6860569455333333]),
'virginica&1&279': np.array([-0.36354251586275393, 0.01503732165107865]),
'virginica&1&280': np.array([-0.2224264339516076, -0.2751400010362469]),
'virginica&1&281': np.array([-0.6423063482710314, 0.7078274136226649]),
'virginica&1&282': np.array([-0.2224264339516076, -0.2751400010362469]),
'virginica&1&283': np.array([-0.38798262782075055, 0.05152547330256509]),
'virginica&1&284': np.array([-0.23804537254556749, -0.24790919248823104]),
'virginica&1&285': np.array([-0.7749499208750119, 0.8147189440804429]),
'virginica&1&286': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&287': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&288': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&289': np.array([-0.4079256832347186, 0.038455640985860955]),
'virginica&1&290': np.array([-0.8040309195416899, 0.8445152504134819]),
'virginica&1&291': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&292': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&293': np.array([-0.6964303997553315, 0.7444536452136676]),
'virginica&1&294': np.array([-0.582650696375085, 0.22335655671229132]),
'virginica&1&295': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&296': np.array([-0.7213651642695392, 0.7718874443854203]),
'virginica&1&297': np.array([-0.33108168891715994, -0.1364781674635115]),
'virginica&1&298': np.array([-0.5538416840542331, 0.2026191723113616]),
'virginica&1&299': np.array([-0.3472412936248763, -0.1219322389673262]),
'virginica&1&300': np.array([0.4933316375690332, 0.5272416708629276]),
'virginica&1&301': np.array([0.5041830043657418, 0.5392782673950876]),
'virginica&1&302': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&303': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&304': np.array([0.3093950298647913, 0.1140298206733954]),
'virginica&1&305': np.array([0.5041830043657418, 0.5392782673950876]),
'virginica&1&306': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&307': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&308': np.array([0.40694846236352233, 0.5109051764198169]),
'virginica&1&309': np.array([0.25657760110071476, -0.12592645350389117]),
'virginica&1&310': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&311': np.array([0.415695226122737, 0.5230815102377903]),
'virginica&1&312': np.array([0.13717260713320115, -0.36277799079016637]),
'virginica&1&313': np.array([0.28313251310829024, -0.10978015869508362]),
'virginica&1&314': np.array([0.20013484983664692, -0.3483612449300506]),
'virginica&2&0': np.array([0.37157691321004915, 0.12216227283618836]),
'virginica&2&1': np.array([0.24630541996506908, 0.24630541996506994]),
'virginica&2&2': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&3': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&4': np.array([0.4741571944522723, -0.3872697414416878]),
'virginica&2&5': np.array([0.24630541996506908, 0.24630541996506994]),
'virginica&2&6': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&7': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&8': np.array([0.6273836195848199, -0.15720981251964872]),
'virginica&2&9': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&10': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&11': np.array([0.6863652799597699, -0.21335694415409426]),
'virginica&2&12': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&13': np.array([0.11274898124253621, 0.6292927079496371]),
'virginica&2&14': np.array([0.32240464148521225, 0.645858545382009]),
'virginica&2&15': np.array([0.37157691321004915, 0.12216227283618836]),
'virginica&2&16': np.array([0.24630541996506908, 0.24630541996506994]),
'virginica&2&17': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&18': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&19': np.array([0.4741571944522723, -0.3872697414416878]),
'virginica&2&20': np.array([0.24630541996506908, 0.24630541996506994]),
'virginica&2&21': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&22': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&23': np.array([0.6273836195848199, -0.15720981251964872]),
'virginica&2&24': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&25': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&26': np.array([0.6863652799597699, -0.21335694415409426]),
'virginica&2&27': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&28': np.array([0.11274898124253621, 0.6292927079496371]),
'virginica&2&29': np.array([0.32240464148521225, 0.645858545382009]),
'virginica&2&30': np.array([0.5188517506916897, 0.036358567813067386]),
'virginica&2&31': np.array([0.5131939273945454, 0.04199748266790813]),
'virginica&2&32': np.array([0.06285591932387397, 0.6914253444924359]),
'virginica&2&33': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&34': np.array([0.5354807894355184, -0.3418054346754283]),
'virginica&2&35': np.array([0.5131939273945454, 0.04199748266790813]),
'virginica&2&36': np.array([0.06285591932387397, 0.6914253444924359]),
'virginica&2&37': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&38': np.array([0.5917672401610737, -0.061499563231173816]),
'virginica&2&39': np.array([0.06285591932387397, 0.6914253444924359]),
'virginica&2&40': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&41': np.array([0.5967658480721675, -0.06546963852548916]),
'virginica&2&42': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&43': np.array([0.15466782862660866, 0.5877736906472755]),
'virginica&2&44': np.array([0.37833006296225374, 0.5922410451071548]),
'virginica&2&45': np.array([0.8252668830593566, 0.11450866713130668]),
'virginica&2&46': np.array([0.8211795643076095, 0.11869650771610692]),
'virginica&2&47': np.array([0.644166410268985, 0.30120464260998964]),
'virginica&2&48': np.array([0.7640280271176497, 0.19364537761420375]),
'virginica&2&49': np.array([0.8735738195653328, -0.046438180466149094]),
'virginica&2&50': np.array([0.8211795643076095, 0.11869650771610692]),
'virginica&2&51': np.array([0.644166410268985, 0.30120464260998964]),
'virginica&2&52': np.array([0.7640280271176497, 0.19364537761420375]),
'virginica&2&53': np.array([0.8388485924434891, 0.09800790238640067]),
'virginica&2&54': np.array([0.644166410268985, 0.30120464260998964]),
'virginica&2&55': np.array([0.7640280271176497, 0.19364537761420375]),
'virginica&2&56': np.array([0.835455914569297, 0.10189258327760495]),
'virginica&2&57': np.array([0.7640280271176497, 0.19364537761420375]),
'virginica&2&58': np.array([0.6958244586699014, 0.2551528503043789]),
'virginica&2&59': np.array([0.7857855057542923, 0.17526869720012267]),
'virginica&2&60': np.array([-0.5227340800279543, 0.4209267574088147]),
'virginica&2&61': np.array([-0.5140708637198534, 0.4305361238057349]),
'virginica&2&62': np.array([-0.2661726847443776, 0.6902916602462779]),
'virginica&2&63': np.array([-0.2741128763380603, 0.7260889090887469]),
'virginica&2&64': np.array([-0.6188410763351541, -0.22803625884668638]),
'virginica&2&65': np.array([-0.5140708637198534, 0.4305361238057349]),
'virginica&2&66': np.array([-0.2661726847443776, 0.6902916602462779]),
'virginica&2&67': np.array([-0.2741128763380603, 0.7260889090887469]),
'virginica&2&68': np.array([-0.596973015481227, 0.37395461795328944]),
'virginica&2&69': np.array([-0.2661726847443776, 0.6902916602462779]),
'virginica&2&70': np.array([-0.2741128763380603, 0.7260889090887469]),
'virginica&2&71': np.array([-0.5903420131350324, 0.384224764046184]),
'virginica&2&72': np.array([-0.2741128763380603, 0.7260889090887469]),
'virginica&2&73': np.array([-0.3951343262323671, 0.6428414057947632]),
'virginica&2&74': np.array([-0.4001176958439725, 0.6972674869002595]),
'virginica&2&75': np.array([0.0, 0.47562425924289314]),
'virginica&2&76': np.array([0.0, 0.4854368956593117]),
'virginica&2&77': np.array([0.0, 0.7348263896003954]),
'virginica&2&78': np.array([0.0, 0.7920887571493729]),
'virginica&2&79': np.array([0.0, -0.507614207038711]),
'virginica&2&80': np.array([0.0, 0.4854368956593117]),
'virginica&2&81': np.array([0.0, 0.7348263896003954]),
'virginica&2&82': np.array([0.0, 0.7920887571493729]),
'virginica&2&83': np.array([0.0, 0.4039238345412103]),
'virginica&2&84': np.array([0.0, 0.7348263896003954]),
'virginica&2&85': np.array([0.0, 0.7920887571493729]),
'virginica&2&86': np.array([0.0, 0.41580041887839214]),
'virginica&2&87': np.array([0.0, 0.7920887571493729]),
'virginica&2&88': np.array([0.0, 0.6909317817603084]),
'virginica&2&89': np.array([0.0, 0.7700808435239105]),
'virginica&2&90': np.array([0.37157691321004915, 0.12216227283618836]),
'virginica&2&91': np.array([0.24630541996506908, 0.24630541996506994]),
'virginica&2&92': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&93': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&94': np.array([0.4741571944522723, -0.3872697414416878]),
'virginica&2&95': np.array([0.24630541996506908, 0.24630541996506994]),
'virginica&2&96': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&97': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&98': np.array([0.6273836195848199, -0.15720981251964872]),
'virginica&2&99': np.array([0.04449246321056297, 0.7096449459722027]),
'virginica&2&100': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&101': np.array([0.6863652799597699, -0.21335694415409426]),
'virginica&2&102': np.array([0.2953784217387408, 0.6750352694420284]),
'virginica&2&103': np.array([0.11274898124253621, 0.6292927079496371]),
'virginica&2&104': np.array([0.32240464148521225, 0.645858545382009]),
'virginica&2&105': np.array([0.5188517506916897, 0.036358567813067386]),
'virginica&2&106': np.array([0.5131939273945454, 0.04199748266790813]),
'virginica&2&107': np.array([0.06285591932387397, 0.6914253444924359]),
'virginica&2&108': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&109': np.array([0.5354807894355184, -0.3418054346754283]),
'virginica&2&110': np.array([0.5131939273945454, 0.04199748266790813]),
'virginica&2&111': np.array([0.06285591932387397, 0.6914253444924359]),
'virginica&2&112': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&113': np.array([0.5917672401610737, -0.061499563231173816]),
'virginica&2&114': np.array([0.06285591932387397, 0.6914253444924359]),
'virginica&2&115': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&116': np.array([0.5967658480721675, -0.06546963852548916]),
'virginica&2&117': np.array([0.34904320225465857, 0.6233384360811872]),
'virginica&2&118': np.array([0.15466782862660866, 0.5877736906472755]),
'virginica&2&119': np.array([0.37833006296225374, 0.5922410451071548]),
'virginica&2&120': np.array([0.8252668830593566, 0.11450866713130668]),
'virginica&2&121': np.array([0.8211795643076095, 0.11869650771610692]),
'virginica&2&122': np.array([0.644166410268985, 0.30120464260998964]),
'virginica&2&123': np.array([0.7640280271176497, 0.19364537761420375]),
'virginica&2&124': | np.array([0.8735738195653328, -0.046438180466149094]) | numpy.array |
import os
import numpy as np
import cv2
from typing import Tuple, Union
from scipy.io import loadmat
from image import Image
from multispectral_object import MSObject
#from b_Segmentation.RoI_selection import RoI
import pandas as pd
import pickle
import argparse
def get_parser():
"""Parse input arguments"""
parser = argparse.ArgumentParser(description='Generate multi-channel single-cell objects')
parser.add_argument(
'--path', dest='path', required=True,
help='Path to folder of one sample')
return parser
def get_FID(x):
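    # e.g. a hypothetical x['path'] of '/data/sample/F01_R02_DAPI.tif' yields 'F01_R02'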
temp = x['path'].split('/')
_temp = temp[len(temp) - 1].split('.')[0].split('_')
return _temp[0] + '_' + _temp[1]
def read_data(pan: list, folder: str) -> Tuple[list, np.ndarray, np.ndarray]:
fluor_images = []
for i in range(0, len(pan)):
fluor_images.append(cv2.imread(pan[i], cv2.IMREAD_UNCHANGED))
with open(folder + '/nuclei_mask.pickle', 'rb') as f:
nuclei_mask = pickle.load(f)
with open(folder + '/cell_mask.pickle', 'rb') as f:
cell_mask = pickle.load(f)
return fluor_images, cell_mask, nuclei_mask
def identify_largest_cell(mask: np.ndarray) -> list:
# calculate biggest width and height
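    # Example: if the largest cell spans rows 1-2 and columns 1-3, the function returns
    # [1, 2], i.e. [max row extent, max column extent] as (max index - min index);
    # cells covering only a single pixel are skipped.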
max_y = 0
max_x = 0
max_values = []
for j in range(1, mask.max() + 1):
cell_coordinates = np.where(mask == j)
if len(cell_coordinates[0]) <= 1:
continue
else:
current_max_y = cell_coordinates[0].max() - cell_coordinates[0].min()
current_max_x = cell_coordinates[1].max() - cell_coordinates[1].min()
if current_max_y > max_y:
max_y = current_max_y
if current_max_x > max_x:
max_x = current_max_x
max_values.append(max_y)
max_values.append(max_x)
return max_values
def cut_img(mask: np.ndarray, index: int, dim: list, raw: np.ndarray = None, is_raw: bool = False) -> np.ndarray:
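    # Crop the object labelled `index` out of `mask` (and, if is_raw, out of `raw`) and
    # paste it centred into a blank canvas of shape (dim[0] + 7, dim[1] + 7).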
one_mask = mask * (mask == index)
coordinates = np.where(mask == index)
min_y = coordinates[0].min()
max_y = coordinates[0].max()
min_x = coordinates[1].min()
max_x = coordinates[1].max()
new_img = np.zeros((dim[0] + 7, dim[1] + 7), np.uint16)
mask_cut = one_mask[min_y:max_y, min_x:max_x]
lower_y = int(new_img.shape[0] / 2 - mask_cut.shape[0] / 2)
upper_y = lower_y + mask_cut.shape[0]
lower_x = int(new_img.shape[1] / 2 - mask_cut.shape[1] / 2)
upper_x = lower_x + mask_cut.shape[1]
if is_raw:
one_raw = raw * (one_mask != 0)
raw_cut = one_raw[min_y:max_y, min_x:max_x]
raw_new = np.zeros((dim[0] + 7, dim[1] + 7), np.uint16)
raw_new[lower_y:upper_y, lower_x:upper_x] = raw_cut
img = raw_new
else:
mask_new = np.zeros((dim[0] + 7, dim[1] + 7), np.uint16)
mask_new[lower_y:upper_y, lower_x:upper_x] = mask_cut
co = | np.where(mask_new != 0) | numpy.where |
# -*- coding: utf-8 -*-
"""
Coursework 4: PCA and analogue finding
References:
https://www.esrl.noaa.gov/psd/data/gridded/data.ncep.reanalysis2.pressure.html
https://www.esrl.noaa.gov/psd/cgi-bin/db_search/DBListFiles.pl?did=59&tid=81620&vid=1498
https://www.esrl.noaa.gov/psd/cgi-bin/db_search/DBListFiles.pl?did=59&tid=81620&vid=1497
"""
import os
import datetime as dt # Python standard library datetime module
import numpy as np
import matplotlib.pyplot as plt
from scipy.io import netcdf as nc
from sklearn.decomposition import PCA
# Compute the euclidean distance between two days
def distance(a,b):
fact1 = 0.5*np.sum((a[5,:,:] - b[5,:,:]).astype('int64')**2)
fact2 = 0.5*np.sum((a[0,:,:] - b[0,:,:]).astype('int64')**2)
return np.sqrt(fact1 + fact2)
# Compute the n most analogous days to a given target day a0 from a set an
def analogues(a0,an,n):
dis = [distance(a0,a) for a in an]
ind = np.argsort(dis)[:n]
return ind
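# Illustrative sanity check of distance/analogues on synthetic data (not part of the
# coursework data; shapes mimic hgt[day, level, lat, lon] with 6 levels so indices 0 and 5 exist).
_demo = np.random.randint(0, 6000, size=(10, 6, 4, 5))
# The most analogous day to day 0 within the set is day 0 itself (distance 0).
print(analogues(_demo[0], _demo, 3))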
#%%
"""
Exercise 1: PCA
"""
# Load data and attributes
workpath = "C:/Users/guill/Documents/Carrera/GEOComp/PCA"
os.getcwd()
files = os.listdir(workpath)
f = nc.netcdf_file(workpath + "/hgt.2019.nc", 'r')
print(f.history)
print(f.dimensions)
print(f.variables)
time = f.variables['time'][:].copy()
time_bnds = f.variables['time_bnds'][:].copy()
time_units = f.variables['time'].units
level = f.variables['level'][:].copy()
lats = f.variables['lat'][:].copy()
lons = f.variables['lon'][:].copy()
hgt = f.variables['hgt'][:].copy()
hgt_units = f.variables['hgt'].units
hgt_scale = f.variables['hgt'].scale_factor
hgt_offset = f.variables['hgt'].add_offset
print(hgt.shape)
f.close()
"""
Example of the evolution of an air element
"""
plt.plot(time, hgt_offset + hgt[:, 1, 1, 1]*hgt_scale, c='r')
plt.show()
dt_time = [dt.date(1800, 1, 1) + dt.timedelta(hours=t)
for t in time]
np.min(dt_time)
np.max(dt_time)
"""
Spatial distribution of the geopotential altitude at level 500hPa, for the first day
"""
plt.contour(lons, lats, hgt[0,5,:,:])
plt.show()
hgt2 = hgt[:,5,:,:].reshape(len(time),len(lats)*len(lons))
# Find with PCA the 4 principal components
n_components=4
Y = hgt2.transpose()
pca = PCA(n_components=n_components)
pca.fit(Y)
print(pca.explained_variance_ratio_)
out = pca.singular_values_
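# The four leading components should capture most of the variance; print the cumulative ratio as a check.
print("Cumulative explained variance (4 PCs):", np.sum(pca.explained_variance_ratio_))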
Element_pca = pca.fit_transform(Y)
Element_pca = Element_pca.transpose(1,0).reshape(n_components,len(lats),len(lons))
# Plot 4 principal components spacially
fig = plt.figure()
fig.subplots_adjust(hspace=0.4, wspace=0.4)
for i in range(1, 5):
ax = fig.add_subplot(2, 2, i)
ax.text(0.5, 90, 'PCA-'+str(i),
fontsize=18, ha='center')
plt.contour(lons, lats, Element_pca[i-1,:,:])
plt.show()
#%%
"""
Exercise 2: Analogue finding
"""
f = nc.netcdf_file(workpath + "/hgt.2019.nc", 'r')
hgt_19 = f.variables['hgt'][:].copy()
f.close()
f = nc.netcdf_file(workpath + "/hgt.2020.nc", 'r')
hgt_20 = f.variables['hgt'][:].copy()
time_bnds_20 = f.variables['time_bnds'][:].copy()
f.close()
# Indexes of x in (-20,20) and y in (30,50)
lats_index = np.arange(16,25)
lons_index = np.arange(-8,9)
# Get day 2020/01/20 in desired subset
hours = (dt.date(2020,1,20) - dt.date(1800,1,1)).days*24
idx = | np.where(time_bnds_20[:,0] == hours) | numpy.where |
import sys
import os
src = os.path.join(os.pardir, 'src')
sys.path.append(src)
sys.path.append(os.path.join(src, 'neuralNetwork'))
sys.path.append(os.path.join(src, 'constrainedChasingEscapingEnv'))
sys.path.append(os.path.join(src, 'algorithms'))
sys.path.append(os.path.join(os.pardir, 'exec'))
import unittest
from ddt import ddt, data, unpack
import numpy as np
import math
from evaluateAugmentationWithinMujoco.augmentData import GenerateSymmetricData, \
GenerateSymmetricState, GenerateSymmetricDistribution, CalibrateState
from evaluateByStateDimension.preprocessData import AddFramesForTrajectory, ZeroValueInState
from analyticGeometryFunctions import transitePolarToCartesian
from evaluateByStateDimension.evaluate import ModifyEscaperInputState
from dataTools import createSymmetricVector
xBoundary = [-10, 10]
yBoundary = [-10, 10]
@ddt
class TestGenerateData(unittest.TestCase):
def setUp(self):
sheepSpeed = 2
degrees = [
math.pi / 2, 0, math.pi, -math.pi / 2, math.pi / 4,
-math.pi * 3 / 4, -math.pi / 4, math.pi * 3 / 4
]
self.sheepActionSpace = [
tuple(np.round(sheepSpeed * transitePolarToCartesian(degree)))
for degree in degrees
]
self.symmetries = [
np.array([1, 1]),
np.array([0, 1]),
np.array([1, 0]),
np.array([-1, 1])
]
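        # Presumably createSymmetricVector reflects a point about the axis spanned by each
        # vector: the main diagonal [1,1], the y-axis [0,1], the x-axis [1,0] and the
        # anti-diagonal [-1,1] (consistent with the ground truths used in the tests below).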
self.xPosIndex = [0, 1]
self.velIndex = [2, 3]
@data((2, 4, [1, 0, 10, 10, 0, 1, 10,
10], [1, 1], [0, 1, 10, 10, 1, 0, 10, 10]))
@unpack
def testGenerateSymmetricState(self, numOfAgent, stateDim, state, symmetry,
groundTruth):
round = lambda state: np.round(state, 10)
calibrateState = CalibrateState(xBoundary, yBoundary, round)
generateSymmetricState = GenerateSymmetricState(numOfAgent, stateDim,
self.xPosIndex,
self.velIndex,
createSymmetricVector,
calibrateState)
testState = generateSymmetricState(state, np.array(symmetry))
self.assertTrue(np.allclose(testState, np.array(groundTruth)))
@data(([0.25, 0.3, 0, 0, 0.45, 0, 0,
0], [1, 1], [0.3, 0.25, 0, 0, 0.45, 0, 0, 0]))
@unpack
def testGenerateSymmetricDistribution(self, distribution, symmetry,
groundTruth):
generateSymmetricDistribution = GenerateSymmetricDistribution(
self.sheepActionSpace, createSymmetricVector)
symmetricDistribution = generateSymmetricDistribution(
distribution, np.array(symmetry))
self.assertTrue(np.allclose(symmetricDistribution, groundTruth))
# (0, 1), (1, 0), (-1, 0), (0, -1), (1, 1), (-1, -1), (1, -1), (-1, 1)
@data((
[[1, 0.5, 0, 0, 2, 0.5, 0, 0], [0.25, 0.3, 0, 0, 0.45, 0, 0, 0], [1]],
2,
4,
[
np.array([[1, 0.5, 0, 0, 2, 0.5, 0, 0],
[0.25, 0.3, 0, 0, 0.45, 0, 0, 0]]),
np.array([[0.5, 1, 0, 0, 0.5, 2, 0, 0],
[0.3, 0.25, 0, 0, 0.45, 0, 0, 0]]), # symmetry: [1,1]
np.array([[-1, 0.5, 0, 0, -2, 0.5, 0, 0],
[0.25, 0, 0.3, 0, 0, 0, 0, 0.45]]), # symmetry: [0,1]
np.array([[1, -0.5, 0, 0, 2, -0.5, 0, 0],
[0, 0.3, 0, 0.25, 0, 0, 0.45, 0]]), # symmetry: [1,0]
np.array([[-0.5, -1, 0, 0, -0.5, -2, 0, 0],
[0, 0, 0.25, 0.3, 0, 0.45, 0, 0]]),
# symmetry: [-1,1]
np.array([[-0.5, 1, 0, 0, -0.5, 2, 0, 0],
[0.3, 0, 0.25, 0, 0, 0, 0, 0.45]]), # symmetry: [0,1]
np.array([[0.5, -1, 0, 0, 0.5, -2, 0, 0],
[0, 0.25, 0, 0.3, 0, 0, 0.45, 0]]), # symmetry: [1,0]
np.array([[-1, -0.5, 0, 0, -2, -0.5, 0, 0],
[0, 0, 0.3, 0.25, 0, 0.45, 0, 0]])
] # symmetry: [-1,1]
))
@unpack
def testGenerateSymmetricData(self, originalData, numOfAgent, stateDim,
groundTruth):
round = lambda state: np.round(state, 10)
calibrateState = CalibrateState(xBoundary, yBoundary, round)
generateSymmetricState = GenerateSymmetricState(numOfAgent, stateDim,
self.xPosIndex,
self.velIndex,
createSymmetricVector,
calibrateState)
generateSymmetricDistribution = GenerateSymmetricDistribution(
self.sheepActionSpace, createSymmetricVector)
generateSymmetricData = GenerateSymmetricData(
self.symmetries, generateSymmetricState,
generateSymmetricDistribution)
symmetricDataSet = generateSymmetricData(originalData)
symmetricDict = {
tuple(np.round(data[0], 2)): list(data[1])
for data in symmetricDataSet
}
groundTruthDict = {
tuple(data[0]): list(data[1]) for data in groundTruth
}
for key in symmetricDict.keys():
self.assertTrue(
np.all(
np.array(symmetricDict[key]) == np.array(
groundTruthDict[key])))
@data((0, [[[0, 1, 0, 1], [1]], [[1, 1, 1, 1], [1]], [[2, 2, 2, 2], [1]],
[[3, 3, 3, 3], [1]]], 3, [[[0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 1],
[1]],
[[0, 0, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1],
[1]],
[[0, 1, 0, 1, 1, 1, 1, 1, 2, 2, 2, 2],
[1]],
[[1, 1, 1, 1, 2, 2, 2, 2, 3, 3, 3, 3],
[1]]]))
@unpack
def testAddFramesForTrajectory(self, stateIndex, trajectory, numOfFrame,
groundTruth):
zeroValueInState = ZeroValueInState([1, 3])
addFrameForTraj = AddFramesForTrajectory(stateIndex, zeroValueInState)
newTraj = addFrameForTraj(trajectory, numOfFrame)
for index in range(len(groundTruth)):
self.assertTrue(
np.all(
np.array(newTraj[index][stateIndex] == np.array(
groundTruth[index][stateIndex]))))
@data((3, 4, [[0, 1, 2], [0, 1, 2]], [[2, 2, 3], [2, 2, 3]]))
@unpack
def testModifyEscaperInputState(self, numOfFrame, stateDim, initState,
nextState):
removeIndex = [2]
zeroValueInState = ZeroValueInState([1, 3])
modifyInputState = ModifyEscaperInputState(removeIndex, numOfFrame,
stateDim, zeroValueInState)
firstModify = modifyInputState(initState)
self.assertTrue(
np.all(
| np.array(firstModify) | numpy.array |
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
import h5py
from nn_utils import *
class deep_nn():
def __init__(self, layer_dims, learning_rate = 0.0075, num_iterations = 3000, print_cost=False):
self.layer_dims = layer_dims
self.learning_rate = learning_rate
self.num_iterations = num_iterations
self.print_cost = print_cost
self.parameters = {}
self.caches = []
def initialize_parameters_deep(self, layer_dims):
"""
Arguments:
layer_dims -- python array (list) containing the dimensions of each layer in our network
Returns:
parameters -- python dictionary containing your parameters "W1", "b1", ..., "WL", "bL":
Wl -- weight matrix of shape (layer_dims[l], layer_dims[l-1])
bl -- bias vector of shape (layer_dims[l], 1)
"""
L = len(layer_dims) # number of layers in the network
for l in range(1, L):
self.parameters['W' + str(l)] = np.random.randn(layer_dims[l], layer_dims[l-1]) / np.sqrt(layer_dims[l-1]) #*0.01
self.parameters['b' + str(l)] = np.zeros((layer_dims[l], 1))
assert(self.parameters['W' + str(l)].shape == (layer_dims[l], layer_dims[l-1]))
assert(self.parameters['b' + str(l)].shape == (layer_dims[l], 1))
def L_model_forward(self, X, parameters):
"""
Implement forward propagation for the [LINEAR->RELU]*(L-1)->LINEAR->SIGMOID computation
Arguments:
X -- data, numpy array of shape (input size, number of examples)
parameters -- output of initialize_parameters_deep()
Returns:
AL -- last post-activation value
caches -- list of caches containing:
every cache of linear_relu_forward() (there are L-1 of them, indexed from 0 to L-2)
the cache of linear_sigmoid_forward() (there is one, indexed L-1)
"""
caches = []
A = X
L = len(parameters) // 2 # number of layers in the neural network
# Implement [LINEAR -> RELU]*(L-1). Add "cache" to the "caches" list.
for l in range(1, L):
A_prev = A
A, cache = linear_activation_forward(A_prev, parameters['W' + str(l)], parameters['b' + str(l)], activation = "relu")
caches.append(cache)
# Implement LINEAR -> SIGMOID. Add "cache" to the "caches" list.
AL, cache = linear_activation_forward(A, parameters['W' + str(L)], parameters['b' + str(L)], activation = "sigmoid")
caches.append(cache)
assert(AL.shape == (1,X.shape[1]))
return AL, caches
def compute_cost(self, AL, Y):
"""
Implement the cost function defined by equation (7).
Arguments:
AL -- probability vector corresponding to your label predictions, shape (1, number of examples)
Y -- true "label" vector (for example: containing 0 if non-cat, 1 if cat), shape (1, number of examples)
Returns:
cost -- cross-entropy cost
"""
m = Y.shape[1]
# Compute loss from aL and y.
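        # Cross-entropy: cost = -(1/m) * sum( Y*log(AL) + (1 - Y)*log(1 - AL) )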
cost = (1./m) * (-np.dot(Y,np.log(AL).T) - np.dot(1-Y, np.log(1-AL).T))
cost = np.squeeze(cost) # To make sure your cost's shape is what we expect (e.g. this turns [[17]] into 17).
assert(cost.shape == ())
return cost
def L_model_backward(self, AL, Y, caches):
"""
Implement the backward propagation for the [LINEAR->RELU] * (L-1) -> LINEAR -> SIGMOID group
Arguments:
AL -- probability vector, output of the forward propagation (L_model_forward())
Y -- true "label" vector (containing 0 if non-cat, 1 if cat)
caches -- list of caches containing:
every cache of linear_activation_forward() with "relu" (there are (L-1) or them, indexes from 0 to L-2)
the cache of linear_activation_forward() with "sigmoid" (there is one, index L-1)
Returns:
grads -- A dictionary with the gradients
grads["dA" + str(l)] = ...
grads["dW" + str(l)] = ...
grads["db" + str(l)] = ...
"""
grads = {}
L = len(caches) # the number of layers
m = AL.shape[1]
Y = Y.reshape(AL.shape) # after this line, Y is the same shape as AL
# Initializing the backpropagation
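        # Derivative of the cross-entropy loss w.r.t. AL: dAL = -(Y/AL - (1 - Y)/(1 - AL))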
dAL = - (np.divide(Y, AL) - np.divide(1 - Y, 1 - AL))
grads["dA" + str(L)] = dAL
# Lth layer (SIGMOID -> LINEAR) gradients. Inputs: "AL, Y, caches". Outputs: "grads["dAL"], grads["dWL"], grads["dbL"]
current_cache = caches[L-1]
grads["dA" + str(L - 1)], grads["dW" + str(L)], grads["db" + str(L)] = linear_activation_backward(dAL, current_cache, activation = "sigmoid")
for l in reversed(range(L-1)):
# lth layer: (RELU -> LINEAR) gradients.
current_cache = caches[l]
dA_prev_temp, dW_temp, db_temp = linear_activation_backward(grads["dA" + str(l + 1)], current_cache, activation = "relu")
grads["dA" + str(l)] = dA_prev_temp
grads["dW" + str(l + 1)] = dW_temp
grads["db" + str(l + 1)] = db_temp
return grads
def fit(self, X, Y):#lr was 0.009
"""
Implements a L-layer neural network: [LINEAR->RELU]*(L-1)->LINEAR->SIGMOID.
Arguments:
X -- data, numpy array of shape (number of examples, num_px * num_px * 3)
Y -- true "label" vector (containing 0 if cat, 1 if non-cat), of shape (1, number of examples)
layers_dims -- list containing the input size and each layer size, of length (number of layers + 1).
learning_rate -- learning rate of the gradient descent update rule
num_iterations -- number of iterations of the optimization loop
print_cost -- if True, it prints the cost every 100 steps
Returns:
parameters -- parameters learnt by the model. They can then be used to predict.
"""
costs = [] # keep track of cost
# Parameters initialization.
### START CODE HERE ###
self.initialize_parameters_deep(self.layer_dims)
### END CODE HERE ###
# Loop (gradient descent)
for i in range(0, self.num_iterations):
# Forward propagation: [LINEAR -> RELU]*(L-1) -> LINEAR -> SIGMOID.
### START CODE HERE ### (≈ 1 line of code)
AL, caches = self.L_model_forward(X, self.parameters)
### END CODE HERE ###
# Compute cost.
### START CODE HERE ### (≈ 1 line of code)
cost = self.compute_cost(AL, Y)
### END CODE HERE ###
# Backward propagation.
### START CODE HERE ### (≈ 1 line of code)
grads = self.L_model_backward(AL, Y, caches)
### END CODE HERE ###
# Update parameters.
### START CODE HERE ### (≈ 1 line of code)
self.parameters = update_parameters(self.parameters, grads, self.learning_rate)
### END CODE HERE ###
# Print the cost every 100 training example
if self.print_cost and i % 100 == 0:
print ("Cost after iteration %i: %f" %(i, cost))
if self.print_cost and i % 100 == 0:
costs.append(cost)
'''
# plot the cost
plt.plot(np.squeeze(costs))
plt.ylabel('cost')
plt.xlabel('iterations (per tens)')
plt.title("Learning rate =" + str(self.learning_rate))
plt.show()
'''
def predict(self, X, y):
"""
This function is used to predict the results of a L-layer neural network.
Arguments:
X -- data set of examples you would like to label
parameters -- parameters of the trained model
Returns:
p -- predictions for the given dataset X
"""
m = X.shape[1]
n = len(self.parameters) // 2 # number of layers in the neural network
p = np.zeros((1,m))
# Forward propagation
probas, caches = self.L_model_forward(X, self.parameters)
# convert probas to 0/1 predictions
for i in range(0, probas.shape[1]):
if probas[0,i] > 0.5:
p[0,i] = 1
else:
p[0,i] = 0
#print results
#print ("predictions: " + str(p))
#print ("true labels: " + str(y))
print("Accuracy: " + str(np.sum((p == y)/m)))
return p
if __name__ == '__main__':
# load data
train_dataset = h5py.File('datasets/train_catvnoncat.h5', "r")
train_x_orig = np.array(train_dataset["train_set_x"][:]) # your train set features
train_y = np.array(train_dataset["train_set_y"][:]) # your train set labels
test_dataset = h5py.File('datasets/test_catvnoncat.h5', "r")
test_x_orig = | np.array(test_dataset["test_set_x"][:]) | numpy.array |
#!/usr/bin/env python
# Copyright 2014-2020 The PySCF Developers. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Author: <NAME> <<EMAIL>>
#
'''
PBC spin-restricted G0W0-AC QP eigenvalues with k-point sampling
This implementation has N^4 scaling, and is faster than GW-CD (N^4)
and analytic GW (N^6) methods.
GW-AC is recommended for valence states only, and is inaccurate for core states.
Method:
See <NAME> and <NAME>, arxiv:2007.03148 (2020) for details
    Compute Sigma on the imaginary frequency axis with density fitting,
    then analytically continue it to the real frequency axis.
Gaussian density fitting must be used (FFTDF and MDF are not supported).
'''
from functools import reduce
import time
import numpy
import numpy as np
import h5py
from scipy.optimize import newton, least_squares
from pyscf import lib
from pyscf.lib import logger
from pyscf.ao2mo import _ao2mo
from pyscf.ao2mo.incore import _conc_mos
from pyscf.pbc import df, dft, scf
from pyscf.pbc.mp.kmp2 import get_nocc, get_nmo, get_frozen_mask
from pyscf import __config__
einsum = lib.einsum
def kernel(gw, mo_energy, mo_coeff, orbs=None,
kptlist=None, nw=None, verbose=logger.NOTE):
'''GW-corrected quasiparticle orbital energies
Returns:
A list : converged, mo_energy, mo_coeff
'''
mf = gw._scf
if gw.frozen is None:
frozen = 0
else:
frozen = gw.frozen
assert (frozen == 0)
if orbs is None:
orbs = range(gw.nmo)
if kptlist is None:
kptlist = range(gw.nkpts)
nkpts = gw.nkpts
nklist = len(kptlist)
norbs = len(orbs)
# v_xc
dm = np.array(mf.make_rdm1())
v_mf = np.array(mf.get_veff()) - np.array(mf.get_j(dm_kpts=dm))
for k in range(nkpts):
v_mf[k] = reduce(numpy.dot, (mo_coeff[k].T.conj(), v_mf[k], mo_coeff[k]))
nocc = gw.nocc
nmo = gw.nmo
nvir = nmo-nocc
# v_hf from DFT/HF density
if gw.fc:
exxdiv = 'ewald'
else:
exxdiv = None
rhf = scf.KRHF(gw.mol, gw.kpts, exxdiv=exxdiv)
rhf.with_df = gw.with_df
if getattr(gw.with_df, '_cderi', None) is None:
        raise RuntimeError('Found incompatible integral scheme %s. '
                           'KGWAC can only be used with GDF integrals' %
                           gw.with_df.__class__)
vk = rhf.get_veff(gw.mol,dm_kpts=dm) - rhf.get_j(gw.mol,dm_kpts=dm)
for k in range(nkpts):
vk[k] = reduce(numpy.dot, (mo_coeff[k].T.conj(), vk[k], mo_coeff[k]))
# Grids for integration on imaginary axis
freqs,wts = _get_scaled_legendre_roots(nw)
# Compute self-energy on imaginary axis i*[0,iw_cutoff]
sigmaI, omega = get_sigma_diag(gw, orbs, kptlist, freqs, wts, iw_cutoff=5.)
# Analytic continuation
coeff = []
if gw.ac == 'twopole':
for k in range(nklist):
coeff.append(AC_twopole_diag(sigmaI[k], omega, orbs, nocc))
elif gw.ac == 'pade':
for k in range(nklist):
coeff_tmp, omega_fit = AC_pade_thiele_diag(sigmaI[k], omega)
coeff.append(coeff_tmp)
coeff = np.array(coeff)
conv = True
# This code does not support metals
homo = -99.; lumo = 99.
for k in range(nkpts):
if homo < mf.mo_energy[k][nocc-1]:
homo = mf.mo_energy[k][nocc-1]
if lumo > mf.mo_energy[k][nocc]:
lumo = mf.mo_energy[k][nocc]
ef = (homo+lumo)/2.
mo_energy = np.zeros_like(np.array(mf.mo_energy))
for k in range(nklist):
kn = kptlist[k]
for p in orbs:
if gw.linearized:
# linearized G0W0
de = 1e-6
ep = mf.mo_energy[kn][p]
#TODO: analytic sigma derivative
if gw.ac == 'twopole':
sigmaR = two_pole(ep-ef, coeff[k,:,p-orbs[0]]).real
dsigma = two_pole(ep-ef+de, coeff[k,:,p-orbs[0]]).real - sigmaR.real
elif gw.ac == 'pade':
sigmaR = pade_thiele(ep-ef, omega_fit[p-orbs[0]], coeff[k,:,p-orbs[0]]).real
dsigma = pade_thiele(ep-ef+de, omega_fit[p-orbs[0]], coeff[k,:,p-orbs[0]]).real - sigmaR.real
zn = 1.0/(1.0-dsigma/de)
e = ep + zn*(sigmaR.real + vk[kn,p,p].real - v_mf[kn,p,p].real)
mo_energy[kn,p] = e
else:
# self-consistently solve QP equation
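                    # QP equation solved self-consistently for omega:
                    #   omega = e_MF(p) + Re[Sigma_c(omega - ef)] + vk(p,p) - v_mf(p,p)
                    # where vk is the HF exchange and v_mf the mean-field (xc) potential.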
def quasiparticle(omega):
if gw.ac == 'twopole':
sigmaR = two_pole(omega-ef, coeff[k,:,p-orbs[0]]).real
elif gw.ac == 'pade':
sigmaR = pade_thiele(omega-ef, omega_fit[p-orbs[0]], coeff[k,:,p-orbs[0]]).real
return omega - mf.mo_energy[kn][p] - (sigmaR.real + vk[kn,p,p].real - v_mf[kn,p,p].real)
try:
e = newton(quasiparticle, mf.mo_energy[kn][p], tol=1e-6, maxiter=100)
mo_energy[kn,p] = e
except RuntimeError:
conv = False
mo_coeff = mf.mo_coeff
if gw.verbose >= logger.DEBUG:
numpy.set_printoptions(threshold=nmo)
for k in range(nkpts):
logger.debug(gw, ' GW mo_energy @ k%d =\n%s', k,mo_energy[k])
numpy.set_printoptions(threshold=1000)
return conv, mo_energy, mo_coeff
def get_rho_response(gw, omega, mo_energy, Lpq, kL, kidx):
'''
Compute density response function in auxiliary basis at freq iw
'''
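    # Pi_PQ(iw) = (4/Nk) * sum_{k,i,a} [e_ia / (w**2 + e_ia**2)] * L^P_ia * conj(L^Q_ia),
    # with e_ia = e_i - e_a; the factor 4 combines spin (x2) and the two poles on the imaginary axis (x2).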
nkpts, naux, nmo, nmo = Lpq.shape
nocc = gw.nocc
kpts = gw.kpts
kscaled = gw.mol.get_scaled_kpts(kpts)
kscaled -= kscaled[0]
# Compute Pi for kL
Pi = np.zeros((naux,naux),dtype=np.complex128)
for i, kpti in enumerate(kpts):
# Find ka that conserves with ki and kL (-ki+ka+kL=G)
a = kidx[i]
eia = mo_energy[i,:nocc,None] - mo_energy[a,None,nocc:]
eia = eia/(omega**2+eia*eia)
Pia = einsum('Pia,ia->Pia',Lpq[i][:,:nocc,nocc:],eia)
# Response from both spin-up and spin-down density
Pi += 4./nkpts * einsum('Pia,Qia->PQ',Pia,Lpq[i][:,:nocc,nocc:].conj())
return Pi
def get_sigma_diag(gw, orbs, kptlist, freqs, wts, iw_cutoff=None, max_memory=8000):
'''
Compute GW correlation self-energy (diagonal elements)
in MO basis on imaginary axis
'''
mo_energy = np.array(gw._scf.mo_energy)
mo_coeff = np.array(gw._scf.mo_coeff)
nocc = gw.nocc
nmo = gw.nmo
nkpts = gw.nkpts
kpts = gw.kpts
nklist = len(kptlist)
nw = len(freqs)
norbs = len(orbs)
mydf = gw.with_df
# possible kpts shift center
kscaled = gw.mol.get_scaled_kpts(kpts)
kscaled -= kscaled[0]
# This code does not support metals
homo = -99.; lumo = 99.
for k in range(nkpts):
if homo < mo_energy[k][nocc-1]:
homo = mo_energy[k][nocc-1]
if lumo > mo_energy[k][nocc]:
lumo = mo_energy[k][nocc]
if (lumo-homo)<1e-3:
        logger.warn(gw, 'This GW-AC code does not support metals!')
ef = (homo+lumo)/2.
# Integration on numerical grids
if iw_cutoff is not None:
nw_sigma = sum(iw < iw_cutoff for iw in freqs) + 1
else:
nw_sigma = nw + 1
# Compute occ for -iw and vir for iw separately
# to avoid branch cuts in analytic continuation
omega_occ = np.zeros((nw_sigma),dtype=np.complex128)
omega_vir = np.zeros((nw_sigma),dtype=np.complex128)
omega_occ[0] = 1j*0.; omega_occ[1:] = -1j*freqs[:(nw_sigma-1)]
omega_vir[0] = 1j*0.; omega_vir[1:] = 1j*freqs[:(nw_sigma-1)]
orbs_occ = [i for i in orbs if i < nocc]
norbs_occ = len(orbs_occ)
emo_occ = np.zeros((nkpts,nmo,nw_sigma),dtype=np.complex128)
emo_vir = np.zeros((nkpts,nmo,nw_sigma),dtype=np.complex128)
for k in range(nkpts):
emo_occ[k] = omega_occ[None,:] + ef - mo_energy[k][:,None]
emo_vir[k] = omega_vir[None,:] + ef - mo_energy[k][:,None]
sigma = np.zeros((nklist,norbs,nw_sigma),dtype=np.complex128)
omega = np.zeros((norbs,nw_sigma),dtype=np.complex128)
for p in range(norbs):
orbp = orbs[p]
if orbp < nocc:
omega[p] = omega_occ.copy()
else:
omega[p] = omega_vir.copy()
if gw.fc:
# Set up q mesh for q->0 finite size correction
q_pts = np.array([1e-3,0,0]).reshape(1,3)
nq_pts = len(q_pts)
q_abs = gw.mol.get_abs_kpts(q_pts)
# Get qij = 1/sqrt(Omega) * < psi_{ik} | e^{iqr} | psi_{ak-q} > at q: (nkpts, nocc, nvir)
qij = get_qij(gw, q_abs[0], mo_coeff)
for kL in range(nkpts):
# Lij: (ki, L, i, j) for looping every kL
Lij = []
# kidx: save kj that conserves with kL and ki (-ki+kj+kL=G)
# kidx_r: save ki that conserves with kL and kj (-ki+kj+kL=G)
kidx = np.zeros((nkpts),dtype=np.int64)
kidx_r = np.zeros((nkpts),dtype=np.int64)
for i, kpti in enumerate(kpts):
for j, kptj in enumerate(kpts):
# Find (ki,kj) that satisfies momentum conservation with kL
kconserv = -kscaled[i] + kscaled[j] + kscaled[kL]
is_kconserv = np.linalg.norm(np.round(kconserv) - kconserv) < 1e-12
if is_kconserv:
kidx[i] = j
kidx_r[j] = i
logger.debug(gw, "Read Lpq (kL: %s / %s, ki: %s, kj: %s)"%(kL+1, nkpts, i, j))
Lij_out = None
# Read (L|pq) and ao2mo transform to (L|ij)
Lpq = []
for LpqR, LpqI, sign in mydf.sr_loop([kpti, kptj], max_memory=0.1*gw._scf.max_memory, compact=False):
Lpq.append(LpqR+LpqI*1.0j)
# support unequal naux on different k points
Lpq = np.vstack(Lpq).reshape(-1,nmo**2)
tao = []
ao_loc = None
moij, ijslice = _conc_mos(mo_coeff[i], mo_coeff[j])[2:]
Lij_out = _ao2mo.r_e2(Lpq, moij, ijslice, tao, ao_loc, out=Lij_out)
Lij.append(Lij_out.reshape(-1,nmo,nmo))
Lij = np.asarray(Lij)
naux = Lij.shape[1]
if kL == 0:
for w in range(nw):
# body dielectric matrix eps_body
Pi = get_rho_response(gw, freqs[w], mo_energy, Lij, kL, kidx)
eps_body_inv = np.linalg.inv(np.eye(naux)-Pi)
if gw.fc:
# head dielectric matrix eps_00
Pi_00 = get_rho_response_head(gw, freqs[w], mo_energy, qij)
eps_00 = 1. - 4. * np.pi/np.linalg.norm(q_abs[0])**2 * Pi_00
# wings dielectric matrix eps_P0
Pi_P0 = get_rho_response_wing(gw, freqs[w], mo_energy, Lij, qij)
eps_P0 = -np.sqrt(4.*np.pi) / np.linalg.norm(q_abs[0]) * Pi_P0
# inverse dielectric matrix
eps_inv_00 = 1./(eps_00 - np.dot(np.dot(eps_P0.conj(),eps_body_inv),eps_P0))
eps_inv_P0 = -eps_inv_00 * np.dot(eps_body_inv, eps_P0)
# head correction
Del_00 = 2./np.pi * (6.*np.pi**2/gw.mol.vol/nkpts)**(1./3.) * (eps_inv_00 - 1.)
eps_inv_PQ = eps_body_inv
g0_occ = wts[w] * emo_occ / (emo_occ**2+freqs[w]**2)
g0_vir = wts[w] * emo_vir / (emo_vir**2+freqs[w]**2)
for k in range(nklist):
kn = kptlist[k]
# Find km that conserves with kn and kL (-km+kn+kL=G)
km = kidx_r[kn]
Qmn = einsum('Pmn,PQ->Qmn',Lij[km][:,:,orbs].conj(),eps_inv_PQ-np.eye(naux))
Wmn = 1./nkpts * einsum('Qmn,Qmn->mn',Qmn,Lij[km][:,:,orbs])
sigma[k][:norbs_occ] += -einsum('mn,mw->nw',Wmn[:,:norbs_occ],g0_occ[km])/np.pi
sigma[k][norbs_occ:] += -einsum('mn,mw->nw',Wmn[:,norbs_occ:],g0_vir[km])/np.pi
if gw.fc:
# apply head correction
assert(kn == km)
sigma[k][:norbs_occ] += -Del_00 * g0_occ[kn][orbs][:norbs_occ] /np.pi
sigma[k][norbs_occ:] += -Del_00 * g0_vir[kn][orbs][norbs_occ:] /np.pi
# apply wing correction
Wn_P0 = einsum('Pnm,P->nm',Lij[kn],eps_inv_P0).diagonal()
Wn_P0 = Wn_P0.real * 2.
Del_P0 = np.sqrt(gw.mol.vol/4./np.pi**3) * (6.*np.pi**2/gw.mol.vol/nkpts)**(2./3.) * Wn_P0[orbs]
sigma[k][:norbs_occ] += -einsum('n,nw->nw',Del_P0[:norbs_occ],g0_occ[kn][orbs][:norbs_occ]) /np.pi
sigma[k][norbs_occ:] += -einsum('n,nw->nw',Del_P0[norbs_occ:],g0_vir[kn][orbs][norbs_occ:]) /np.pi
else:
for w in range(nw):
Pi = get_rho_response(gw, freqs[w], mo_energy, Lij, kL, kidx)
Pi_inv = np.linalg.inv(np.eye(naux)-Pi)-np.eye(naux)
g0_occ = wts[w] * emo_occ / (emo_occ**2+freqs[w]**2)
g0_vir = wts[w] * emo_vir / (emo_vir**2+freqs[w]**2)
for k in range(nklist):
kn = kptlist[k]
# Find km that conserves with kn and kL (-km+kn+kL=G)
km = kidx_r[kn]
Qmn = einsum('Pmn,PQ->Qmn',Lij[km][:,:,orbs].conj(),Pi_inv)
Wmn = 1./nkpts * einsum('Qmn,Qmn->mn',Qmn,Lij[km][:,:,orbs])
sigma[k][:norbs_occ] += -einsum('mn,mw->nw',Wmn[:,:norbs_occ],g0_occ[km])/np.pi
sigma[k][norbs_occ:] += -einsum('mn,mw->nw',Wmn[:,norbs_occ:],g0_vir[km])/np.pi
return sigma, omega
def get_rho_response_head(gw, omega, mo_energy, qij):
'''
Compute head (G=0, G'=0) density response function in auxiliary basis at freq iw
'''
nkpts, nocc, nvir = qij.shape
nocc = gw.nocc
kpts = gw.kpts
# Compute Pi head
Pi_00 = 0j
for i, kpti in enumerate(kpts):
eia = mo_energy[i,:nocc,None] - mo_energy[i,None,nocc:]
eia = eia/(omega**2+eia*eia)
Pi_00 += 4./nkpts * einsum('ia,ia->',eia,qij[i].conj()*qij[i])
return Pi_00
def get_rho_response_wing(gw, omega, mo_energy, Lpq, qij):
'''
Compute wing (G=P, G'=0) density response function in auxiliary basis at freq iw
'''
nkpts, naux, nmo, nmo = Lpq.shape
nocc = gw.nocc
kpts = gw.kpts
# Compute Pi wing
Pi = | np.zeros(naux,dtype=np.complex128) | numpy.zeros |
import os.path
import os.path as osp
import sys
sys.path.append(osp.dirname(osp.dirname(osp.abspath(__file__))))
import numpy as np
from scipy import stats
from sklearn.metrics import average_precision_score
from sklearn.metrics import roc_auc_score
from . import anom_utils
def eval_ood_measure(conf, pred, seg_label, mask=None):
correct_map = pred == seg_label
out_label = np.logical_not(correct_map)
in_scores = - conf[np.logical_not(out_label)]
out_scores = - conf[out_label]
if (len(out_scores) != 0) and (len(in_scores) != 0):
auroc, aupr, fpr = anom_utils.get_and_print_results(out_scores, in_scores)
return auroc, aupr, fpr
else:
print("This image does not contain any OOD pixels or is only OOD.")
return None
def eval_alarm_metrics(pred_ious, real_ious):
mae = np.nanmean(np.abs(np.array(pred_ious) - np.array(real_ious)), axis=0)
std = np.nanstd(np.abs( | np.array(pred_ious) | numpy.array |
import numpy as np
import pygame
import math
from pygame import mixer
import random
ROW_COUNT = 8
COLUMN_COUNT = 8
SQUARESIZE = 80
DARK_GREEN = (123, 164, 40)
LIGHT_GREEN = (139, 185, 45)
WHITE = (255, 255, 255)
BLACK = (0, 0, 0)
white_disc = pygame.image.load("white.png")
black_disc = pygame.image.load("black.png")
icon = pygame.image.load("icon.png")
pygame.init()
screen = pygame.display.set_mode((658, 806))
pygame.display.set_caption("othello")
pygame.display.set_icon(icon)
def indexToPosition(i, j):
pos = [0, 0]
pos[0] = SQUARESIZE * j + (2 * (j + 1))
pos[1] = 150 + (SQUARESIZE * i) + (2 * i)
return pos
def positionToIndex(x, y):
index = [0, 0]
index[0] = int(math.floor((x - 2) / (SQUARESIZE + 2)))
index[1] = math.floor((y - 150) / (SQUARESIZE + 2))
return index
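# Illustrative round-trip check for the two coordinate helpers above (added
# sketch, not part of the original snippet): board index (0, 0) maps to the
# top-left square's pixel origin and back.
assert indexToPosition(0, 0) == [2, 150]
assert positionToIndex(2, 150) == [0, 0]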
def create_board():
# 1 stands for black
# 2 stands for white
board = | np.zeros((ROW_COUNT, COLUMN_COUNT)) | numpy.zeros |
"""
Plot figure 2 in manuscript for dynamical responses to sea ice loss in WACCM4
experiments [FIT-HIT, FIC-CIT, FICT-HIT]. Current variables include T2M and
RNET. Time period includes December through February [DJF].
Notes
-----
Author : <NAME>
Date : 4 February 2018
"""
### Import modules
import numpy as np
import datetime
import read_MonthlyOutput as MO
import calc_Utilities as UT
import matplotlib.pyplot as plt
### Define directories
directorydata = '/surtsey/zlabe/simu/'
directorydata2 = '/home/zlabe/Documents/Research/SITperturb/Data/'
directoryfigure = '/home/zlabe/Desktop/'
#directoryfigure = '/home/zlabe/Documents/Research/SITperturb/Figures/'
### Define time
now = datetime.datetime.now()
currentmn = str(now.month)
currentdy = str(now.day)
currentyr = str(now.year)
currenttime = currentmn + '_' + currentdy + '_' + currentyr
titletime = currentmn + '/' + currentdy + '/' + currentyr
print('\n' '----Plotting Fig 2 - %s----' % titletime)
### Alott time series
year1 = 1900
year2 = 2000
years = np.arange(year1,year2+1,1)
### Define constants
runnames = [r'HIT',r'FIT',r'HIT2',r'FICT2',r'FICT']
experiments = ['FIT--HIT','FICT2--HIT2','FICT--HIT']
### Read in SIC data
lat,lon,time,lev,sic = MO.readExperi(directorydata,'SIC','HIT','surface')
### Find where ice is < 15% (values are 0 to 100 in sic array)
sicq = sic[5,:,:,:].copy()
sicq[np.where(sicq < 10)] = 0.0
sicq[ | np.where((sicq >= 10) & (sicq <= 100)) | numpy.where |
from __future__ import division
from collections import defaultdict
import math
import numpy as np
import pandas as pd
from scipy import optimize
from I3Tray import NaN, Inf
from icecube import icetray, dataio, dataclasses, toprec, phys_services, recclasses
from icecube.icetop_Level3_scripts.functions import count_stations
from icecube.icetop_Level3_scripts.segments import level2_IceTop, IceTopQualityCuts
from icecube.icetop_Level3_scripts.modules import SnowCorrectPulses
from icecube.icetop_Level3_scripts import icetop_globals
from comptools.LDFfunctions import fit_DLP_params
def add_opening_angle(frame, particle1='MCPrimary', particle2='Laputop', key='angle'):
angle = phys_services.I3Calculator.angle(frame[particle1], frame[particle2])
frame[key] = dataclasses.I3Double(angle)
def add_IceTop_quality_cuts(frame):
passed = all(frame['IT73AnalysisIceTopQualityCuts'].values())
frame['passed_IceTopQualityCuts'] = icetray.I3Bool(passed)
def add_InIce_quality_cuts(frame):
passed = all(frame['IT73AnalysisInIceQualityCuts'].values())
frame['passed_InIceQualityCuts'] = icetray.I3Bool(passed)
# Add individual InIce quality cuts to frame
for key, value in frame['IT73AnalysisInIceQualityCuts']:
frame['passed_{}'.format(key)] = icetray.I3Bool(value)
def add_nstations(frame, pulses='SRTCoincPulses'):
nstation = count_stations(dataclasses.I3RecoPulseSeriesMap.from_frame(frame, pulses))
frame['NStations'] = icetray.I3Int(nstation)
def lap_fitstatus_ok(frame):
status_ok = frame['Laputop'].fit_status == dataclasses.I3Particle.OK
frame['lap_fitstatus_ok'] = icetray.I3Bool(status_ok)
def add_fraction_containment(frame, track):
scaling = phys_services.I3ScaleCalculator(frame['I3Geometry'])
icetop_containment = scaling.scale_icetop(frame[track])
frame['FractionContainment_{}_IceTop'.format(track)] = dataclasses.I3Double(icetop_containment)
inice_containment = scaling.scale_inice(frame[track])
frame['FractionContainment_{}_InIce'.format(track)] = dataclasses.I3Double(inice_containment)
class AddFractionContainment(icetray.I3ConditionalModule):
def __init__(self, context):
icetray.I3ConditionalModule.__init__(self, context)
self.AddParameter('track', 'Track to be used in fraction containment', 'Laputop')
self.AddOutBox('OutBox')
def Configure(self):
self.track = self.GetParameter('track')
pass
def Geometry(self, frame):
# print('Working on Geometry frame')
self.geometry = frame['I3Geometry']
self.scaling = phys_services.I3ScaleCalculator(self.geometry)
self.PushFrame(frame)
def Physics(self, frame):
# print('Working on Physics frame')
# print('track = {}'.format(self.track))
# print('keys = {}'.format(frame.keys()))
icetop_containment = self.scaling.scale_icetop(frame[self.track])
frame['FractionContainment_{}_IceTop'.format(self.track)] = dataclasses.I3Double(icetop_containment)
inice_containment = self.scaling.scale_inice(frame[self.track])
frame['FractionContainment_{}_InIce'.format(self.track)] = dataclasses.I3Double(inice_containment)
self.PushFrame(frame)
def Finish(self):
return
def add_IceTop_tankXYcharge(frame, pulses):
frame['I3RecoPulseSeriesMap_union'] = dataclasses.I3RecoPulseSeriesMapUnion(frame, pulses)
pulse_map = dataclasses.I3RecoPulseSeriesMap.from_frame(frame, 'I3RecoPulseSeriesMap_union')
geomap = frame['I3Geometry'].omgeo
tanks_x, tanks_y, tanks_charge = [], [], []
for omkey, pulses in pulse_map:
x, y, z = geomap[omkey].position
tanks_x.append(x)
tanks_y.append(y)
charge = sum([pulse.charge for pulse in pulses])
tanks_charge.append(charge)
if tanks_x and tanks_y and tanks_charge:
frame['tanks_x'] = dataclasses.I3VectorDouble(tanks_x)
frame['tanks_y'] = dataclasses.I3VectorDouble(tanks_y)
frame['tanks_charge'] = dataclasses.I3VectorDouble(tanks_charge)
del frame['I3RecoPulseSeriesMap_union']
class AddIceTopTankXYCharge(icetray.I3ConditionalModule):
def __init__(self, context):
icetray.I3ConditionalModule.__init__(self, context)
pulses = ['IceTopHLCSeedRTPulses', 'IceTopLaputopSeededSelectedSLC']
self.AddParameter('pulses',
'Pulses to calculate distances to from track',
pulses,
)
# bins = np.linspace(-1000, 1000, 25, dtype=float)
self.AddOutBox('OutBox')
def Configure(self):
self.pulses = self.GetParameter('pulses')
pass
def Geometry(self, frame):
self.geometry = frame['I3Geometry']
self.geomap = self.geometry.omgeo
self.PushFrame(frame)
def Physics(self, frame):
pulses_key = 'I3RecoPulseSeriesMap_union'
frame[pulses_key] = dataclasses.I3RecoPulseSeriesMapUnion(frame, self.pulses)
pulse_map = dataclasses.I3RecoPulseSeriesMap.from_frame(frame, pulses_key)
tank_x, tank_y, tank_charge = [], [], []
for omkey, pulses in pulse_map:
x, y, z = self.geomap[omkey].position
tank_x.append(x)
tank_y.append(y)
# Check for nan charges
charge = 0
for pulse in pulses:
if pulse.charge != NaN:
charge += pulse.charge
else:
print('pulse.charge is NaN!!')
# charge = sum([pulse.charge for pulse in pulses])
tank_charge.append(charge)
if tank_x and tank_y and tank_charge:
tank_charge = np.nan_to_num(tank_charge)
frame['tank_x'] = dataclasses.I3VectorDouble(tank_x)
frame['tank_y'] = dataclasses.I3VectorDouble(tank_y)
frame['tank_charge'] = dataclasses.I3VectorDouble(tank_charge)
# hist, _, _ = np.histogram2d(tanks_x, tanks_y,
# bins=[self.xbins, self.ybins],
# weights=tanks_charge)
# self.hists.append(hist)
# event_header = frame['I3EventHeader']
# event_id = '{}_{}_{}_{}'.format(self.sim,
# event_header.run_id,
# event_header.event_id,
# event_header.sub_event_id)
# self.event_ids.append(event_id)
del frame[pulses_key]
self.PushFrame(frame)
def Finish(self):
# with h5py.File(self.outfile, 'w') as f:
# charge_dist_dataset = f.create_dataset('charge_dist',
# data=np.asarray(self.hists))
# event_id_dataset = f.create_dataset('event_id',
# data=np.asarray(self.event_ids))
return
class AddInIceMuonRadius(icetray.I3ConditionalModule):
def __init__(self, context):
icetray.I3ConditionalModule.__init__(self, context)
self.AddParameter('track', 'Track to calculate distances from', 'Laputop')
self.AddParameter('pulses', 'Pulses to calculate distances to from track', 'CoincMuonReco_LineFit')
self.AddParameter('min_DOM', 'Minimum DOM number to be considered', 1)
self.AddParameter('max_DOM', 'Maximum DOM number to be considered', 60)
self.AddOutBox('OutBox')
def Configure(self):
self.track = self.GetParameter('track')
self.pulses = self.GetParameter('pulses')
self.min_DOM = self.GetParameter('min_DOM')
self.max_DOM = self.GetParameter('max_DOM')
self.get_dist = phys_services.I3Calculator.closest_approach_distance
pass
def Geometry(self, frame):
self.geometry = frame['I3Geometry']
self.geomap = self.geometry.omgeo
self.PushFrame(frame)
def Physics(self, frame):
track = frame[self.track]
pulse_map = dataclasses.I3RecoPulseSeriesMap.from_frame(frame, self.pulses)
dists, charges = [], []
for omkey, pulses in pulse_map:
# Throw out Deep Core strings (want homogenized total charge)
if (omkey.string < 79) and (omkey.om >= self.min_DOM) and (omkey.om <= self.max_DOM):
# Get distance of closest approach to DOM from track
dist = self.get_dist(track, self.geomap[omkey].position)
dists.append(dist)
# Get charge recorded in DOM
charge = np.sum([pulse.charge for pulse in pulses])
charges.append(charge)
# Ensure that both dists and charges have non-zero size
if dists and charges:
frame['inice_dom_dists_{}_{}'.format(self.min_DOM, self.max_DOM)] = dataclasses.I3VectorDouble(dists)
frame['inice_dom_charges_{}_{}'.format(self.min_DOM, self.max_DOM)] = dataclasses.I3VectorDouble(charges)
dists = np.asarray(dists)
charges = np.asarray(charges)
avg_dist = np.average(dists)
median_dist = np.median(dists)
std_dists = np.std(dists)
one_std_mask = (dists > avg_dist + std_dists) | (dists < avg_dist - std_dists)
half_std_mask = (dists > avg_dist + 2*std_dists) | (dists < avg_dist - 2*std_dists)
frac_outside_one_std = dists[one_std_mask].shape[0]/dists.shape[0]
frac_outside_two_std = dists[half_std_mask].shape[0]/dists.shape[0]
# Add variables to frame
frame['avg_inice_radius'] = dataclasses.I3Double(avg_dist)
frame['median_inice_radius'] = dataclasses.I3Double(median_dist)
frame['std_inice_radius'] = dataclasses.I3Double(std_dists)
frame['frac_outside_one_std_inice_radius'] = dataclasses.I3Double(frac_outside_one_std)
frame['frac_outside_two_std_inice_radius'] = dataclasses.I3Double(frac_outside_two_std)
# frame['qweighted_inice_radius_{}_{}'.format(self.min_DOM, self.max_DOM)] = dataclasses.I3Double(np.average(dists, weights=charges))
#
# frame['invqweighted_inice_radius_{}_{}'.format(self.min_DOM, self.max_DOM)] = dataclasses.I3Double(np.average(dists, weights=1/charges))
self.PushFrame(frame)
def Finish(self):
return
class AddIceTopNNCharges(icetray.I3ConditionalModule):
def __init__(self, context):
icetray.I3ConditionalModule.__init__(self, context)
self.AddParameter('pulses',
'Pulses to calculate distances to from track',
'SRTCoincPulses')
self.AddOutBox('OutBox')
def Configure(self):
self.pulses = self.GetParameter('pulses')
self.omkey_to_position = {}
pass
def Geometry(self, frame):
self.geometry = frame['I3Geometry']
self.geomap = self.geometry.omgeo
self.PushFrame(frame)
def Physics(self, frame):
union_key = 'I3RecoPulseSeriesMap_union'
frame[union_key] = dataclasses.I3RecoPulseSeriesMapUnion(frame,
self.pulses)
pulse_map = dataclasses.I3RecoPulseSeriesMap.from_frame(frame,
union_key)
# tanks_x, tanks_y, tanks_charge = [], [], []
tank_charges = defaultdict(list)
# tank_x = defaultdict(list)
# tank_y = defaultdict(list)
for omkey, omgeo in self.geomap:
# Only interested in saving IceTop OM charges
if omgeo.omtype.name != 'IceTop':
continue
# x, y, z = omgeo.position
# tank_x[omkey].append(x)
# tank_y[omkey].append(y)
try:
pulses = pulse_map[omkey]
charge = sum([pulse.charge for pulse in pulses])
except KeyError:
charge = 0
tank_charges[omkey].append(charge)
# if tanks_x and tanks_y and tanks_charge:
# frame['tanks_x'] = dataclasses.I3VectorDouble(tanks_x)
# frame['tanks_y'] = dataclasses.I3VectorDouble(tanks_y)
# frame['tanks_charge'] = dataclasses.I3VectorDouble(tanks_charge)
# self.tank_charges.append(pd.DataFrame(tank_charges))
del frame[union_key]
frame['NNcharges'] = dataclasses.I3MapKeyVectorDouble(tank_charges)
print(frame['NNcharges'])
# frame['tank_x'] = dataclasses.I3MapKeyVectorDouble(tank_x)
# frame['tank_y'] = dataclasses.I3MapKeyVectorDouble(tank_y)
self.PushFrame(frame)
def Finish(self):
# df_charges = pd.DataFrame(self.tank_charges)
# columns = {c:'{}_{}_{}'.format(c.string, c.om, c.pmt) for c in df_charges.columns}
# df_charges.rename(index=str, columns=columns, inplace=True)
# with pd.HDFStore('test_charges_1.hdf') as output_store:
# output_store['dataframe'] = df_charges
return
class AddIceTopChargeDistance(icetray.I3ConditionalModule):
def __init__(self, context):
icetray.I3ConditionalModule.__init__(self, context)
self.AddParameter('track', 'Track to calculate distances from', 'Laputop')
self.AddParameter('pulses', 'Pulses to calculate distances to from track', 'SRTCoincPulses')
self.AddParameter('min_dist', 'Minimum distance to include', 0)
self.AddOutBox('OutBox')
def Configure(self):
self.track = self.GetParameter('track')
self.pulses = self.GetParameter('pulses')
self.min_dist = self.GetParameter('min_dist')
self.get_dist = phys_services.I3Calculator.closest_approach_distance
pass
def Geometry(self, frame):
self.geometry = frame['I3Geometry']
self.geomap = self.geometry.omgeo
self.PushFrame(frame)
def Physics(self, frame):
track = frame[self.track]
x_track, y_track, z_track = track.pos
frame['I3RecoPulseSeriesMap_union'] = dataclasses.I3RecoPulseSeriesMapUnion(frame, self.pulses)
pulse_map = dataclasses.I3RecoPulseSeriesMap.from_frame(frame,
'I3RecoPulseSeriesMap_union')
charges = []
dists = []
for omkey, omgeo in self.geomap:
# Only interested in saving IceTop OM charges
if omgeo.omtype.name != 'IceTop':
continue
try:
pulses = pulse_map[omkey]
charge = sum([pulse.charge for pulse in pulses])
x, y, z = omgeo.position
dist = np.sqrt((x - x_track)**2 + (y - y_track)**2)
except KeyError:
continue
charges.append(charge)
dists.append(dist)
# tanks_dist, tanks_charge = [], []
# for omkey, pulses in pulse_map:
# # Get distance of closest approach to DOM from track
# dist = self.get_dist(track, self.geomap[omkey].position)
# tanks_dist.append(dist)
# # Get charge recorded in DOM
# charge = sum([pulse.charge for pulse in pulses])
# tanks_charge.append(charge)
if dists and charges:
# frame.Put('tanks_charge_{}'.format(self.track), dataclasses.I3VectorDouble(tanks_charge))
# frame.Put('tanks_dist_{}'.format(self.track), dataclasses.I3VectorDouble(tanks_dist))
# frame.Put('IceTop_charge', dataclasses.I3Double( np.sum(charges) ))
# Convert to ndarrays for easy array manipulation
dists = np.asarray(dists)
charges = np.asarray(charges)
# Sometimes there are nan pulses...not sure why
charges = np.nan_to_num(charges)
distance_mask = dists >= self.min_dist
total_charge = np.sum(charges[distance_mask])
else:
total_charge = 0.0
if np.isnan(total_charge).any():
print('total_charge = {}'.format(total_charge))
print('dists = {}'.format(dists))
print('charges = {}'.format(charges))
print('distance_mask = {}'.format(distance_mask))
print('self.min_dist = {}'.format(self.min_dist))
total_charge = dataclasses.I3Double(total_charge)
frame.Put('IceTop_charge_beyond_{}m'.format(self.min_dist),
total_charge)
#
# try:
# lap_params = frame['LaputopParams']
# lap_log_s125 = lap_params.value(recclasses.LaputopParameter.Log10_S125)
# lap_beta = lap_params.value(recclasses.LaputopParameter.Beta)
# tank_dist_mask = tanks_dist > 11
# # beta, log_s125 = fit_DLP_params(tanks_charge[distance_mask],
# # tanks_dist[distance_mask], lap_log_s125, lap_beta)
# log_s125, beta = fit_DLP_params(tanks_charge[distance_mask],
# tanks_dist[distance_mask], lap_log_s125, lap_beta)
# # print('lap_beta, refit_beta = {}, {}'.format(lap_beta, beta))
# # print('lap_log_s125, refit_log_s125 = {}, {}'.format(lap_log_s125, log_s125))
# # print('='*20)
# except Exception as e:
# print('Refitting shower to DLP didn\'t work out. '
# 'Setting to NaN...')
# print(e)
# log_s125, beta = NaN, NaN
# pass
# frame.Put('refit_beta', dataclasses.I3Double(beta))
# frame.Put('refit_log_s125', dataclasses.I3Double(log_s125))
# # print('='*20)
del frame['I3RecoPulseSeriesMap_union']
self.PushFrame(frame)
def Finish(self):
return
class AddIceTopLogQLogR(icetray.I3ConditionalModule):
def __init__(self, context):
icetray.I3ConditionalModule.__init__(self, context)
self.AddParameter('track', 'Track to calculate distances from', 'Laputop')
self.AddParameter('pulses', 'Pulses to calculate distances to from track', 'SRTCoincPulses')
self.AddOutBox('OutBox')
def Configure(self):
self.track = self.GetParameter('track')
self.pulses = self.GetParameter('pulses')
self.get_dist = phys_services.I3Calculator.closest_approach_distance
# self.charge_vs_dist = {}
pass
def Geometry(self, frame):
self.geometry = frame['I3Geometry']
self.geomap = self.geometry.omgeo
self.PushFrame(frame)
def Physics(self, frame):
track = frame[self.track]
frame['I3RecoPulseSeriesMap_union'] = dataclasses.I3RecoPulseSeriesMapUnion(frame, self.pulses)
pulse_map = dataclasses.I3RecoPulseSeriesMap.from_frame(frame, 'I3RecoPulseSeriesMap_union')
tanks_dist, tanks_charge = [], []
for omkey, pulses in pulse_map:
# Get distance of closest approach to DOM from track
dist = self.get_dist(track, self.geomap[omkey].position)
tanks_dist.append(dist)
# Get charge recorded in DOM
charge = sum([pulse.charge for pulse in pulses])
tanks_charge.append(charge)
# dist_bins = np.linspace(10, 1000, 100)
dist_bins = | np.logspace(2, 3, 25) | numpy.logspace |
#!/usr/bin/env python
# coding: utf-8
import os
import cv2
import numpy as np
import pandas as pd
import pydicom
from tqdm import tqdm
# Global variables:
DATA_BASE_PATH = '../data/' # location of raw data
TRAIN_DIR = 'stage_2_train_images/'
TEST_DIR = 'stage_2_test_images/'
PNG_DIR = 'png_256/' # where to save preprocessed data
TRAIN_IMG_STATS_FILE = 'train_img_stats.csv' # where to write meta data and some pixel statistics for each train image
TEST_IMG_STATS_FILE = 'test_img_stats.csv' # where to write meta data and some pixel statistics for each test image
RESIZE = 256 # crop and resize images to a square of this size
MINCROP = 256 # don't crop so much that the resulting image ends up smaller than this size
# Auxilliary functions
def get_first_of_dicom_field(x):
"""
Get x[0] if x is a 'pydicom.multival.MultiValue', otherwise get x.
Each result is transformed into an Int if possible, otherwise transformed to a String.
"""
if isinstance(x, pydicom.multival.MultiValue):
result = x[0]
else:
result = x
# transform to int or str
try:
result = int(result)
except (ValueError, TypeError):
result = str(result)
return result
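# Tiny illustration of get_first_of_dicom_field on scalar header values (added
# for clarity, not part of the original snippet); for a pydicom MultiValue the
# first element is taken before the same int/str fallback is applied.
assert get_first_of_dicom_field(-600.0) == -600
assert get_first_of_dicom_field("MONOCHROME2") == "MONOCHROME2"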
def get_dicom_fields(data):
"""
Get slope, intercept, windowing parameters, etc. from the DICOM header
"""
dicom_field_names = [
"patient_ID",
"study_instance_ID",
"series_instance_ID",
"study_ID",
"bits_allocated",
"bits_stored",
"pixel_representation",
"window_center",
"window_width",
"intercept",
"slope"]
dicom_fields_raw = [
data[('0010', '0020')].value, # patient ID
data[('0020', '000D')].value, # study instance UID
data[('0020', '000E')].value, # series instance UID
data[('0020', '0010')].value, # study ID
data[('0028', '0100')].value, # bits allocated
data[('0028', '0101')].value, # bits stored
data[('0028', '0103')].value, # pixel representation (0: unsigned int, 1: signed int)
data[('0028', '1050')].value, # window center
data[('0028', '1051')].value, # window width
data[('0028', '1052')].value, # intercept
data[('0028', '1053')].value # slope
]
dicom_fields_values = [get_first_of_dicom_field(x) for x in dicom_fields_raw]
return dict(zip(dicom_field_names, dicom_fields_values))
def make_12bit(img):
"""
Note: this function is not needed for the given data.
Transform the input image form 16 bit into 12 bit format.
Assumes that the pixel values have been transformed to
floating point numbers.
"""
assert img.dtype == "float32"
if np.max(img) - np.min(img) > 4096:
img = img * 4096.0 / 65536.0
return img
def crop_and_square(img):
"""Crop background, then pad the resulting image to be square"""
assert | np.all(img >= 0.0) | numpy.all |
# 3D IoU caculate code for 3D object detection
# Kent 2018/12
# https://github.com/AlienCat-K/3D-IoU-Python/blob/master/3D-IoU-Python.py
import numpy as np
from scipy.spatial import ConvexHull
from numpy import *
def polygon_clip(subjectPolygon, clipPolygon):
""" Clip a polygon with another polygon.
Ref: https://rosettacode.org/wiki/Sutherland-Hodgman_polygon_clipping#Python
Args:
subjectPolygon: a list of (x,y) 2d points, any polygon.
clipPolygon: a list of (x,y) 2d points, has to be *convex*
Note:
**points have to be counter-clockwise ordered**
Return:
a list of (x,y) vertex point for the intersection polygon.
"""
def inside(p):
return(cp2[0]-cp1[0])*(p[1]-cp1[1]) > (cp2[1]-cp1[1])*(p[0]-cp1[0])
def computeIntersection():
dc = [ cp1[0] - cp2[0], cp1[1] - cp2[1] ]
dp = [ s[0] - e[0], s[1] - e[1] ]
n1 = cp1[0] * cp2[1] - cp1[1] * cp2[0]
n2 = s[0] * e[1] - s[1] * e[0]
n3 = 1.0 / (dc[0] * dp[1] - dc[1] * dp[0])
return [(n1*dp[0] - n2*dc[0]) * n3, (n1*dp[1] - n2*dc[1]) * n3]
outputList = subjectPolygon
cp1 = clipPolygon[-1]
for clipVertex in clipPolygon:
cp2 = clipVertex
inputList = outputList
outputList = []
s = inputList[-1]
for subjectVertex in inputList:
e = subjectVertex
if inside(e):
if not inside(s):
outputList.append(computeIntersection())
outputList.append(e)
elif inside(s):
outputList.append(computeIntersection())
s = e
cp1 = cp2
if len(outputList) == 0:
return None
return(outputList)
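# Minimal usage sketch for polygon_clip (illustrative addition, not part of the
# original snippet): both polygons are counter-clockwise and the clip polygon is
# convex. Clipping a unit square against a copy shifted by (0.5, 0.5) leaves the
# overlapping 0.5 x 0.5 square.
_subject = [(0.0, 0.0), (1.0, 0.0), (1.0, 1.0), (0.0, 1.0)]
_clip = [(0.5, 0.5), (1.5, 0.5), (1.5, 1.5), (0.5, 1.5)]
_overlap = polygon_clip(_subject, _clip) # vertices of the [0.5, 1] x [0.5, 1] overlap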
def poly_area(x,y):
""" Ref: http://stackoverflow.com/questions/24467972/calculate-area-of-polygon-given-x-y-coordinates """
return 0.5*np.abs(np.dot(x,np.roll(y,1))-np.dot(y,np.roll(x,1)))
def convex_hull_intersection(p1, p2):
""" Compute area of two convex hull's intersection area.
p1,p2 are a list of (x,y) tuples of hull vertices.
return a list of (x,y) for the intersection and its volume
"""
inter_p = polygon_clip(p1,p2)
if inter_p is not None:
hull_inter = ConvexHull(inter_p)
return inter_p, hull_inter.volume
else:
return None, 0.0
def box3d_vol(corners):
''' corners: (8,3) no assumption on axis direction '''
a = np.sqrt(np.sum((corners[0,:] - corners[1,:])**2))
b = np.sqrt(np.sum((corners[1,:] - corners[2,:])**2))
c = np.sqrt(np.sum((corners[0,:] - corners[4,:])**2))
return a*b*c
def is_clockwise(p):
x = p[:,0]
y = p[:,1]
return np.dot(x,np.roll(y,1))-np.dot(y,np.roll(x,1)) > 0
def box3d_iou(corners1, corners2):
''' Compute 3D bounding box IoU.
Input:
corners1: numpy array (8,3), assume up direction is negative Y
corners2: numpy array (8,3), assume up direction is negative Y
Output:
iou: 3D bounding box IoU
iou_2d: bird's eye view 2D bounding box IoU
todo (kent): add more description on corner points' orders.
'''
# corner points are in counter clockwise order
rect1 = [(corners1[i,0], corners1[i,2]) for i in range(3,-1,-1)]
rect2 = [(corners2[i,0], corners2[i,2]) for i in range(3,-1,-1)]
area1 = poly_area(np.array(rect1)[:,0], np.array(rect1)[:,1])
area2 = poly_area(np.array(rect2)[:,0], np.array(rect2)[:,1])
inter, inter_area = convex_hull_intersection(rect1, rect2)
iou_2d = inter_area/(area1+area2-inter_area)
ymax = min(corners1[0,1], corners2[0,1])
ymin = max(corners1[4,1], corners2[4,1])
inter_vol = inter_area * max(0.0, ymax-ymin)
vol1 = box3d_vol(corners1)
vol2 = box3d_vol(corners2)
iou = inter_vol / (vol1 + vol2 - inter_vol)
return iou, iou_2d
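# Quick sanity check for box3d_iou (added sketch, not part of the original
# snippet): two axis-aligned unit cubes whose corners follow the ordering the
# code expects (indices 0-3 on the upper face, 4-7 directly below), with the
# second cube shifted by half a unit in x and z. The intersection volume is
# 0.25 and the union is 1.75, so both IoUs equal 1/7 (about 0.143).
_corners1 = np.array([[0., 1., 0.], [0., 1., 1.], [1., 1., 1.], [1., 1., 0.],
[0., 0., 0.], [0., 0., 1.], [1., 0., 1.], [1., 0., 0.]])
_corners2 = _corners1 + np.array([0.5, 0.0, 0.5])
_iou_3d, _iou_bev = box3d_iou(_corners1, _corners2) # both ~0.1429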
# ----------------------------------
# Helper functions for evaluation
# ----------------------------------
def get_3d_box(box_size, heading_angle, center):
''' Calculate 3D bounding box corners from its parameterization.
Input:
box_size: tuple of (length,wide,height)
heading_angle: rad scalar, clockwise from pos x axis
center: tuple of (x,y,z)
Output:
corners_3d: numpy array of shape (8,3) for 3D box corners
'''
def roty(t):
c = | np.cos(t) | numpy.cos |
####################################################################
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import networkx as networkx
import numpy as numpy
import scipy as scipy
import scipy.integrate
import types
import random
#################################################################################################################################################
# Main stochastic model
class SEIRSNetworkModel():
"""
A class to simulate the SEIRS Stochastic Network Model
===================================================
Params: G Network adjacency matrix (numpy array) or Networkx graph object.
beta Rate of transmission (exposure) (global)
beta_local Rate(s) of transmission (exposure) for adjacent individuals (optional)
sigma Rate of infection (upon exposure)
gamma Rate of recovery (upon infection)
xi Rate of re-susceptibility (upon recovery)
mu_I Rate of infection-related death
mu_0 Rate of baseline death
nu Rate of baseline birth
p Probability of interaction outside adjacent nodes
Q Quarantine adjacency matrix (numpy array) or Networkx graph object.
beta_D Rate of transmission (exposure) for individuals with detected infections (global)
beta_D_local Rate(s) of transmission (exposure) for adjacent individuals with detected infections (optional)
sigma_D Rate of infection (upon exposure) for individuals with detected infections
gamma_D Rate of recovery (upon infection) for individuals with detected infections
mu_D Rate of infection-related death for individuals with detected infections
theta_E Rate of baseline testing for exposed individuals
theta_I Rate of baseline testing for infectious individuals
phi_E Rate of contact tracing testing for exposed individuals
phi_I Rate of contact tracing testing for infectious individuals
psi_E Probability of positive test results for exposed individuals
psi_I Probability of positive test results for infectious individuals
q Probability of quarantined individuals interaction outside adjacent nodes
initE Init number of exposed individuals
initI Init number of infectious individuals
initD_E Init number of detected exposed individuals
initD_I Init number of detected infectious individuals
initR Init number of recovered individuals
initF Init number of infection-related fatalities
(all remaining nodes initialized susceptible)
p_extern Probability of spontaneous infection
p_periodic Prob of periodic test
period Period for periodic testing
batch True - periodic testing done in batches of p_periodic*numNodes new people
min_time : minimum time period to pass between testing same person twice
count_non_random : count tests apart from random routine tests
policy: policy function
"""
def policy(self):
pass
def __init__(self, G, beta, sigma, gamma, xi=0, mu_I=0, mu_0=0, nu=0, beta_local=None, p=0,
Q=None, beta_D=None, sigma_D=None, gamma_D=None, mu_D=None, beta_D_local=None,
theta_E=0, theta_I=0, phi_E=0, phi_I=0, psi_E=1, psi_I=1, q=0,
initE=0, initI=10, initD_E=0, initD_I=0, initR=0, initF=0,
node_groups=None, store_Xseries=False, p_extern=0, p_periodic=0, period=1, batch=True, min_time=1,
count_non_random=False, policy=None, test_recovered=False, initT=0):
self.has_policy = False
self.verbose = False
if policy:
self.has_policy = True
policy.__name__ = "policy"
self.policy = types.MethodType(policy, self)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Setup Adjacency matrix:
self.update_G(G)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Setup Quarantine Adjacency matrix:
if (Q is None):
Q = G # If no Q graph is provided, use G in its place
self.update_Q(Q)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Model Parameters:
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.parameters = {'beta': beta, 'sigma': sigma, 'gamma': gamma, 'xi': xi, 'mu_I': mu_I, 'mu_0': mu_0, 'nu': nu,
'beta_D': beta_D, 'sigma_D': sigma_D, 'gamma_D': gamma_D, 'mu_D': mu_D,
'beta_local': beta_local, 'beta_D_local': beta_D_local, 'p': p, 'q': q,
'theta_E': theta_E, 'theta_I': theta_I, 'phi_E': phi_E, 'phi_I': phi_I, 'psi_E': psi_E,
'psi_I': psi_I,
'p_extern': p_extern, 'p_periodic': p_periodic, "period": period, "batch": batch,
"min_time": min_time,
"count_non_random": count_non_random, "test_recovered": test_recovered}
self.init_parameters = dict(self.parameters)
self.node_groups = node_groups
self.update_parameters()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Each node can undergo up to 4 transitions (sans vitality/re-susceptibility returns to S state),
# so there are ~numNodes*4 events/timesteps expected; initialize numNodes*5 timestep slots to start
# (will be expanded during run if needed)
self.tseries = numpy.zeros(5 * self.numNodes)
self.numE = numpy.zeros(5 * self.numNodes)
self.numI = numpy.zeros(5 * self.numNodes)
self.numD_E = numpy.zeros(5 * self.numNodes)
self.numD_I = numpy.zeros(5 * self.numNodes)
self.numR = numpy.zeros(5 * self.numNodes)
self.numF = numpy.zeros(5 * self.numNodes)
self.numS = numpy.zeros(5 * self.numNodes)
self.N = numpy.zeros(5 * self.numNodes)
self.numTested = numpy.zeros(5 * self.numNodes)
self.numPositive = numpy.zeros(5 * self.numNodes)
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Initialize Timekeeping:
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
if isinstance(initT, (int, float)):
self.t = initT
else:
self.t = random.uniform(initT[0], initT[1])
self.tmax = 0 # will be set when run() is called
self.tidx = 0
self.tseries[0] = self.t
self.wait_until_t = 0
self.currentR = 0
if (node_groups):
self.nodeToTest = {groupName: 0 for groupName in node_groups}
else:
self.nodeToTest = 0
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Initialize Counts of individuals with each state:
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.numE[0] = int(initE)
self.numI[0] = int(initI)
self.numD_E[0] = int(initD_E)
self.numD_I[0] = int(initD_I)
self.numR[0] = int(initR)
self.numF[0] = int(initF)
self.numS[0] = self.numNodes - self.numE[0] - self.numI[0] - self.numD_E[0] - self.numD_I[0] - self.numR[0] - \
self.numF[0]
self.N[0] = self.numS[0] + self.numE[0] + self.numI[0] + self.numD_E[0] + self.numD_I[0] + self.numR[0]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Node states:
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.S = 1
self.E = 2
self.I = 3
self.D_E = 4
self.D_I = 5
self.R = 6
self.F = 7
self.X = numpy.array(
[self.S] * int(self.numS[0]) + [self.E] * int(self.numE[0]) + [self.I] * int(self.numI[0]) + [
self.D_E] * int(self.numD_E[0]) + [self.D_I] * int(self.numD_I[0]) + [self.R] * int(self.numR[0]) + [
self.F] * int(self.numF[0])).reshape((self.numNodes, 1))
numpy.random.shuffle(self.X)
self.store_Xseries = store_Xseries
if (store_Xseries):
self.Xseries = numpy.zeros(shape=(5 * self.numNodes, self.numNodes), dtype='uint8')
self.Xseries[0, :] = self.X.T
self.transitions = {
'StoE': {'currentState': self.S, 'newState': self.E},
'EtoI': {'currentState': self.E, 'newState': self.I},
'ItoR': {'currentState': self.I, 'newState': self.R},
'ItoF': {'currentState': self.I, 'newState': self.F},
'RtoS': {'currentState': self.R, 'newState': self.S},
'EtoDE': {'currentState': self.E, 'newState': self.D_E},
'ItoDI': {'currentState': self.I, 'newState': self.D_I},
'DEtoDI': {'currentState': self.D_E, 'newState': self.D_I},
'DItoR': {'currentState': self.D_I, 'newState': self.R},
'DItoF': {'currentState': self.D_I, 'newState': self.F},
'_toS': {'currentState': True, 'newState': self.S},
'StoNS': {'currentState': self.S, 'newState': self.S}
}
self.last_tested = numpy.zeros((self.numNodes, 1)) - 100 # everybody has a fake last tested time of -100 days
self.time_detected = 0
self.small_step = False
self.count_non_random = count_non_random
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Initialize node subgroup data series:
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.nodeGroupData = None
if (node_groups):
self.nodeGroupData = {}
for groupName, nodeList in node_groups.items():
self.nodeGroupData[groupName] = {'nodes': numpy.array(nodeList),
'mask': numpy.isin(range(self.numNodes), nodeList).reshape(
(self.numNodes, 1))}
self.nodeGroupData[groupName]['numS'] = numpy.zeros(5 * self.numNodes)
self.nodeGroupData[groupName]['numE'] = numpy.zeros(5 * self.numNodes)
self.nodeGroupData[groupName]['numI'] = numpy.zeros(5 * self.numNodes)
self.nodeGroupData[groupName]['numD_E'] = numpy.zeros(5 * self.numNodes)
self.nodeGroupData[groupName]['numD_I'] = numpy.zeros(5 * self.numNodes)
self.nodeGroupData[groupName]['numR'] = numpy.zeros(5 * self.numNodes)
self.nodeGroupData[groupName]['numF'] = numpy.zeros(5 * self.numNodes)
self.nodeGroupData[groupName]['N'] = numpy.zeros(5 * self.numNodes)
self.nodeGroupData[groupName]['numTested'] = numpy.zeros(5 * self.numNodes)
self.nodeGroupData[groupName]['numS'][0] = numpy.count_nonzero(
self.nodeGroupData[groupName]['mask'] * self.X == self.S)
self.nodeGroupData[groupName]['numE'][0] = numpy.count_nonzero(
self.nodeGroupData[groupName]['mask'] * self.X == self.E)
self.nodeGroupData[groupName]['numI'][0] = numpy.count_nonzero(
self.nodeGroupData[groupName]['mask'] * self.X == self.I)
self.nodeGroupData[groupName]['numD_E'][0] = numpy.count_nonzero(
self.nodeGroupData[groupName]['mask'] * self.X == self.D_E)
self.nodeGroupData[groupName]['numD_I'][0] = numpy.count_nonzero(
self.nodeGroupData[groupName]['mask'] * self.X == self.D_I)
self.nodeGroupData[groupName]['numR'][0] = numpy.count_nonzero(
self.nodeGroupData[groupName]['mask'] * self.X == self.R)
self.nodeGroupData[groupName]['numF'][0] = numpy.count_nonzero(
self.nodeGroupData[groupName]['mask'] * self.X == self.F)
self.nodeGroupData[groupName]['N'][0] = self.nodeGroupData[groupName]['numS'][0] + \
self.nodeGroupData[groupName]['numE'][0] + \
self.nodeGroupData[groupName]['numI'][0] + \
self.nodeGroupData[groupName]['numD_E'][0] + \
self.nodeGroupData[groupName]['numD_I'][0] + \
self.nodeGroupData[groupName]['numR'][0]
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_parameters(self):
import time
updatestart = time.time()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Model parameters:
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# self.beta = numpy.array(self.parameters['beta']).reshape((self.numNodes, 1)) if isinstance(self.parameters['beta'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['beta'], shape=(self.numNodes,1))
self.sigma = numpy.array(self.parameters['sigma']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['sigma'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['sigma'],
shape=(self.numNodes, 1))
self.gamma = numpy.array(self.parameters['gamma']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['gamma'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['gamma'],
shape=(self.numNodes, 1))
self.xi = numpy.array(self.parameters['xi']).reshape((self.numNodes, 1)) if isinstance(self.parameters['xi'], (
list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['xi'], shape=(self.numNodes, 1))
self.mu_I = numpy.array(self.parameters['mu_I']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['mu_I'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['mu_I'],
shape=(self.numNodes, 1))
self.mu_0 = numpy.array(self.parameters['mu_0']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['mu_0'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['mu_0'],
shape=(self.numNodes, 1))
self.nu = numpy.array(self.parameters['nu']).reshape((self.numNodes, 1)) if isinstance(self.parameters['nu'], (
list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['nu'], shape=(self.numNodes, 1))
self.p = numpy.array(self.parameters['p']).reshape((self.numNodes, 1)) if isinstance(self.parameters['p'], (
list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['p'], shape=(self.numNodes, 1))
# Testing-related parameters:
self.beta_D = (
numpy.array(self.parameters['beta_D']).reshape((self.numNodes, 1)) if isinstance(self.parameters['beta_D'],
(list,
numpy.ndarray)) else numpy.full(
fill_value=self.parameters['beta_D'], shape=(self.numNodes, 1))) if self.parameters[
'beta_D'] is not None else self.beta
self.sigma_D = (numpy.array(self.parameters['sigma_D']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['sigma_D'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['sigma_D'],
shape=(self.numNodes, 1))) if \
self.parameters['sigma_D'] is not None else self.sigma
self.gamma_D = (numpy.array(self.parameters['gamma_D']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['gamma_D'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['gamma_D'],
shape=(self.numNodes, 1))) if \
self.parameters['gamma_D'] is not None else self.gamma
self.mu_D = (
numpy.array(self.parameters['mu_D']).reshape((self.numNodes, 1)) if isinstance(self.parameters['mu_D'], (
list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['mu_D'], shape=(self.numNodes, 1))) if \
self.parameters['mu_D'] is not None else self.mu_I
self.theta_E = numpy.array(self.parameters['theta_E']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['theta_E'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['theta_E'],
shape=(self.numNodes, 1))
self.theta_I = numpy.array(self.parameters['theta_I']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['theta_I'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['theta_I'],
shape=(self.numNodes, 1))
self.phi_E = numpy.array(self.parameters['phi_E']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['phi_E'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['phi_E'],
shape=(self.numNodes, 1))
self.phi_I = numpy.array(self.parameters['phi_I']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['phi_I'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['phi_I'],
shape=(self.numNodes, 1))
self.psi_E = numpy.array(self.parameters['psi_E']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['psi_E'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['psi_E'],
shape=(self.numNodes, 1))
self.psi_I = numpy.array(self.parameters['psi_I']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['psi_I'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['psi_I'],
shape=(self.numNodes, 1))
self.q = numpy.array(self.parameters['q']).reshape((self.numNodes, 1)) if isinstance(self.parameters['q'], (
list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['q'], shape=(self.numNodes, 1))
self.min_time = numpy.array(self.parameters['min_time']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['min_time'], (
list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['min_time'], shape=(self.numNodes, 1))
if isinstance(self.parameters['beta'], dict):
self.beta = numpy.zeros(shape=(self.numNodes, 1))
for groupName, nodeList in self.node_groups.items():
mask = numpy.isin(range(self.numNodes), nodeList).reshape((self.numNodes, 1))
self.beta[mask] = self.parameters['beta'][groupName]
else:
self.beta = numpy.array(self.parameters['beta']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['beta'], (list, numpy.ndarray)) else numpy.full(fill_value=self.parameters['beta'],
shape=(self.numNodes, 1))
if isinstance(self.parameters['p_extern'], dict):
self.p_extern = numpy.zeros(shape=(self.numNodes, 1))
for groupName, nodeList in self.node_groups.items():
mask = numpy.isin(range(self.numNodes), nodeList).reshape((self.numNodes, 1))
self.p_extern[mask] = self.parameters['p_extern'][groupName]
else:
self.p_extern = numpy.array(self.parameters['p_extern']).reshape((self.numNodes, 1)) if isinstance(
self.parameters['p_extern'], (list, numpy.ndarray)) else numpy.full(
fill_value=self.parameters['p_extern'], shape=(self.numNodes, 1))
self.p_periodic = self.parameters['p_periodic']
# numpy.array(self.parameters['p_periodic']).reshape((self.numNodes, 1)) if isinstance(
# self.parameters['p_periodic'], (list, numpy.ndarray)) else self.parameters['p_periodic']
# numpy.full(fill_value=self.parameters['p_periodic'], shape=(self.numNodes, 1))
self.period = self.parameters['period']
self.batch = self.parameters['batch']
self.count_non_random = self.parameters['count_non_random']
self.test_recovered = self.parameters['test_recovered']
# Local transmission parameters:
if (self.parameters['beta_local'] is not None):
if (isinstance(self.parameters['beta_local'], (list, numpy.ndarray))):
if (isinstance(self.parameters['beta_local'], list)):
self.beta_local = numpy.array(self.parameters['beta_local'])
else: # is numpy.ndarray
self.beta_local = self.parameters['beta_local']
if (self.beta_local.ndim == 1):
self.beta_local.reshape((self.numNodes, 1))
elif (self.beta_local.ndim == 2):
self.beta_local.reshape((self.numNodes, self.numNodes))
else:
self.beta_local = numpy.full_like(self.beta, fill_value=self.parameters['beta_local'])
else:
self.beta_local = self.beta
# ----------------------------------------
if (self.parameters['beta_D_local'] is not None):
if (isinstance(self.parameters['beta_D_local'], (list, numpy.ndarray))):
if (isinstance(self.parameters['beta_D_local'], list)):
self.beta_D_local = numpy.array(self.parameters['beta_D_local'])
else: # is numpy.ndarray
self.beta_D_local = self.parameters['beta_D_local']
if (self.beta_D_local.ndim == 1):
self.beta_D_local.reshape((self.numNodes, 1))
elif (self.beta_D_local.ndim == 2):
self.beta_D_local.reshape((self.numNodes, self.numNodes))
else:
self.beta_D_local = numpy.full_like(self.beta_D, fill_value=self.parameters['beta_D_local'])
else:
self.beta_D_local = self.beta_D
# Pre-multiply beta values by the adjacency matrix ("transmission weight connections")
if (self.beta_local.ndim == 1):
self.A_beta = scipy.sparse.csr_matrix.multiply(self.A,
numpy.tile(self.beta_local, (1, self.numNodes))).tocsr()
elif (self.beta_local.ndim == 2):
self.A_beta = scipy.sparse.csr_matrix.multiply(self.A, self.beta_local).tocsr()
# Pre-multiply beta_D values by the quarantine adjacency matrix ("transmission weight connections")
if (self.beta_D_local.ndim == 1):
self.A_Q_beta_D = scipy.sparse.csr_matrix.multiply(self.A_Q, numpy.tile(self.beta_D_local,
(1, self.numNodes))).tocsr()
elif (self.beta_D_local.ndim == 2):
self.A_Q_beta_D = scipy.sparse.csr_matrix.multiply(self.A_Q, self.beta_D_local).tocsr()
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Update scenario flags:
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
self.update_scenario_flags()
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def node_degrees(self, Amat):
return Amat.sum(axis=0).reshape(self.numNodes, 1) # sums of adj matrix cols
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_G(self, new_G):
self.G = new_G
# Adjacency matrix:
if type(new_G) == numpy.ndarray:
self.A = scipy.sparse.csr_matrix(new_G)
elif type(new_G) == networkx.classes.graph.Graph:
self.A = networkx.adj_matrix(new_G) # adj_matrix gives scipy.sparse csr_matrix
else:
raise BaseException("Input an adjacency matrix or networkx object only.")
self.numNodes = int(self.A.shape[1])
self.degree = numpy.asarray(self.node_degrees(self.A)).astype(float)
return
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_Q(self, new_Q):
self.Q = new_Q
# Quarantine Adjacency matrix:
if type(new_Q) == numpy.ndarray:
self.A_Q = scipy.sparse.csr_matrix(new_Q)
elif type(new_Q) == networkx.classes.graph.Graph:
self.A_Q = networkx.adj_matrix(new_Q) # adj_matrix gives scipy.sparse csr_matrix
else:
raise BaseException("Input an adjacency matrix or networkx object only.")
self.numNodes_Q = int(self.A_Q.shape[1])
self.degree_Q = numpy.asarray(self.node_degrees(self.A_Q)).astype(float)
assert (
self.numNodes == self.numNodes_Q), "The normal and quarantine adjacency graphs must be of the same size."
return
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def update_scenario_flags(self):
self.testing_scenario = ((numpy.any(self.psi_I) and (numpy.any(self.theta_I) or numpy.any(self.phi_I)))
or (numpy.any(self.psi_E) and (
numpy.any(self.theta_E) or numpy.any(self.phi_E))) or numpy.any(self.p_periodic))
self.tracing_scenario = ((numpy.any(self.psi_E) and numpy.any(self.phi_E))
or (numpy.any(self.psi_I) and numpy.any(self.phi_I)))
self.vitality_scenario = (numpy.any(self.mu_0) and numpy.any(self.nu))
self.resusceptibility_scenario = (numpy.any(self.xi))
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def total_num_infections(self, t_idx=None):
if (t_idx is None):
return (self.numE[:] + self.numI[:] + self.numD_E[:] + self.numD_I[:])
else:
return (self.numE[t_idx] + self.numI[t_idx] + self.numD_E[t_idx] + self.numD_I[t_idx])
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def calc_propensities(self):
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# Pre-calculate matrix multiplication terms that may be used in multiple propensity calculations,
# and check to see if their computation is necessary before doing the multiplication
transmissionTerms_I = numpy.zeros(shape=(self.numNodes, 1))
if (numpy.any(self.numI[self.tidx])
and numpy.any(self.beta != 0)):
transmissionTerms_I = numpy.asarray(scipy.sparse.csr_matrix.dot(self.A_beta, self.X == self.I))
transmissionTerms_DI = numpy.zeros(shape=(self.numNodes, 1))
if (self.testing_scenario
and numpy.any(self.numD_I[self.tidx])
and numpy.any(self.beta_D)):
transmissionTerms_DI = numpy.asarray(scipy.sparse.csr_matrix.dot(self.A_Q_beta_D, self.X == self.D_I))
numContacts_D = numpy.zeros(shape=(self.numNodes, 1))
if (self.tracing_scenario
and (numpy.any(self.numD_E[self.tidx]) or numpy.any(self.numD_I[self.tidx]))):
numContacts_D = numpy.asarray(
scipy.sparse.csr_matrix.dot(self.A, ((self.X == self.D_E) | (self.X == self.D_I))))
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
# BOAZ: additions
# NS - negative S
# NE - negative E
# NI - negative I
propensities_StoE = (self.p_extern + self.p * (
(self.beta * self.numI[self.tidx] + self.q * self.beta_D * self.numD_I[self.tidx]) / self.N[
self.tidx])
+ (1 - self.p) * numpy.divide((transmissionTerms_I + transmissionTerms_DI), self.degree,
out=numpy.zeros_like(self.degree), where=self.degree != 0)
) * (self.X == self.S)
propensities_EtoI = self.sigma * (self.X == self.E)
propensities_ItoR = self.gamma * (self.X == self.I)
propensities_ItoF = self.mu_I * (self.X == self.I)
# propensities_EtoDE = ( self.theta_E + numpy.divide((self.phi_E*numContacts_D), self.degree, out=numpy.zeros_like(self.degree), where=self.degree!=0) )*self.psi_E*(self.X==self.E)
can_test = self.last_tested + self.min_time <= self.t
propensities_StoNS = (self.phi_E * numContacts_D) * (self.X == self.S) * can_test
propensities_EtoDE = (self.theta_E + self.phi_E * numContacts_D) * self.psi_E * (self.X == self.E) * can_test
# propensities_ItoDI = ( self.theta_I + numpy.divide((self.phi_I*numContacts_D), self.degree, out=numpy.zeros_like(self.degree), where=self.degree!=0) )*self.psi_I*(self.X==self.I)
propensities_ItoDI = (self.theta_I + self.phi_I * numContacts_D) * self.psi_I * (self.X == self.I) * can_test
propensities_DEtoDI = self.sigma_D * (self.X == self.D_E)
propensities_DItoR = self.gamma_D * (self.X == self.D_I)
propensities_DItoF = self.mu_D * (self.X == self.D_I)
propensities_RtoS = self.xi * (self.X == self.R)
propensities__toS = self.nu * (self.X != self.F)
propensities = numpy.hstack([propensities_StoE, propensities_EtoI,
propensities_ItoR, propensities_ItoF,
propensities_EtoDE, propensities_ItoDI, propensities_DEtoDI,
propensities_DItoR, propensities_DItoF,
propensities_RtoS, propensities__toS, propensities_StoNS])
columns = ['StoE', 'EtoI', 'ItoR', 'ItoF', 'EtoDE', 'ItoDI', 'DEtoDI', 'DItoR', 'DItoF', 'RtoS', '_toS',
'StoNS']
return propensities, columns
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def increase_data_series_length(self):
self.tseries = numpy.pad(self.tseries, [(0, 5 * self.numNodes)], mode='constant', constant_values=0)
self.numS = numpy.pad(self.numS, [(0, 5 * self.numNodes)], mode='constant', constant_values=0)
self.numE = numpy.pad(self.numE, [(0, 5 * self.numNodes)], mode='constant', constant_values=0)
self.numI = numpy.pad(self.numI, [(0, 5 * self.numNodes)], mode='constant', constant_values=0)
self.numD_E = numpy.pad(self.numD_E, [(0, 5 * self.numNodes)], mode='constant', constant_values=0)
self.numD_I = numpy.pad(self.numD_I, [(0, 5 * self.numNodes)], mode='constant', constant_values=0)
self.numR = numpy.pad(self.numR, [(0, 5 * self.numNodes)], mode='constant', constant_values=0)
self.numF = numpy.pad(self.numF, [(0, 5 * self.numNodes)], mode='constant', constant_values=0)
self.N = numpy.pad(self.N, [(0, 5 * self.numNodes)], mode='constant', constant_values=0)
self.numTested = numpy.pad(self.numTested, [(0, 5 * self.numNodes)], mode='constant', constant_values=0)
self.numPositive = numpy.pad(self.numPositive, [(0, 5 * self.numNodes)], mode='constant', constant_values=0)
if (self.store_Xseries):
self.Xseries = numpy.pad(self.Xseries, [(0, 5 * self.numNodes), (0, 0)], mode='constant', constant_values=0)
if (self.nodeGroupData):
for groupName in self.nodeGroupData:
self.nodeGroupData[groupName]['numS'] = numpy.pad(self.nodeGroupData[groupName]['numS'],
[(0, 5 * self.numNodes)], mode='constant',
constant_values=0)
self.nodeGroupData[groupName]['numE'] = numpy.pad(self.nodeGroupData[groupName]['numE'],
[(0, 5 * self.numNodes)], mode='constant',
constant_values=0)
self.nodeGroupData[groupName]['numI'] = numpy.pad(self.nodeGroupData[groupName]['numI'],
[(0, 5 * self.numNodes)], mode='constant',
constant_values=0)
self.nodeGroupData[groupName]['numD_E'] = numpy.pad(self.nodeGroupData[groupName]['numD_E'],
[(0, 5 * self.numNodes)], mode='constant',
constant_values=0)
self.nodeGroupData[groupName]['numD_I'] = numpy.pad(self.nodeGroupData[groupName]['numD_I'],
[(0, 5 * self.numNodes)], mode='constant',
constant_values=0)
self.nodeGroupData[groupName]['numR'] = numpy.pad(self.nodeGroupData[groupName]['numR'],
[(0, 5 * self.numNodes)], mode='constant',
constant_values=0)
self.nodeGroupData[groupName]['numF'] = numpy.pad(self.nodeGroupData[groupName]['numF'],
[(0, 5 * self.numNodes)], mode='constant',
constant_values=0)
self.nodeGroupData[groupName]['N'] = numpy.pad(self.nodeGroupData[groupName]['N'],
[(0, 5 * self.numNodes)], mode='constant',
constant_values=0)
self.nodeGroupData[groupName]['numTested'] = numpy.pad(self.nodeGroupData[groupName]['numTested'],
[(0, 5 * self.numNodes)], mode='constant',
constant_values=0)
return None
# ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
def finalize_data_series(self):
self.tseries = numpy.array(self.tseries, dtype=float)[:self.tidx + 1]
self.numS = numpy.array(self.numS, dtype=float)[:self.tidx + 1]
self.numE = numpy.array(self.numE, dtype=float)[:self.tidx + 1]
self.numI = numpy.array(self.numI, dtype=float)[:self.tidx + 1]
self.numD_E = numpy.array(self.numD_E, dtype=float)[:self.tidx + 1]
self.numD_I = numpy.array(self.numD_I, dtype=float)[:self.tidx + 1]
self.numR = numpy.array(self.numR, dtype=float)[:self.tidx + 1]
self.numF = numpy.array(self.numF, dtype=float)[:self.tidx + 1]
self.N = numpy.array(self.N, dtype=float)[:self.tidx + 1]
self.numTested = numpy.array(self.numTested, dtype=float)[:self.tidx + 1]
self.numPositive = numpy.array(self.numPositive, dtype=float)[:self.tidx + 1]
if (self.store_Xseries):
self.Xseries = self.Xseries[:self.tidx + 1, :]
if (self.nodeGroupData):
for groupName in self.nodeGroupData:
self.nodeGroupData[groupName]['numS'] = numpy.array(self.nodeGroupData[groupName]['numS'], dtype=float)[
:self.tidx + 1]
self.nodeGroupData[groupName]['numE'] = numpy.array(self.nodeGroupData[groupName]['numE'], dtype=float)[
:self.tidx + 1]
self.nodeGroupData[groupName]['numI'] = numpy.array(self.nodeGroupData[groupName]['numI'], dtype=float)[
:self.tidx + 1]
self.nodeGroupData[groupName]['numD_E'] = numpy.array(self.nodeGroupData[groupName]['numD_E'],
dtype=float)[:self.tidx + 1]
self.nodeGroupData[groupName]['numD_I'] = numpy.array(self.nodeGroupData[groupName]['numD_I'],
dtype=float)[:self.tidx + 1]
                self.nodeGroupData[groupName]['numR'] = numpy.array(self.nodeGroupData[groupName]['numR'], dtype=float)[:self.tidx + 1]
import numpy as np
from scipy.stats import truncnorm, norm
def soft_threshold(r, gamma):
"""
soft-thresholding function
"""
return np.maximum(np.abs(r) - gamma, 0.0) * np.sign(r)
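# Worked example: soft_threshold(2.5, 1.0) -> 1.5 and soft_threshold(-0.3, 1.0) -> 0.0,
# i.e. values within gamma of zero are zeroed and the rest shrink towards zero by gamma.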
def df(r, gamma):
"""
divergence-free function
"""
eta = soft_threshold(r, gamma)
return eta - np.mean(eta != 0) * r
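# df() subtracts the empirical average derivative of the soft threshold (the
# fraction of entries that survive thresholding) times r, which makes the
# denoiser divergence-free on average, as used in OAMP-style iterations.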
def GCAMP(w, beta, log=False):
shita = 0.7
communication_cost = 0
P, N, _ = w.shape
T = beta * shita / (P-1)
R = np.zeros((P, N, 1))
z = np.zeros((N, 1))
    # STEP 1: each worker node p flags coefficients with |w[p, n]| > T and sends them to the fusion node.
for p in range(1, P):
R[p] = np.abs(w[p]) > T
candidate = np.where(R[p])[0]
for n in candidate:
communication_cost += 1
send_to1(n, w[p, n])
    # STEP 2: the fusion node bounds |sum_p w[p, n]| from the received values and
    # broadcasts the indices that may still exceed beta but lack some contributions.
S = [np.where(R[:, n])[0] for n in range(N)]
m = np.sum(R, axis=0)
U = np.empty((N, 1))
for n in range(N):
upper = (P - 1 - m[n]) * T
z[n] = w[0, n] + np.sum([w[p, n] for p in S[n]])
U[n] = np.abs(z[n]) + upper
F = (U > beta) * (m < (P-1))
candidate = np.where(F)[0]
for n in candidate:
communication_cost += 1
broadcast_others(n)
    # STEP 3: worker nodes send the requested coefficients they did not report in STEP 1.
F_R = F * np.logical_not(R)
for p in range(1, P):
#print("p: {}".format(p))
candidate = np.where(F_R[p])[0]
for n in candidate:
communication_cost += 1
send_to1(n ,w[p, n])
if log:
print("Rp: {} \t F: {} \t F\\Rp: {}".format(np.sum(R), np.sum(F), np.sum(F_R)-np.sum(F)))
print("Total Communication Cost: {}".format(communication_cost))
print("="*50)
    # STEP 4: the fusion node soft-thresholds the completed sums for the surviving indices.
s = np.zeros((N, 1))
b = np.zeros((N, 1))
V = np.where(U > beta)[0].tolist()
for n in V:
b[n] = np.sum(w[:, n])
s[n] = soft_threshold(b[n], beta)
return s.real, communication_cost
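# Minimal usage sketch (the shapes and beta value below are illustrative
# assumptions, not taken from the original source):
#     w = np.random.randn(4, 100, 1)   # P = 4 nodes, N = 100 coefficients
#     s, cost = GCAMP(w, beta=1.0)
#     print(s.shape, cost)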
def GCAMP_exp(w, tau_p, log=False):
shita = 0.7
tau = np.sum(tau_p)
communication_cost = 0
P, N, _ = w.shape
R = np.zeros((P, N, 1))
#STEP1
for p in range(1, P):
R[p] = np.square(w[p]) > tau_p[p] * shita
candidate = np.where(R[p])[0]
for n in candidate:
communication_cost += 1
send_to1(n, w[p, n])
#STEP2
S = [np.where(R[:, n])[0] for n in range(N)]
m = np.sum(R, axis=0)
U = np.empty((N, 1))
for n in range(N):
        # tau_p summed over the worker nodes that did not report coefficient n
        upper = np.sum([tau_p[p] for p in range(1, P) if p not in S[n]])
        U[n] = (w[0, n] + np.sum([w[p, n] for p in S[n]]))**2 + upper * shita
F = (U > tau) * (m < (P-1))
candidate = np.where(F)[0]
for n in candidate:
communication_cost += 1
broadcast_others(n)
#STEP3
F_R = F * np.logical_not(R)
for p in range(1, P):
#print("p: {}".format(p))
candidate = np.where(F_R[p])[0]
for n in candidate:
communication_cost += 1
send_to1(n ,w[p, n])
if log:
print("Rp: {} \t F: {} \t F\\Rp: {}".format(np.sum(R), np.sum(F), np.sum(F_R)-np.sum(F)))
print("Total Communication Cost: {}".format(communication_cost))
print("="*50)
#STEP4
s = np.zeros((N, 1))
V = np.where(U > tau)[0].tolist()
for n in V:
w_sum = np.sum(w[:, n])
s[n] = soft_threshold(w_sum, tau**0.5)
return s.real, communication_cost
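# send_to1 and broadcast_others are communication stubs: they do no work here
# and exist only so each call site can be tallied in communication_cost.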
def send_to1(n, w):
#print("n: {}, w: {}".format(n, w))
pass
def broadcast_others(n):
#print("n: {}".format(n))
pass
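# GCOAMP applies the same screening pattern as GCAMP_exp; the name suggests an
# OAMP-style (divergence-free) counterpart built on df() (inferred from the code).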
def GCOAMP(w, tau_p, log=False):
shita = 0.7
tau = np.sum(tau_p)
communication_cost = 0
P, N, _ = w.shape
R = np.zeros((P, N, 1))
z = [0] * N
#STEP1
for p in range(1, P):
        R[p] = np.square(w[p]) > tau_p[p] * shita