filename (string, 4–198 chars) | content (string, 25–939k chars) | environment (list) | variablearg (list) | constarg (list) | variableargjson (string, 1 class) | constargjson (string, 2–3.9k chars) | lang (string, 3 classes) | constargcount (float64, 0–129, nullable) | variableargcount (float64, 0, nullable) | sentence (string, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
nmigen_boards/tinyfpga_bx.py | import os
import subprocess
from nmigen.build import *
from nmigen.vendor.lattice_ice40 import *
from .resources import *
__all__ = ["TinyFPGABXPlatform"]
class TinyFPGABXPlatform(LatticeICE40Platform):
device = "iCE40LP8K"
package = "CM81"
default_clk = "clk16"
resources = [
Resource("clk16", 0, Pins("B2", dir="i"),
Clock(16e6), Attrs(IO_STANDARD="SB_LVCMOS")),
*LEDResources(pins="B3", attrs=Attrs(IO_STANDARD="SB_LVCMOS")),
DirectUSBResource(0, d_p="B4", d_n="A4", pullup="A3",
attrs=Attrs(IO_STANDARD="SB_LVCMOS")
),
*SPIFlashResources(0,
cs="F7", clk="G7", copi="G6", cipo="H7", wp="H4", hold="J8",
attrs=Attrs(IO_STANDARD="SB_LVCMOS")
),
]
connectors = [
Connector("gpio", 0,
# Left side of the board
# 1 2 3 4 5 6 7 8 9 10 11 12 13
" A2 A1 B1 C2 C1 D2 D1 E2 E1 G2 H1 J1 H2 "
# Right side of the board
# 14 15 16 17 18 19 20 21 22 23 24
" H9 D9 D8 C9 A9 B8 A8 B7 A7 B6 A6 "
# Bottom of the board
# 25 26 27 28 29 30 31
"G1 J3 J4 G9 J9 E8 J2"
),
]
def toolchain_program(self, products, name):
tinyprog = os.environ.get("TINYPROG", "tinyprog")
with products.extract("{}.bin".format(name)) as bitstream_filename:
subprocess.check_call([tinyprog, "-p", bitstream_filename])
if __name__ == "__main__":
from .test.blinky import *
TinyFPGABXPlatform().build(Blinky(), do_program=True)
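# Illustrative invocation (a sketch; the tinyprog path is an assumption, not
# part of the original file):
#   TINYPROG=/usr/local/bin/tinyprog python -m nmigen_boards.tinyfpga_bx
# builds the Blinky bitstream and programs the board, using the TINYPROG
# environment variable honored by toolchain_program() above.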
| []
| []
| [
"TINYPROG"
]
| [] | ["TINYPROG"] | python | 1 | 0 | |
main_DeepGMG.py | # an implementation for "Learning Deep Generative Models of Graphs"
from main import *
class Args_DGMG():
def __init__(self):
### CUDA
self.cuda = 2
### model type
self.note = 'Baseline_DGMG' # do GCN after adding each edge
# self.note = 'Baseline_DGMG_fast' # do GCN only after adding each node
### data config
#self.graph_type = 'caveman_small'
# self.graph_type = 'grid_small'
self.graph_type = 'ladder_small'
# self.graph_type = 'enzymes_small'
# self.graph_type = 'barabasi_small'
# self.graph_type = 'citeseer_small'
self.max_num_node = 20
### network config
self.node_embedding_size = 64
self.test_graph_num = 200
### training config
self.epochs = 100 #2000 # now one epoch means self.batch_ratio x batch_size
self.load_epoch = 100 #2000
self.epochs_test_start = 5 #100
self.epochs_test = 5 #100
self.epochs_log = 10 #100
self.epochs_save = 10 #100
if 'fast' in self.note:
self.is_fast = True
else:
self.is_fast = False
self.lr = 0.001
self.milestones = [300, 600, 1000]
self.lr_rate = 0.3
### output config
self.model_save_path = 'model_save/'
self.graph_save_path = 'graphs/'
self.figure_save_path = 'figures/'
self.timing_save_path = 'timing/'
self.figure_prediction_save_path = 'figures_prediction/'
self.nll_save_path = 'nll/'
self.fname = self.note + '_' + self.graph_type + '_' + str(self.node_embedding_size)
self.fname_pred = self.note + '_' + self.graph_type + '_' + str(self.node_embedding_size) + '_pred_'
self.fname_train = self.note + '_' + self.graph_type + '_' + str(self.node_embedding_size) + '_train_'
self.fname_test = self.note + '_' + self.graph_type + '_' + str(self.node_embedding_size) + '_test_'
self.load = False
self.save = True
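# Illustrative note (not part of the original config): with the defaults above,
# self.fname evaluates to 'Baseline_DGMG_ladder_small_64', so checkpoints are
# written to e.g. 'model_save/Baseline_DGMG_ladder_small_64model_100.dat'.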
def train_DGMG_epoch(epoch, args, model, dataset, optimizer, scheduler, is_fast = False):
model.train()
graph_num = len(dataset)
order = list(range(graph_num))
shuffle(order)
loss_addnode = 0
loss_addedge = 0
loss_node = 0
for i in order:
model.zero_grad()
graph = dataset[i]
# do random ordering: relabel nodes
node_order = list(range(graph.number_of_nodes()))
shuffle(node_order)
order_mapping = dict(zip(graph.nodes(), node_order))
graph = nx.relabel_nodes(graph, order_mapping, copy=True)
# NOTE: when starting loop, we assume a node has already been generated
node_count = 1
node_embedding = [Variable(torch.ones(1,args.node_embedding_size)).to(device)] # list of torch tensors, each size: 1*hidden
loss = 0
while node_count<=graph.number_of_nodes():
node_neighbor = graph.subgraph(list(range(node_count))).adjacency_list() # list of lists (first node is zero)
node_neighbor_new = graph.subgraph(list(range(node_count+1))).adjacency_list()[-1] # list of new node's neighbors
# 1 message passing
# do 2 times message passing
node_embedding = message_passing(node_neighbor, node_embedding, model)
# 2 graph embedding and new node embedding
node_embedding_cat = torch.cat(node_embedding, dim=0)
graph_embedding = calc_graph_embedding(node_embedding_cat, model)
init_embedding = calc_init_embedding(node_embedding_cat, model)
# 3 f_addnode
p_addnode = model.f_an(graph_embedding)
if node_count < graph.number_of_nodes():
# add node
node_neighbor.append([])
node_embedding.append(init_embedding)
if is_fast:
node_embedding_cat = torch.cat(node_embedding, dim=0)
# calc loss
loss_addnode_step = F.binary_cross_entropy(p_addnode,Variable(torch.ones((1,1))).to(device))
# loss_addnode_step.backward(retain_graph=True)
loss += loss_addnode_step
loss_addnode += loss_addnode_step.data
else:
# calc loss
loss_addnode_step = F.binary_cross_entropy(p_addnode, Variable(torch.zeros((1, 1))).to(device))
# loss_addnode_step.backward(retain_graph=True)
loss += loss_addnode_step
loss_addnode += loss_addnode_step.data
break
edge_count = 0
while edge_count<=len(node_neighbor_new):
if not is_fast:
node_embedding = message_passing(node_neighbor, node_embedding, model)
node_embedding_cat = torch.cat(node_embedding, dim=0)
graph_embedding = calc_graph_embedding(node_embedding_cat, model)
# 4 f_addedge
p_addedge = model.f_ae(graph_embedding)
if edge_count < len(node_neighbor_new):
# calc loss
loss_addedge_step = F.binary_cross_entropy(p_addedge, Variable(torch.ones((1, 1))).to(device))
# loss_addedge_step.backward(retain_graph=True)
loss += loss_addedge_step
loss_addedge += loss_addedge_step.data
# 5 f_nodes
# excluding the last node (which is the new node)
node_new_embedding_cat = node_embedding_cat[-1,:].expand(node_embedding_cat.size(0)-1,node_embedding_cat.size(1))
s_node = model.f_s(torch.cat((node_embedding_cat[0:-1,:],node_new_embedding_cat),dim=1))
p_node = F.softmax(s_node.permute(1,0))
# get ground truth
a_node = torch.zeros((1,p_node.size(1)))
# print('node_neighbor_new',node_neighbor_new, edge_count)
a_node[0,node_neighbor_new[edge_count]] = 1
a_node = Variable(a_node).to(device)
# add edge
node_neighbor[-1].append(node_neighbor_new[edge_count])
node_neighbor[node_neighbor_new[edge_count]].append(len(node_neighbor)-1)
# calc loss
loss_node_step = F.binary_cross_entropy(p_node,a_node)
# loss_node_step.backward(retain_graph=True)
loss += loss_node_step
loss_node += loss_node_step.data
else:
# calc loss
loss_addedge_step = F.binary_cross_entropy(p_addedge, Variable(torch.zeros((1, 1))).to(device))
# loss_addedge_step.backward(retain_graph=True)
loss += loss_addedge_step
loss_addedge += loss_addedge_step.data
break
edge_count += 1
node_count += 1
# update deterministic and lstm
loss.backward()
optimizer.step()
scheduler.step()
loss_all = loss_addnode + loss_addedge + loss_node
if epoch % args.epochs_log==0:
print('Epoch: {}/{}, train loss: {:.6f}, graph type: {}, hidden: {}'.format(
epoch, args.epochs,loss_all[0], args.graph_type, args.node_embedding_size))
# loss_sum += loss.data[0]*x.size(0)
# return loss_sum
def train_DGMG_forward_epoch(args, model, dataset, is_fast = False):
model.train()
graph_num = len(dataset)
order = list(range(graph_num))
shuffle(order)
loss_addnode = 0
loss_addedge = 0
loss_node = 0
for i in order:
model.zero_grad()
graph = dataset[i]
# do random ordering: relabel nodes
node_order = list(range(graph.number_of_nodes()))
shuffle(node_order)
order_mapping = dict(zip(graph.nodes(), node_order))
graph = nx.relabel_nodes(graph, order_mapping, copy=True)
# NOTE: when starting loop, we assume a node has already been generated
node_count = 1
node_embedding = [Variable(torch.ones(1,args.node_embedding_size)).to(device)] # list of torch tensors, each size: 1*hidden
loss = 0
while node_count<=graph.number_of_nodes():
node_neighbor = graph.subgraph(list(range(node_count))).adjacency_list() # list of lists (first node is zero)
node_neighbor_new = graph.subgraph(list(range(node_count+1))).adjacency_list()[-1] # list of new node's neighbors
# 1 message passing
# do 2 times message passing
node_embedding = message_passing(node_neighbor, node_embedding, model)
# 2 graph embedding and new node embedding
node_embedding_cat = torch.cat(node_embedding, dim=0)
graph_embedding = calc_graph_embedding(node_embedding_cat, model)
init_embedding = calc_init_embedding(node_embedding_cat, model)
# 3 f_addnode
p_addnode = model.f_an(graph_embedding)
if node_count < graph.number_of_nodes():
# add node
node_neighbor.append([])
node_embedding.append(init_embedding)
if is_fast:
node_embedding_cat = torch.cat(node_embedding, dim=0)
# calc loss
loss_addnode_step = F.binary_cross_entropy(p_addnode,Variable(torch.ones((1,1))).to(device))
# loss_addnode_step.backward(retain_graph=True)
loss += loss_addnode_step
loss_addnode += loss_addnode_step.data
else:
# calc loss
loss_addnode_step = F.binary_cross_entropy(p_addnode, Variable(torch.zeros((1, 1))).to(device))
# loss_addnode_step.backward(retain_graph=True)
loss += loss_addnode_step
loss_addnode += loss_addnode_step.data
break
edge_count = 0
while edge_count<=len(node_neighbor_new):
if not is_fast:
node_embedding = message_passing(node_neighbor, node_embedding, model)
node_embedding_cat = torch.cat(node_embedding, dim=0)
graph_embedding = calc_graph_embedding(node_embedding_cat, model)
# 4 f_addedge
p_addedge = model.f_ae(graph_embedding)
if edge_count < len(node_neighbor_new):
# calc loss
loss_addedge_step = F.binary_cross_entropy(p_addedge, Variable(torch.ones((1, 1))).to(device))
# loss_addedge_step.backward(retain_graph=True)
loss += loss_addedge_step
loss_addedge += loss_addedge_step.data
# 5 f_nodes
# excluding the last node (which is the new node)
node_new_embedding_cat = node_embedding_cat[-1,:].expand(node_embedding_cat.size(0)-1,node_embedding_cat.size(1))
s_node = model.f_s(torch.cat((node_embedding_cat[0:-1,:],node_new_embedding_cat),dim=1))
p_node = F.softmax(s_node.permute(1,0))
# get ground truth
a_node = torch.zeros((1,p_node.size(1)))
# print('node_neighbor_new',node_neighbor_new, edge_count)
a_node[0,node_neighbor_new[edge_count]] = 1
a_node = Variable(a_node).to(device)
# add edge
node_neighbor[-1].append(node_neighbor_new[edge_count])
node_neighbor[node_neighbor_new[edge_count]].append(len(node_neighbor)-1)
# calc loss
loss_node_step = F.binary_cross_entropy(p_node,a_node)
# loss_node_step.backward(retain_graph=True)
loss += loss_node_step
loss_node += loss_node_step.data*p_node.size(1)
else:
# calc loss
loss_addedge_step = F.binary_cross_entropy(p_addedge, Variable(torch.zeros((1, 1))).to(device))
# loss_addedge_step.backward(retain_graph=True)
loss += loss_addedge_step
loss_addedge += loss_addedge_step.data
break
edge_count += 1
node_count += 1
loss_all = loss_addnode + loss_addedge + loss_node
# if epoch % args.epochs_log==0:
# print('Epoch: {}/{}, train loss: {:.6f}, graph type: {}, hidden: {}'.format(
# epoch, args.epochs,loss_all[0], args.graph_type, args.node_embedding_size))
return loss_all[0]/len(dataset)
def test_DGMG_epoch(args, model, is_fast=False):
model.eval()
graph_num = args.test_graph_num
graphs_generated = []
for i in range(graph_num):
# NOTE: when starting loop, we assume a node has already been generated
node_neighbor = [[]] # list of lists (first node is zero)
node_embedding = [Variable(torch.ones(1,args.node_embedding_size)).to(device)] # list of torch tensors, each size: 1*hidden
node_count = 1
while node_count<=args.max_num_node:
# 1 message passing
# do 2 times message passing
node_embedding = message_passing(node_neighbor, node_embedding, model)
# 2 graph embedding and new node embedding
node_embedding_cat = torch.cat(node_embedding, dim=0)
graph_embedding = calc_graph_embedding(node_embedding_cat, model)
init_embedding = calc_init_embedding(node_embedding_cat, model)
# 3 f_addnode
p_addnode = model.f_an(graph_embedding)
a_addnode = sample_tensor(p_addnode)
# print(a_addnode.data[0][0])
if a_addnode.data[0][0]==1:
# print('add node')
# add node
node_neighbor.append([])
node_embedding.append(init_embedding)
if is_fast:
node_embedding_cat = torch.cat(node_embedding, dim=0)
else:
break
edge_count = 0
while edge_count<args.max_num_node:
if not is_fast:
node_embedding = message_passing(node_neighbor, node_embedding, model)
node_embedding_cat = torch.cat(node_embedding, dim=0)
graph_embedding = calc_graph_embedding(node_embedding_cat, model)
# 4 f_addedge
p_addedge = model.f_ae(graph_embedding)
a_addedge = sample_tensor(p_addedge)
# print(a_addedge.data[0][0])
if a_addedge.data[0][0]==1:
# print('add edge')
# 5 f_nodes
# excluding the last node (which is the new node)
node_new_embedding_cat = node_embedding_cat[-1,:].expand(node_embedding_cat.size(0)-1,node_embedding_cat.size(1))
s_node = model.f_s(torch.cat((node_embedding_cat[0:-1,:],node_new_embedding_cat),dim=1))
p_node = F.softmax(s_node.permute(1,0))
a_node = gumbel_softmax(p_node, temperature=0.01)
_, a_node_id = a_node.topk(1)
a_node_id = int(a_node_id.data[0][0])
# add edge
node_neighbor[-1].append(a_node_id)
node_neighbor[a_node_id].append(len(node_neighbor)-1)
else:
break
edge_count += 1
node_count += 1
# save graph
node_neighbor_dict = dict(zip(list(range(len(node_neighbor))), node_neighbor))
graph = nx.from_dict_of_lists(node_neighbor_dict)
graphs_generated.append(graph)
return graphs_generated
########### train function for DGMG
def train_DGMG(args, dataset_train, model):
# check if load existing model
if args.load:
fname = args.model_save_path + args.fname + 'model_' + str(args.load_epoch) + '.dat'
model.load_state_dict(torch.load(fname))
args.lr = 0.00001
epoch = args.load_epoch
print('model loaded!, lr: {}'.format(args.lr))
else:
epoch = 1
# initialize optimizer
optimizer = optim.Adam(list(model.parameters()), lr=args.lr)
scheduler = MultiStepLR(optimizer, milestones=args.milestones, gamma=args.lr_rate)
# start main loop
time_all = np.zeros(args.epochs)
while epoch <= args.epochs:
time_start = tm.time()
# train
train_DGMG_epoch(epoch, args, model, dataset_train, optimizer, scheduler, is_fast=args.is_fast)
time_end = tm.time()
time_all[epoch - 1] = time_end - time_start
# print('time used',time_all[epoch - 1])
# test
if epoch % args.epochs_test == 0 and epoch >= args.epochs_test_start:
graphs = test_DGMG_epoch(args,model, is_fast=args.is_fast)
fname = args.graph_save_path + args.fname_pred + str(epoch) + '.dat'
save_graph_list(graphs, fname)
# print('test done, graphs saved')
# save model checkpoint
if args.save:
if epoch % args.epochs_save == 0:
fname = args.model_save_path + args.fname + 'model_' + str(epoch) + '.dat'
torch.save(model.state_dict(), fname)
epoch += 1
np.save(args.timing_save_path + args.fname, time_all)
########### NLL evaluation function for DGMG
def train_DGMG_nll(args, dataset_train,dataset_test, model,max_iter=1000):
# check if load existing model
fname = args.model_save_path + args.fname + 'model_' + str(args.load_epoch) + '.dat'
model.load_state_dict(torch.load(fname))
fname_output = args.nll_save_path + args.note + '_' + args.graph_type + '.csv'
with open(fname_output, 'w+') as f:
f.write('train,test\n')
# start main loop
for iter in range(max_iter):
nll_train = train_DGMG_forward_epoch(args, model, dataset_train, is_fast=args.is_fast)
nll_test = train_DGMG_forward_epoch(args, model, dataset_test, is_fast=args.is_fast)
print('train', nll_train, 'test', nll_test)
f.write(str(nll_train) + ',' + str(nll_test) + '\n')
if __name__ == '__main__':
args = Args_DGMG()
os.environ['CUDA_VISIBLE_DEVICES'] = str(args.cuda)
print('CUDA', args.cuda)
print('File name prefix',args.fname)
# TODO: why not use create_graphs() here?
graphs = []
for i in range(4, 10):
graphs.append(nx.ladder_graph(i))
model = DGM_graphs(h_size = args.node_embedding_size).to(device)
if args.graph_type == 'ladder_small':
graphs = []
for i in range(2, 11):
graphs.append(nx.ladder_graph(i))
args.max_prev_node = 10
# if args.graph_type == 'caveman_small':
# graphs = []
# for i in range(2, 5):
# for j in range(2, 6):
# for k in range(10):
# graphs.append(nx.relaxed_caveman_graph(i, j, p=0.1))
# args.max_prev_node = 20
if args.graph_type=='caveman_small':
graphs = []
for i in range(2, 3):
for j in range(6, 11):
for k in range(20):
graphs.append(caveman_special(i, j, p_edge=0.8))
args.max_prev_node = 20
if args.graph_type == 'grid_small':
graphs = []
for i in range(2, 5):
for j in range(2, 6):
graphs.append(nx.grid_2d_graph(i, j))
args.max_prev_node = 15
if args.graph_type == 'barabasi_small':
graphs = []
for i in range(4, 21):
for j in range(3, 4):
for k in range(10):
graphs.append(nx.barabasi_albert_graph(i, j))
args.max_prev_node = 20
if args.graph_type == 'enzymes_small':
graphs_raw = Graph_load_batch(min_num_nodes=10, name='ENZYMES')
graphs = []
for G in graphs_raw:
if G.number_of_nodes()<=20:
graphs.append(G)
args.max_prev_node = 15
if args.graph_type == 'citeseer_small':
_, _, G = Graph_load(dataset='citeseer')
G = max(nx.connected_component_subgraphs(G), key=len)
G = nx.convert_node_labels_to_integers(G)
graphs = []
for i in range(G.number_of_nodes()):
G_ego = nx.ego_graph(G, i, radius=1)
if (G_ego.number_of_nodes() >= 4) and (G_ego.number_of_nodes() <= 20):
graphs.append(G_ego)
shuffle(graphs)
graphs = graphs[0:200]
args.max_prev_node = 15
# remove self loops
for graph in graphs:
edges_with_selfloops = graph.selfloop_edges()
if len(edges_with_selfloops) > 0:
graph.remove_edges_from(edges_with_selfloops)
# split datasets
random.seed(123)
shuffle(graphs)
graphs_len = len(graphs)
graphs_test = graphs[int(0.8 * graphs_len):]
graphs_train = graphs[0:int(0.8 * graphs_len)]
args.max_num_node = max([graphs[i].number_of_nodes() for i in range(len(graphs))])
# args.max_num_node = 2000
# show graphs statistics
print('total graph num: {}, training set: {}'.format(len(graphs), len(graphs_train)))
print('max number node: {}'.format(args.max_num_node))
print('max previous node: {}'.format(args.max_prev_node))
# save ground truth graphs
# save_graph_list(graphs, args.graph_save_path + args.fname_train + '0.dat')
# save_graph_list(graphs, args.graph_save_path + args.fname_test + '0.dat')
# print('train and test graphs saved')
## if use pre-saved graphs
# dir_input = "graphs/"
# fname_test = args.graph_save_path + args.fname_test + '0.dat'
# graphs = load_graph_list(fname_test, is_real=True)
# graphs_test = graphs[int(0.8 * graphs_len):]
# graphs_train = graphs[0:int(0.8 * graphs_len)]
# graphs_validate = graphs[0:int(0.2 * graphs_len)]
# print('train')
# for graph in graphs_validate:
# print(graph.number_of_nodes())
# print('test')
# for graph in graphs_test:
# print(graph.number_of_nodes())
### train
train_DGMG(args,graphs,model)
### calc nll
# train_DGMG_nll(args, graphs_validate,graphs_test, model,max_iter=1000)
# for j in range(1000):
# graph = graphs[0]
# # do random ordering: relabel nodes
# node_order = list(range(graph.number_of_nodes()))
# shuffle(node_order)
# order_mapping = dict(zip(graph.nodes(), node_order))
# graph = nx.relabel_nodes(graph, order_mapping, copy=True)
# print(graph.nodes()) | []
| []
| [
"CUDA_VISIBLE_DEVICES"
]
| [] | ["CUDA_VISIBLE_DEVICES"] | python | 1 | 0 | |
vendor/gopkg.in/macaron.v1/macaron.go | // +build go1.3
// Copyright 2014 The Macaron Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
// Package macaron is a highly productive and modular web framework in Go.
package macaron
import (
"io"
"log"
"net/http"
"os"
"reflect"
"strings"
"sync"
"gitote.com/yoginth/com"
"gopkg.in/ini.v1"
"github.com/go-macaron/inject"
)
const _VERSION = "1.3.1.0306"
func Version() string {
return _VERSION
}
// Handler can be any callable function.
// Macaron attempts to inject services into the handler's argument list,
// and panics if an argument could not be fulfilled via dependency injection.
type Handler interface{}
// handlerFuncInvoker is an inject.FastInvoker wrapper of func(http.ResponseWriter, *http.Request).
type handlerFuncInvoker func(http.ResponseWriter, *http.Request)
func (invoke handlerFuncInvoker) Invoke(params []interface{}) ([]reflect.Value, error) {
invoke(params[0].(http.ResponseWriter), params[1].(*http.Request))
return nil, nil
}
// internalServerErrorInvoker is an inject.FastInvoker wrapper of func(rw http.ResponseWriter, err error).
type internalServerErrorInvoker func(rw http.ResponseWriter, err error)
func (invoke internalServerErrorInvoker) Invoke(params []interface{}) ([]reflect.Value, error) {
invoke(params[0].(http.ResponseWriter), params[1].(error))
return nil, nil
}
// validateAndWrapHandler makes sure a handler is a callable function; it panics if not.
// When the handler can also be treated as a built-in inject.FastInvoker,
// it is wrapped automatically for a performance gain.
func validateAndWrapHandler(h Handler) Handler {
if reflect.TypeOf(h).Kind() != reflect.Func {
panic("Macaron handler must be a callable function")
}
if !inject.IsFastInvoker(h) {
switch v := h.(type) {
case func(*Context):
return ContextInvoker(v)
case func(*Context, *log.Logger):
return LoggerInvoker(v)
case func(http.ResponseWriter, *http.Request):
return handlerFuncInvoker(v)
case func(http.ResponseWriter, error):
return internalServerErrorInvoker(v)
}
}
return h
}
// validateAndWrapHandlers performs validation and wrapping for each input handler.
// It accepts an optional wrapper function to perform custom wrapping on handlers.
func validateAndWrapHandlers(handlers []Handler, wrappers ...func(Handler) Handler) []Handler {
var wrapper func(Handler) Handler
if len(wrappers) > 0 {
wrapper = wrappers[0]
}
wrappedHandlers := make([]Handler, len(handlers))
for i, h := range handlers {
h = validateAndWrapHandler(h)
if wrapper != nil && !inject.IsFastInvoker(h) {
h = wrapper(h)
}
wrappedHandlers[i] = h
}
return wrappedHandlers
}
// Macaron represents the top level web application.
// inject.Injector methods can be invoked to map services on a global level.
type Macaron struct {
inject.Injector
befores []BeforeHandler
handlers []Handler
action Handler
hasURLPrefix bool
urlPrefix string // For suburl support.
*Router
logger *log.Logger
}
// NewWithLogger creates a bare bones Macaron instance.
// Use this method if you want to have full control over the middleware that is used.
// You can specify logger output writer with this function.
func NewWithLogger(out io.Writer) *Macaron {
m := &Macaron{
Injector: inject.New(),
action: func() {},
Router: NewRouter(),
logger: log.New(out, "[Macaron] ", 0),
}
m.Router.m = m
m.Map(m.logger)
m.Map(defaultReturnHandler())
m.NotFound(http.NotFound)
m.InternalServerError(func(rw http.ResponseWriter, err error) {
http.Error(rw, err.Error(), 500)
})
return m
}
// New creates a bare bones Macaron instance.
// Use this method if you want to have full control over the middleware that is used.
func New() *Macaron {
return NewWithLogger(os.Stdout)
}
// Classic creates a classic Macaron with some basic default middleware:
// macaron.Logger, macaron.Recovery and macaron.Static.
func Classic() *Macaron {
m := New()
m.Use(Logger())
m.Use(Recovery())
m.Use(Static("public"))
return m
}
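// Illustrative usage from a client package (a minimal sketch, not part of this
// file; the route and handler are assumptions):
//
//	m := macaron.Classic()
//	m.Get("/", func() string { return "Hello, Macaron!" })
//	m.Run() // serves on HOST:PORT from the environment, or 0.0.0.0:4000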
// Handlers sets the entire middleware stack with the given Handlers.
// This will clear any current middleware handlers,
// and panics if any of the handlers is not a callable function
func (m *Macaron) Handlers(handlers ...Handler) {
m.handlers = make([]Handler, 0)
for _, handler := range handlers {
m.Use(handler)
}
}
// Action sets the handler that will be called after all the middleware has been invoked.
// This is set to macaron.Router in a macaron.Classic().
func (m *Macaron) Action(handler Handler) {
handler = validateAndWrapHandler(handler)
m.action = handler
}
// BeforeHandler represents a handler that executes at the beginning of every request.
// Macaron stops processing the request when it returns true.
type BeforeHandler func(rw http.ResponseWriter, req *http.Request) bool
func (m *Macaron) Before(handler BeforeHandler) {
m.befores = append(m.befores, handler)
}
// Use adds a middleware Handler to the stack,
// and panics if the handler is not a callable func.
// Middleware Handlers are invoked in the order that they are added.
func (m *Macaron) Use(handler Handler) {
handler = validateAndWrapHandler(handler)
m.handlers = append(m.handlers, handler)
}
func (m *Macaron) createContext(rw http.ResponseWriter, req *http.Request) *Context {
c := &Context{
Injector: inject.New(),
handlers: m.handlers,
action: m.action,
index: 0,
Router: m.Router,
Req: Request{req},
Resp: NewResponseWriter(req.Method, rw),
Render: &DummyRender{rw},
Data: make(map[string]interface{}),
}
c.SetParent(m)
c.Map(c)
c.MapTo(c.Resp, (*http.ResponseWriter)(nil))
c.Map(req)
return c
}
// ServeHTTP is the HTTP Entry point for a Macaron instance.
// Useful if you want to control your own HTTP server.
// Be aware that no middleware will run if no router has been registered.
func (m *Macaron) ServeHTTP(rw http.ResponseWriter, req *http.Request) {
if m.hasURLPrefix {
req.URL.Path = strings.TrimPrefix(req.URL.Path, m.urlPrefix)
}
for _, h := range m.befores {
if h(rw, req) {
return
}
}
m.Router.ServeHTTP(rw, req)
}
func GetDefaultListenInfo() (string, int) {
host := os.Getenv("HOST")
if len(host) == 0 {
host = "0.0.0.0"
}
port := com.StrTo(os.Getenv("PORT")).MustInt()
if port == 0 {
port = 4000
}
return host, port
}
// Run runs the HTTP server, listening on os.Getenv("PORT") or 4000 by default.
func (m *Macaron) Run(args ...interface{}) {
host, port := GetDefaultListenInfo()
if len(args) == 1 {
switch arg := args[0].(type) {
case string:
host = arg
case int:
port = arg
}
} else if len(args) >= 2 {
if arg, ok := args[0].(string); ok {
host = arg
}
if arg, ok := args[1].(int); ok {
port = arg
}
}
addr := host + ":" + com.ToStr(port)
logger := m.GetVal(reflect.TypeOf(m.logger)).Interface().(*log.Logger)
logger.Printf("listening on %s (%s)\n", addr, safeEnv())
logger.Fatalln(http.ListenAndServe(addr, m))
}
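// For example (hypothetical values, not part of the original file):
//
//	m.Run()                  // HOST/PORT env vars, falling back to 0.0.0.0:4000
//	m.Run(8080)              // 0.0.0.0:8080 (unless HOST is set)
//	m.Run("127.0.0.1", 8080) // 127.0.0.1:8080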
// SetURLPrefix sets the URL prefix of the router layer so that it supports suburls.
func (m *Macaron) SetURLPrefix(prefix string) {
m.urlPrefix = prefix
m.hasURLPrefix = len(m.urlPrefix) > 0
}
// ____ ____ .__ ___. .__
// \ \ / /____ _______|__|____ \_ |__ | | ____ ______
// \ Y /\__ \\_ __ \ \__ \ | __ \| | _/ __ \ / ___/
// \ / / __ \| | \/ |/ __ \| \_\ \ |_\ ___/ \___ \
// \___/ (____ /__| |__(____ /___ /____/\___ >____ >
// \/ \/ \/ \/ \/
const (
DEV = "development"
PROD = "production"
TEST = "test"
)
var (
// Env is the environment that Macaron is executing in.
// The MACARON_ENV is read on initialization to set this variable.
Env = DEV
envLock sync.Mutex
// Path of work directory.
Root string
// Flash applies to current request.
FlashNow bool
// Configuration convention object.
cfg *ini.File
)
func setENV(e string) {
envLock.Lock()
defer envLock.Unlock()
if len(e) > 0 {
Env = e
}
}
func safeEnv() string {
envLock.Lock()
defer envLock.Unlock()
return Env
}
func init() {
setENV(os.Getenv("MACARON_ENV"))
var err error
Root, err = os.Getwd()
if err != nil {
panic("error getting work directory: " + err.Error())
}
}
// SetConfig sets data sources for configuration.
func SetConfig(source interface{}, others ...interface{}) (_ *ini.File, err error) {
cfg, err = ini.Load(source, others...)
return Config(), err
}
// Config returns configuration convention object.
// It returns an empty object if there is no one available.
func Config() *ini.File {
if cfg == nil {
return ini.Empty()
}
return cfg
}
| [
"\"HOST\"",
"\"PORT\"",
"\"MACARON_ENV\""
]
| []
| [
"PORT",
"HOST",
"MACARON_ENV"
]
| [] | ["PORT", "HOST", "MACARON_ENV"] | go | 3 | 0 | |
pkg/jx/cmd/create_cluster_eks.go | package cmd
import (
"github.com/jenkins-x/jx/pkg/cloud/amazon"
"io"
"os"
"strconv"
"strings"
"time"
"github.com/jenkins-x/jx/pkg/jx/cmd/templates"
"github.com/jenkins-x/jx/pkg/log"
"github.com/jenkins-x/jx/pkg/util"
"github.com/spf13/cobra"
"gopkg.in/AlecAivazis/survey.v1/terminal"
)
// CreateClusterEKSOptions contains the CLI flags
type CreateClusterEKSOptions struct {
CreateClusterOptions
Flags CreateClusterEKSFlags
}
type CreateClusterEKSFlags struct {
ClusterName string
NodeType string
NodeCount int
NodesMin int
NodesMax int
Region string
Zones string
Profile string
SshPublicKey string
Verbose int
AWSOperationTimeout time.Duration
}
var (
createClusterEKSLong = templates.LongDesc(`
This command creates a new Kubernetes cluster on Amazon Web Services (AWS) using EKS, installing required local dependencies and provisioning the
Jenkins X platform.
EKS is a managed Kubernetes service on AWS.
`)
createClusterEKSExample = templates.Examples(`
# to create a new Kubernetes cluster with Jenkins X in your default zones (from $EKS_AVAILABILITY_ZONES)
jx create cluster eks
# to specify the zones
jx create cluster eks --zones us-west-2a,us-west-2b,us-west-2c
`)
)
// NewCmdCreateClusterEKS creates the command
func NewCmdCreateClusterEKS(f Factory, in terminal.FileReader, out terminal.FileWriter, errOut io.Writer) *cobra.Command {
options := CreateClusterEKSOptions{
CreateClusterOptions: createCreateClusterOptions(f, in, out, errOut, EKS),
}
cmd := &cobra.Command{
Use: "eks",
Short: "Create a new Kubernetes cluster on AWS using EKS",
Long: createClusterEKSLong,
Example: createClusterEKSExample,
Run: func(cmd *cobra.Command, args []string) {
options.Cmd = cmd
options.Args = args
err := options.Run()
CheckErr(err)
},
}
options.addCreateClusterFlags(cmd)
options.addCommonFlags(cmd)
cmd.Flags().StringVarP(&options.Flags.ClusterName, optionClusterName, "n", "", "The name of this cluster.")
cmd.Flags().StringVarP(&options.Flags.NodeType, "node-type", "", "m5.large", "node instance type")
cmd.Flags().IntVarP(&options.Flags.NodeCount, optionNodes, "o", -1, "number of nodes")
cmd.Flags().IntVarP(&options.Flags.NodesMin, "nodes-min", "", -1, "minimum number of nodes")
cmd.Flags().IntVarP(&options.Flags.NodesMax, "nodes-max", "", -1, "maximum number of nodes")
cmd.Flags().IntVarP(&options.Flags.Verbose, "eksctl-log-level", "", -1, "set log level, use 0 to silence, 4 for debugging and 5 for debugging with AWS debug logging (default 3)")
cmd.Flags().DurationVarP(&options.Flags.AWSOperationTimeout, "aws-api-timeout", "", 20*time.Minute, "Duration of AWS API timeout")
cmd.Flags().StringVarP(&options.Flags.Region, "region", "r", "", "The region to use. Default: us-west-2")
cmd.Flags().StringVarP(&options.Flags.Zones, optionZones, "z", "", "Availability Zones. Auto-select if not specified. If provided, this overrides the $EKS_AVAILABILITY_ZONES environment variable")
cmd.Flags().StringVarP(&options.Flags.Profile, "profile", "p", "", "AWS profile to use. If provided, this overrides the AWS_PROFILE environment variable")
cmd.Flags().StringVarP(&options.Flags.SshPublicKey, "ssh-public-key", "", "", "SSH public key to use for nodes (import from local path, or use existing EC2 key pair) (default \"~/.ssh/id_rsa.pub\")")
return cmd
}
// Run runs the command
func (o *CreateClusterEKSOptions) Run() error {
var deps []string
/*
d := binaryShouldBeInstalled("aws")
if d != "" {
deps = append(deps, d)
}
*/
d := binaryShouldBeInstalled("eksctl")
if d != "" {
deps = append(deps, d)
}
d = binaryShouldBeInstalled("heptio-authenticator-aws")
if d != "" {
deps = append(deps, d)
}
err := o.installMissingDependencies(deps)
if err != nil {
log.Errorf("%v\nPlease fix the error or install manually then try again", err)
os.Exit(-1)
}
flags := &o.Flags
zones := flags.Zones
if zones == "" {
zones = os.Getenv("EKS_AVAILABILITY_ZONES")
}
args := []string{"create", "cluster", "--full-ecr-access"}
if flags.ClusterName != "" {
args = append(args, "--name", flags.ClusterName)
}
region, err := amazon.ResolveRegion("", flags.Region)
if err != nil {
return err
}
args = append(args, "--region", region)
if zones != "" {
args = append(args, "--zones", zones)
}
if flags.Profile != "" {
args = append(args, "--profile", flags.Profile)
}
if flags.SshPublicKey != "" {
args = append(args, "--ssh-public-key", flags.SshPublicKey)
}
args = append(args, "--node-type", flags.NodeType)
if flags.NodeCount >= 0 {
args = append(args, "--nodes", strconv.Itoa(flags.NodeCount))
}
if flags.NodesMin >= 0 {
args = append(args, "--nodes-min", strconv.Itoa(flags.NodesMin))
}
if flags.NodesMax >= 0 {
args = append(args, "--nodes-max", strconv.Itoa(flags.NodesMax))
}
if flags.Verbose >= 0 {
args = append(args, "--verbose", strconv.Itoa(flags.Verbose))
}
args = append(args, "--aws-api-timeout", flags.AWSOperationTimeout.String())
log.Info("Creating EKS cluster - this can take a while so please be patient...\n")
log.Infof("You can watch progress in the CloudFormation console: %s\n\n", util.ColorInfo("https://console.aws.amazon.com/cloudformation/"))
log.Infof("running command: %s\n", util.ColorInfo("eksctl "+strings.Join(args, " ")))
err = o.runCommandVerbose("eksctl", args...)
if err != nil {
return err
}
log.Blank()
log.Info("Initialising cluster ...\n")
return o.initAndInstall(EKS)
}
| [
"\"EKS_AVAILABILITY_ZONES\""
]
| []
| [
"EKS_AVAILABILITY_ZONES"
]
| [] | ["EKS_AVAILABILITY_ZONES"] | go | 1 | 0 | |
cache/blobs.go | package cache
import (
"context"
"fmt"
"io"
"os"
"strconv"
"github.com/containerd/containerd/content"
"github.com/containerd/containerd/diff"
"github.com/containerd/containerd/leases"
"github.com/containerd/containerd/mount"
"github.com/klauspost/compress/zstd"
"github.com/moby/buildkit/session"
"github.com/moby/buildkit/util/compression"
"github.com/moby/buildkit/util/flightcontrol"
"github.com/moby/buildkit/util/winlayers"
digest "github.com/opencontainers/go-digest"
imagespecidentity "github.com/opencontainers/image-spec/identity"
ocispecs "github.com/opencontainers/image-spec/specs-go/v1"
"github.com/pkg/errors"
"github.com/sirupsen/logrus"
"golang.org/x/sync/errgroup"
)
var g flightcontrol.Group
const containerdUncompressed = "containerd.io/uncompressed"
var ErrNoBlobs = errors.Errorf("no blobs for snapshot")
// computeBlobChain ensures every ref in a parent chain has an associated blob in the content store. If
// a blob is missing and createIfNeeded is true, then the blob will be created, otherwise ErrNoBlobs will
// be returned. Caller must hold a lease when calling this function.
// If forceCompression is specified but the blob of compressionType doesn't exist, this function creates it.
func (sr *immutableRef) computeBlobChain(ctx context.Context, createIfNeeded bool, compressionType compression.Type, forceCompression bool, s session.Group) error {
if _, ok := leases.FromContext(ctx); !ok {
return errors.Errorf("missing lease requirement for computeBlobChain")
}
if err := sr.Finalize(ctx); err != nil {
return err
}
if isTypeWindows(sr) {
ctx = winlayers.UseWindowsLayerMode(ctx)
}
return computeBlobChain(ctx, sr, createIfNeeded, compressionType, forceCompression, s)
}
type compressor func(dest io.Writer, requiredMediaType string) (io.WriteCloser, error)
func computeBlobChain(ctx context.Context, sr *immutableRef, createIfNeeded bool, compressionType compression.Type, forceCompression bool, s session.Group) error {
eg, ctx := errgroup.WithContext(ctx)
switch sr.kind() {
case Merge:
for _, parent := range sr.mergeParents {
parent := parent
eg.Go(func() error {
return computeBlobChain(ctx, parent, createIfNeeded, compressionType, forceCompression, s)
})
}
case Layer:
eg.Go(func() error {
return computeBlobChain(ctx, sr.layerParent, createIfNeeded, compressionType, forceCompression, s)
})
fallthrough
case BaseLayer:
eg.Go(func() error {
_, err := g.Do(ctx, fmt.Sprintf("%s-%t", sr.ID(), createIfNeeded), func(ctx context.Context) (interface{}, error) {
if sr.getBlob() != "" {
return nil, nil
}
if !createIfNeeded {
return nil, errors.WithStack(ErrNoBlobs)
}
var mediaType string
var compressorFunc compressor
var finalize func(context.Context, content.Store) (map[string]string, error)
switch compressionType {
case compression.Uncompressed:
mediaType = ocispecs.MediaTypeImageLayer
case compression.Gzip:
mediaType = ocispecs.MediaTypeImageLayerGzip
case compression.EStargz:
compressorFunc, finalize = compressEStargz()
mediaType = ocispecs.MediaTypeImageLayerGzip
case compression.Zstd:
compressorFunc = zstdWriter
mediaType = ocispecs.MediaTypeImageLayer + "+zstd"
default:
return nil, errors.Errorf("unknown layer compression type: %q", compressionType)
}
var lower []mount.Mount
if sr.layerParent != nil {
m, err := sr.layerParent.Mount(ctx, true, s)
if err != nil {
return nil, err
}
var release func() error
lower, release, err = m.Mount()
if err != nil {
return nil, err
}
if release != nil {
defer release()
}
}
m, err := sr.Mount(ctx, true, s)
if err != nil {
return nil, err
}
upper, release, err := m.Mount()
if err != nil {
return nil, err
}
if release != nil {
defer release()
}
var desc ocispecs.Descriptor
// Determine differ and error/log handling according to the platform, envvar and the snapshotter.
var enableOverlay, fallback, logWarnOnErr bool
if forceOvlStr := os.Getenv("BUILDKIT_DEBUG_FORCE_OVERLAY_DIFF"); forceOvlStr != "" {
enableOverlay, err = strconv.ParseBool(forceOvlStr)
if err != nil {
return nil, errors.Wrapf(err, "invalid boolean in BUILDKIT_DEBUG_FORCE_OVERLAY_DIFF")
}
fallback = false // prohibit fallback on debug
} else if !isTypeWindows(sr) {
enableOverlay, fallback = true, true
switch sr.cm.Snapshotter.Name() {
case "overlayfs", "stargz":
// overlayfs-based snapshotters should support overlay diff. so print warn log on failure.
logWarnOnErr = true
case "fuse-overlayfs":
// not supported with fuse-overlayfs snapshotter which doesn't provide overlayfs mounts.
// TODO: add support for fuse-overlayfs
enableOverlay = false
}
}
if enableOverlay {
computed, ok, err := sr.tryComputeOverlayBlob(ctx, lower, upper, mediaType, sr.ID(), compressorFunc)
if !ok || err != nil {
if !fallback {
if !ok {
return nil, errors.Errorf("overlay mounts not detected (lower=%+v,upper=%+v)", lower, upper)
}
if err != nil {
return nil, errors.Wrapf(err, "failed to compute overlay diff")
}
}
if logWarnOnErr {
logrus.Warnf("failed to compute blob by overlay differ (ok=%v): %v", ok, err)
}
}
if ok {
desc = computed
}
}
if desc.Digest == "" {
desc, err = sr.cm.Differ.Compare(ctx, lower, upper,
diff.WithMediaType(mediaType),
diff.WithReference(sr.ID()),
diff.WithCompressor(compressorFunc),
)
if err != nil {
return nil, err
}
}
if desc.Annotations == nil {
desc.Annotations = map[string]string{}
}
if finalize != nil {
a, err := finalize(ctx, sr.cm.ContentStore)
if err != nil {
return nil, errors.Wrapf(err, "failed to finalize compression")
}
for k, v := range a {
desc.Annotations[k] = v
}
}
info, err := sr.cm.ContentStore.Info(ctx, desc.Digest)
if err != nil {
return nil, err
}
if diffID, ok := info.Labels[containerdUncompressed]; ok {
desc.Annotations[containerdUncompressed] = diffID
} else if mediaType == ocispecs.MediaTypeImageLayer {
desc.Annotations[containerdUncompressed] = desc.Digest.String()
} else {
return nil, errors.Errorf("unknown layer compression type")
}
if err := sr.setBlob(ctx, compressionType, desc); err != nil {
return nil, err
}
return nil, nil
})
if err != nil {
return err
}
if forceCompression {
if err := ensureCompression(ctx, sr, compressionType, s); err != nil {
return errors.Wrapf(err, "failed to ensure compression type of %q", compressionType)
}
}
return nil
})
}
if err := eg.Wait(); err != nil {
return err
}
return sr.computeChainMetadata(ctx)
}
// setBlob associates a blob with the cache record.
// A lease must be held for the blob when calling this function
func (sr *immutableRef) setBlob(ctx context.Context, compressionType compression.Type, desc ocispecs.Descriptor) error {
if _, ok := leases.FromContext(ctx); !ok {
return errors.Errorf("missing lease requirement for setBlob")
}
diffID, err := diffIDFromDescriptor(desc)
if err != nil {
return err
}
if _, err := sr.cm.ContentStore.Info(ctx, desc.Digest); err != nil {
return err
}
if compressionType == compression.UnknownCompression {
return errors.Errorf("unhandled layer media type: %q", desc.MediaType)
}
sr.mu.Lock()
defer sr.mu.Unlock()
if sr.getBlob() != "" {
return nil
}
if err := sr.finalize(ctx); err != nil {
return err
}
if err := sr.cm.LeaseManager.AddResource(ctx, leases.Lease{ID: sr.ID()}, leases.Resource{
ID: desc.Digest.String(),
Type: "content",
}); err != nil {
return err
}
sr.queueDiffID(diffID)
sr.queueBlob(desc.Digest)
sr.queueMediaType(desc.MediaType)
sr.queueBlobSize(desc.Size)
if err := sr.commitMetadata(); err != nil {
return err
}
if err := sr.addCompressionBlob(ctx, desc, compressionType); err != nil {
return err
}
return nil
}
func (sr *immutableRef) computeChainMetadata(ctx context.Context) error {
if _, ok := leases.FromContext(ctx); !ok {
return errors.Errorf("missing lease requirement for computeChainMetadata")
}
sr.mu.Lock()
defer sr.mu.Unlock()
if sr.getChainID() != "" {
return nil
}
var chainID digest.Digest
var blobChainID digest.Digest
switch sr.kind() {
case BaseLayer:
diffID := sr.getDiffID()
chainID = diffID
blobChainID = imagespecidentity.ChainID([]digest.Digest{digest.Digest(sr.getBlob()), diffID})
case Layer:
if parentChainID := sr.layerParent.getChainID(); parentChainID != "" {
chainID = parentChainID
} else {
return errors.Errorf("failed to set chain for reference with non-addressable parent %q", sr.layerParent.GetDescription())
}
if parentBlobChainID := sr.layerParent.getBlobChainID(); parentBlobChainID != "" {
blobChainID = parentBlobChainID
} else {
return errors.Errorf("failed to set blobchain for reference with non-addressable parent %q", sr.layerParent.GetDescription())
}
diffID := digest.Digest(sr.getDiffID())
chainID = imagespecidentity.ChainID([]digest.Digest{chainID, diffID})
blobID := imagespecidentity.ChainID([]digest.Digest{digest.Digest(sr.getBlob()), diffID})
blobChainID = imagespecidentity.ChainID([]digest.Digest{blobChainID, blobID})
case Merge:
// Merge chain IDs can re-use the first input chain ID as a base, but after that have to
// be computed one-by-one for each blob in the chain. It should have the same value as
// if you had created the merge by unpacking all the blobs on top of each other with GetByBlob.
baseInput := sr.mergeParents[0]
chainID = baseInput.getChainID()
blobChainID = baseInput.getBlobChainID()
for _, mergeParent := range sr.mergeParents[1:] {
for _, layer := range mergeParent.layerChain() {
diffID := digest.Digest(layer.getDiffID())
chainID = imagespecidentity.ChainID([]digest.Digest{chainID, diffID})
blobID := imagespecidentity.ChainID([]digest.Digest{digest.Digest(layer.getBlob()), diffID})
blobChainID = imagespecidentity.ChainID([]digest.Digest{blobChainID, blobID})
}
}
}
sr.queueChainID(chainID)
sr.queueBlobChainID(blobChainID)
if err := sr.commitMetadata(); err != nil {
return err
}
return nil
}
func isTypeWindows(sr *immutableRef) bool {
if sr.GetLayerType() == "windows" {
return true
}
switch sr.kind() {
case Merge:
for _, p := range sr.mergeParents {
if isTypeWindows(p) {
return true
}
}
case Layer:
return isTypeWindows(sr.layerParent)
}
return false
}
// ensureCompression ensures the specified ref has the blob of the specified compression Type.
func ensureCompression(ctx context.Context, ref *immutableRef, compressionType compression.Type, s session.Group) error {
_, err := g.Do(ctx, fmt.Sprintf("%s-%d", ref.ID(), compressionType), func(ctx context.Context) (interface{}, error) {
desc, err := ref.ociDesc(ctx, ref.descHandlers)
if err != nil {
return nil, err
}
// Resolve converters
layerConvertFunc, err := getConverter(ctx, ref.cm.ContentStore, desc, compressionType)
if err != nil {
return nil, err
} else if layerConvertFunc == nil {
if isLazy, err := ref.isLazy(ctx); err != nil {
return nil, err
} else if isLazy {
// This ref can be used as the specified compressionType. Keep it lazy.
return nil, nil
}
return nil, ref.addCompressionBlob(ctx, desc, compressionType)
}
// First, lookup local content store
if _, err := ref.getCompressionBlob(ctx, compressionType); err == nil {
return nil, nil // found the compression variant. no need to convert.
}
// Convert layer compression type
if err := (lazyRefProvider{
ref: ref,
desc: desc,
dh: ref.descHandlers[desc.Digest],
session: s,
}).Unlazy(ctx); err != nil {
return nil, err
}
newDesc, err := layerConvertFunc(ctx, ref.cm.ContentStore, desc)
if err != nil {
return nil, errors.Wrapf(err, "failed to convert")
}
// Start to track converted layer
if err := ref.addCompressionBlob(ctx, *newDesc, compressionType); err != nil {
return nil, errors.Wrapf(err, "failed to add compression blob")
}
return nil, nil
})
return err
}
func zstdWriter(dest io.Writer, requiredMediaType string) (io.WriteCloser, error) {
return zstd.NewWriter(dest)
}
| [
"\"BUILDKIT_DEBUG_FORCE_OVERLAY_DIFF\""
]
| []
| [
"BUILDKIT_DEBUG_FORCE_OVERLAY_DIFF"
]
| [] | ["BUILDKIT_DEBUG_FORCE_OVERLAY_DIFF"] | go | 1 | 0 | |
calliope/backend/pyomo/constraints/__init__.py | # flake8: noqa
from calliope.backend.pyomo.constraints.capacity import *
from calliope.backend.pyomo.constraints.conversion import *
from calliope.backend.pyomo.constraints.conversion_plus import *
from calliope.backend.pyomo.constraints.costs import *
from calliope.backend.pyomo.constraints.dispatch import *
from calliope.backend.pyomo.constraints.energy_balance import *
from calliope.backend.pyomo.constraints.export import *
from calliope.backend.pyomo.constraints.milp import *
from calliope.backend.pyomo.constraints.network import *
from calliope.backend.pyomo.constraints.policy import *
| []
| []
| []
| [] | [] | python | null | null | null |
backend/testsettingpage_dev_23375/wsgi.py | """
WSGI config for testsettingpage_dev_23375 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'testsettingpage_dev_23375.settings')
application = get_wsgi_application()
| []
| []
| []
| [] | [] | python | 0 | 0 | |
snappass/main.py | import os
import re
import sys
import uuid
import redis
from cryptography.fernet import Fernet
from flask import abort, Flask, render_template, request
from redis.exceptions import ConnectionError
from werkzeug.urls import url_quote_plus
from werkzeug.urls import url_unquote_plus
NO_SSL = os.environ.get('NO_SSL', False)
TOKEN_SEPARATOR = '~'
# Initialize Flask Application
app = Flask(__name__)
if os.environ.get('DEBUG'):
app.debug = True
app.secret_key = os.environ.get('SECRET_KEY', 'Secret Key')
app.config.update(
dict(STATIC_URL=os.environ.get('STATIC_URL', 'static')))
# Initialize Redis
if os.environ.get('MOCK_REDIS'):
from mockredis import mock_strict_redis_client
redis_client = mock_strict_redis_client()
elif os.environ.get('REDIS_URL'):
redis_client = redis.StrictRedis.from_url(os.environ.get('REDIS_URL'))
else:
redis_host = os.environ.get('REDIS_HOST', 'localhost')
redis_port = os.environ.get('REDIS_PORT', 6379)
redis_db = os.environ.get('SNAPPASS_REDIS_DB', 0)
redis_client = redis.StrictRedis(
host=redis_host, port=redis_port, db=redis_db)
REDIS_PREFIX = os.environ.get('REDIS_PREFIX', 'snappass')
TIME_CONVERSION = {'week': 604800, 'day': 86400, 'hour': 3600}
def check_redis_alive(fn):
def inner(*args, **kwargs):
try:
if fn.__name__ == 'main':
redis_client.ping()
return fn(*args, **kwargs)
except ConnectionError as e:
print('Failed to connect to redis! %s' % e.message)
if fn.__name__ == 'main':
sys.exit(0)
else:
return abort(500)
return inner
def encrypt(password):
"""
Take a password string, encrypt it with Fernet symmetric encryption,
and return the result (bytes), with the decryption key (bytes)
"""
encryption_key = Fernet.generate_key()
fernet = Fernet(encryption_key)
encrypted_password = fernet.encrypt(password.encode('utf-8'))
return encrypted_password, encryption_key
def decrypt(password, decryption_key):
"""
Decrypt a password (bytes) using the provided key (bytes),
and return the plain-text password (bytes).
"""
fernet = Fernet(decryption_key)
return fernet.decrypt(password)
def parse_token(token):
token_fragments = token.split(TOKEN_SEPARATOR, 1) # Split once, not more.
storage_key = token_fragments[0]
try:
decryption_key = token_fragments[1].encode('utf-8')
except IndexError:
decryption_key = None
return storage_key, decryption_key
@check_redis_alive
def set_password(password, ttl):
"""
Encrypt and store the password for the specified lifetime.
Returns a token comprised of the key where the encrypted password
is stored, and the decryption key.
"""
storage_key = REDIS_PREFIX + uuid.uuid4().hex
encrypted_password, encryption_key = encrypt(password)
redis_client.setex(storage_key, ttl, encrypted_password)
encryption_key = encryption_key.decode('utf-8')
token = TOKEN_SEPARATOR.join([storage_key, encryption_key])
return token
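# Illustrative example (hypothetical values, not part of the original module):
# set_password("hunter2", 3600) stores the Fernet-encrypted password in Redis
# under a key such as "snappass3f0c..." and returns a token of the form
# "<storage_key>~<fernet_key>"; parse_token() later splits that token back
# into the Redis key and the decryption key.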
@check_redis_alive
def get_password(token):
"""
From a given token, return the initial password.
If the token is tilde-separated, we decrypt the password fetched from Redis.
If not, the password is simply returned as is.
"""
storage_key, decryption_key = parse_token(token)
password = redis_client.get(storage_key)
redis_client.delete(storage_key)
if password is not None:
if decryption_key is not None:
password = decrypt(password, decryption_key)
return password.decode('utf-8')
@check_redis_alive
def password_exists(token):
storage_key, decryption_key = parse_token(token)
return redis_client.exists(storage_key)
def empty(value):
if not value:
return True
def clean_input():
"""
Make sure we're not getting bad data from the front end,
format data to be machine readable
"""
if empty(request.form.get('password', '')):
abort(400)
if empty(request.form.get('ttl', '')):
abort(400)
time_period = request.form['ttl'].lower()
if time_period not in TIME_CONVERSION:
abort(400)
return TIME_CONVERSION[time_period], request.form['password']
@app.route('/', methods=['GET'])
def index():
return render_template('set_password.html')
@app.route('/', methods=['POST'])
def handle_password():
ttl, password = clean_input()
token = set_password(password, ttl)
if NO_SSL:
base_url = request.url_root
else:
base_url = request.url_root.replace("http://", "https://")
link = base_url + url_quote_plus(token)
return render_template('confirm.html', password_link=link)
@app.route('/<password_key>', methods=['GET'])
def preview_password(password_key):
password_key = url_unquote_plus(password_key)
if not password_exists(password_key):
abort(404)
return render_template('preview.html')
@app.route('/<password_key>', methods=['POST'])
def show_password(password_key):
password_key = url_unquote_plus(password_key)
password = get_password(password_key)
if not password:
abort(404)
return render_template('password.html', password=password)
@check_redis_alive
def main():
app.run(host='0.0.0.0')
if __name__ == '__main__':
main()
| []
| []
| [
"REDIS_PORT",
"SNAPPASS_REDIS_DB",
"STATIC_URL",
"NO_SSL",
"REDIS_HOST",
"SECRET_KEY",
"DEBUG",
"REDIS_URL",
"MOCK_REDIS",
"REDIS_PREFIX"
]
| [] | ["REDIS_PORT", "SNAPPASS_REDIS_DB", "STATIC_URL", "NO_SSL", "REDIS_HOST", "SECRET_KEY", "DEBUG", "REDIS_URL", "MOCK_REDIS", "REDIS_PREFIX"] | python | 10 | 0 | |
s3/e2e_test.go | package s3_test
import (
"context"
"os"
"testing"
"github.com/minio/minio-go/v7"
"github.com/minio/minio-go/v7/pkg/credentials"
"github.com/gotd/contrib/internal/tests"
"github.com/gotd/contrib/s3"
)
func TestE2E(t *testing.T) {
addr := os.Getenv("S3_ADDR")
if addr == "" {
t.Skip("Set S3_ADDR to run E2E test")
}
db, err := minio.New(addr, &minio.Options{
Creds: credentials.NewStaticV4(
os.Getenv("MINIO_ACCESS_KEY"),
os.Getenv("MINIO_SECRET_KEY"),
"",
),
})
if err != nil {
t.Fatal(err)
}
tests.RetryUntilAvailable(t, "s3", addr, func(ctx context.Context) error {
_, err := db.ListBuckets(ctx)
return err
})
tests.TestSessionStorage(t, s3.NewSessionStorage(db, "testsession", "session"))
}
| [
"\"S3_ADDR\"",
"\"MINIO_ACCESS_KEY\"",
"\"MINIO_SECRET_KEY\""
]
| []
| [
"MINIO_SECRET_KEY",
"MINIO_ACCESS_KEY",
"S3_ADDR"
]
| [] | ["MINIO_SECRET_KEY", "MINIO_ACCESS_KEY", "S3_ADDR"] | go | 3 | 0 | |
names_native.go | // +build !js
package main
import (
"os"
"strings"
)
// Languages returns the user's preferred languages in BCP 47 format in
// priority order.
func Languages() (langs []string) {
add := func(lang string) {
for _, l := range strings.Split(lang, ":") {
// Remove encoding (we only support UTF-8).
if i := strings.IndexRune(l, '.'); i != -1 {
l = l[:i]
}
// Skip empty locales or the "C" locale.
if l == "" || l == "C" {
continue
}
// Add the locale.
langs = append(langs, strings.Replace(l, "_", "-", -1))
// Add the base language if it is a dialect.
if i := strings.IndexRune(l, '_'); i != -1 {
langs = append(langs, l[:i])
}
}
}
add(os.Getenv("LANGUAGE"))
add(os.Getenv("LC_MESSAGES"))
add(os.Getenv("LC_ALL"))
add(os.Getenv("LANG"))
return
}
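// Illustrative example (assumed environment, not from the original source):
// with LANGUAGE="de_DE.UTF-8:en_US" and the other variables unset, Languages()
// returns []string{"de-DE", "de", "en-US", "en"}.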
| [
"\"LANGUAGE\"",
"\"LC_MESSAGES\"",
"\"LC_ALL\"",
"\"LANG\""
]
| []
| [
"LC_ALL",
"LC_MESSAGES",
"LANG",
"LANGUAGE"
]
| [] | ["LC_ALL", "LC_MESSAGES", "LANG", "LANGUAGE"] | go | 4 | 0 | |
tests/__init__.py | """
A fake, or multiple fakes, for each Stripe object.
Originally collected using API VERSION 2015-07-28.
Updated to API VERSION 2016-03-07 with bogus fields.
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import json
import logging
import os
import sys
from copy import deepcopy
from datetime import datetime
from pathlib import Path
from typing import Any
from django.core.exceptions import ObjectDoesNotExist
from django.db import models
from django.utils import dateformat, timezone
from djstripe.webhooks import TEST_EVENT_ID
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "tests.settings")
logger = logging.getLogger(__name__)
FUTURE_DATE = datetime(2100, 4, 30, tzinfo=timezone.utc)
FIXTURE_DIR_PATH = Path(__file__).parent.joinpath("fixtures")
# Flags for various bugs with mock autospec
# These can be removed once we drop support for the affected python versions
# Don't try and use autospec=True on staticmethods on <py3.7
# see https://bugs.python.org/issue23078
IS_STATICMETHOD_AUTOSPEC_SUPPORTED = sys.version_info >= (3, 7, 4)
class AssertStripeFksMixin:
def _get_field_str(self, field) -> str:
if isinstance(field, models.OneToOneRel):
if field.parent_link:
return ""
else:
reverse_id_name = str(field.remote_field.foreign_related_fields[0])
return (
reverse_id_name.replace("djstripe_id", field.name)
+ " (related name)"
)
elif isinstance(field, models.ForeignKey):
return str(field)
else:
return ""
def assert_fks(self, obj, expected_blank_fks, processed_stripe_ids=None):
"""
Recursively walk through fks on obj, asserting they're not-none
:param obj:
:param expected_blank_fks: fields that are expected to be None
:param processed_stripe_ids: set of objects ids already processed
:return:
"""
if processed_stripe_ids is None:
processed_stripe_ids = set()
processed_stripe_ids.add(obj.id)
for field in obj._meta.get_fields():
field_str = self._get_field_str(field)
if not field_str or field_str.endswith(".djstripe_owner_account"):
continue
try:
field_value = getattr(obj, field.name)
except ObjectDoesNotExist:
field_value = None
if field_str in expected_blank_fks:
self.assertIsNone(field_value, field_str)
else:
self.assertIsNotNone(field_value, field_str)
if field_value.id not in processed_stripe_ids:
# recurse into the object if it's not already been checked
self.assert_fks(
field_value, expected_blank_fks, processed_stripe_ids
)
logger.warning("checked {}".format(field_str))
def load_fixture(filename):
with FIXTURE_DIR_PATH.joinpath(filename).open("r") as f:
return json.load(f)
def datetime_to_unix(datetime_):
return int(dateformat.format(datetime_, "U"))
class StripeItem(dict):
"""Flexible class built to mock any generic Stripe object.
Implements object access + deletion methods to match the behavior
of Stripe's library, which allows both object + dictionary access.
Has a delete method since (most) Stripe objects can be deleted.
"""
def __getattr__(self, name):
"""Give StripeItem normal object access to match Stripe behavior."""
if name in self:
return self[name]
else:
raise AttributeError("No such attribute: " + name)
def __setattr__(self, name, value):
self[name] = value
def __delattr__(self, name):
if name in self:
del self[name]
else:
raise AttributeError("No such attribute: " + name)
def delete(self) -> bool:
"""Superficial mock that adds a deleted attribute."""
self.deleted = True
return self.deleted
@classmethod
def class_url(cls):
return "/v1/test-items/"
def instance_url(self):
"""Superficial mock that emulates instance_url."""
id = self.get("id")
base = self.class_url()
return "%s/%s" % (base, id)
def request(self, method, url, params) -> dict:
"""Superficial mock that emulates request method."""
assert method == "post"
for key, value in params.items():
self.__setattr__(key, value)
return self
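# Minimal sanity sketch of StripeItem (the ids and fields below are made up):
# attribute and dictionary access are interchangeable, and delete() only flags
# the fake as deleted.
_example_item = StripeItem(id="itm_example0001", amount=500)
assert _example_item.amount == _example_item["amount"] == 500
assert _example_item.delete() is True
del _example_item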
class StripeList(dict):
"""Mock a generic Stripe Iterable.
It has the relevant attributes of a stripe iterable (has_more, data).
This mock is important so we can use stripe's `list` method in our testing.
StripeList.list() will return the StripeList.
    Additionally, iterating over instances of StripeList will iterate over
the data attribute, just like Stripe iterables.
Attributes:
has_more: mock has_more flag. Default False.
**kwargs: all of the fields of the stripe object, generally as a dictionary.
"""
object = "list"
url = "/v1/fakes"
has_more = False
def __getattr__(self, name):
"""Give StripeItem normal object access to match Stripe behavior."""
if name in self:
return self[name]
else:
raise AttributeError("No such attribute: " + name)
def __setattr__(self, name, value):
self[name] = value
def __delattr__(self, name):
if name in self:
del self[name]
else:
raise AttributeError("No such attribute: " + name)
def __iter__(self) -> Any:
"""Make StripeList an iterable, to match the Stripe iterable behavior."""
self.iter_copy = self.data.copy()
return self
def __next__(self) -> StripeItem:
"""Define iteration for StripeList."""
if len(self.iter_copy) > 0:
return self.iter_copy.pop(0)
else:
raise StopIteration()
def list(self, **kwargs: Any) -> "StripeList":
"""Add a list method to the StripeList which returns itself.
        list() accepts arbitrary kwargs; be careful if you expect the
argument-accepting functionality of Stripe's list() method.
"""
return self
def auto_paging_iter(self) -> "StripeList":
"""Add an auto_paging_iter method to the StripeList which returns itself.
The StripeList is an iterable, so this mimics the real behavior.
"""
return self
@property
def total_count(self):
return len(self.data)
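# Minimal sanity sketch of StripeList (ids are made up): iteration walks the
# ``data`` attribute, and list()/auto_paging_iter() simply return the same object.
_example_list = StripeList(data=[StripeItem(id="itm_a"), StripeItem(id="itm_b")])
assert [item.id for item in _example_list] == ["itm_a", "itm_b"]
assert _example_list.total_count == 2 and _example_list.list() is _example_list
del _example_list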
class ExternalAccounts(object):
def __init__(self, external_account_fakes):
self.external_account_fakes = external_account_fakes
def create(self, source, api_key=None):
for fake_external_account in self.external_account_fakes:
if fake_external_account["id"] == source:
return fake_external_account
def retrieve(self, id, expand=None): # noqa
for fake_external_account in self.external_account_fakes:
if fake_external_account["id"] == id:
return fake_external_account
def list(self, **kwargs):
return StripeList(data=self.external_account_fakes)
class AccountDict(dict):
def save(self, idempotency_key=None):
return self
@property
def external_accounts(self):
return ExternalAccounts(
external_account_fakes=self["external_accounts"]["data"]
)
def create(self):
from djstripe.models import Account
return Account.sync_from_stripe_data(self)
FAKE_STANDARD_ACCOUNT = AccountDict(
load_fixture("account_standard_acct_1Fg9jUA3kq9o1aTc.json")
)
# Stripe Platform Account to which the STRIPE_SECRET_KEY belongs to
FAKE_PLATFORM_ACCOUNT = deepcopy(FAKE_STANDARD_ACCOUNT)
FAKE_CUSTOM_ACCOUNT = AccountDict(
load_fixture("account_custom_acct_1IuHosQveW0ONQsd.json")
)
FAKE_EXPRESS_ACCOUNT = AccountDict(
load_fixture("account_express_acct_1IuHosQveW0ONQsd.json")
)
FAKE_BALANCE_TRANSACTION = load_fixture(
"balance_transaction_txn_fake_ch_fakefakefakefakefake0001.json"
)
FAKE_BALANCE_TRANSACTION_II = {
"id": "txn_16g5h62eZvKYlo2CQ2AHA89s",
"object": "balance_transaction",
"amount": 65400,
"available_on": 1441670400,
"created": 1441079064,
"currency": "usd",
"description": None,
"fee": 1927,
"fee_details": [
{
"amount": 1927,
"currency": "usd",
"type": "stripe_fee",
"description": "Stripe processing fees",
"application": None,
}
],
"net": 63473,
"source": "ch_16g5h62eZvKYlo2CMRXkSqa0",
"sourced_transfers": {
"object": "list",
"total_count": 0,
"has_more": False,
"url": "/v1/transfers?source_transaction=ch_16g5h62eZvKYlo2CMRXkSqa0",
"data": [],
},
"status": "pending",
"type": "charge",
}
FAKE_BALANCE_TRANSACTION_III = {
"id": "txn_16g5h62eZvKYlo2CQ2AHA89s",
"object": "balance_transaction",
"amount": 2000,
"available_on": 1441670400,
"created": 1441079064,
"currency": "usd",
"description": None,
"fee": 1927,
"fee_details": [
{
"amount": 1927,
"currency": "usd",
"type": "stripe_fee",
"description": "Stripe processing fees",
"application": None,
}
],
"net": 73,
"source": "ch_16g5h62eZvKYlo2CMRXkSqa0",
"sourced_transfers": {
"object": "list",
"total_count": 0,
"has_more": False,
"url": "/v1/transfers?source_transaction=ch_16g5h62eZvKYlo2CMRXkSqa0",
"data": [],
},
"status": "pending",
"type": "charge",
}
FAKE_BALANCE_TRANSACTION_IV = {
"id": "txn_16g5h62eZvKYlo2CQ2AHA89s",
"object": "balance_transaction",
"amount": 19010,
"available_on": 1441670400,
"created": 1441079064,
"currency": "usd",
"description": None,
"fee": 1927,
"fee_details": [
{
"amount": 1927,
"currency": "usd",
"type": "stripe_fee",
"description": "Stripe processing fees",
"application": None,
}
],
"net": 17083,
"source": "ch_16g5h62eZvKYlo2CMRXkSqa0",
"sourced_transfers": {
"object": "list",
"total_count": 0,
"has_more": False,
"url": "/v1/transfers?source_transaction=ch_16g5h62eZvKYlo2CMRXkSqa0",
"data": [],
},
"status": "pending",
"type": "charge",
}
class LegacySourceDict(dict):
def delete(self):
return self
class BankAccountDict(LegacySourceDict):
pass
FAKE_BANK_ACCOUNT = {
"id": "ba_16hTzo2eZvKYlo2CeSjfb0tS",
"object": "bank_account",
"account_holder_name": None,
"account_holder_type": None,
"bank_name": "STRIPE TEST BANK",
"country": "US",
"currency": "usd",
"fingerprint": "1JWtPxqbdX5Gamtc",
"last4": "6789",
"routing_number": "110000000",
"status": "new",
}
FAKE_BANK_ACCOUNT_II = {
"id": "ba_17O4Tz2eZvKYlo2CMYsxroV5",
"object": "bank_account",
"account_holder_name": None,
"account_holder_type": None,
"bank_name": None,
"country": "US",
"currency": "usd",
"fingerprint": "1JWtPxqbdX5Gamtc",
"last4": "6789",
"routing_number": "110000000",
"status": "new",
}
# Stripe Customer Bank Account Payment Source
FAKE_BANK_ACCOUNT_SOURCE = BankAccountDict(
load_fixture("bank_account_ba_fakefakefakefakefake0003.json")
)
FAKE_BANK_ACCOUNT_IV = BankAccountDict(
load_fixture("bank_account_ba_fakefakefakefakefake0004.json")
)
class CardDict(LegacySourceDict):
pass
FAKE_CARD = CardDict(load_fixture("card_card_fakefakefakefakefake0001.json"))
FAKE_CARD_II = CardDict(load_fixture("card_card_fakefakefakefakefake0002.json"))
FAKE_CARD_III = CardDict(load_fixture("card_card_fakefakefakefakefake0003.json"))
# Stripe Custom Connected Account Card Payout Source
FAKE_CARD_IV = CardDict(load_fixture("card_card_fakefakefakefakefake0004.json"))
class SourceDict(dict):
def detach(self):
self.pop("customer")
self.update({"status": "consumed"})
return self
# Attached, chargeable source
FAKE_SOURCE = SourceDict(load_fixture("source_src_fakefakefakefakefake0001.json"))
# Detached, consumed source
FAKE_SOURCE_II = SourceDict(
{
"id": "src_1DuuGjkE6hxDGaasasjdlajl",
"object": "source",
"amount": None,
"card": {
"address_line1_check": None,
"address_zip_check": "pass",
"brand": "Visa",
"country": "US",
"cvc_check": "pass",
"dynamic_last4": None,
"exp_month": 10,
"exp_year": 2029,
"fingerprint": "TmOrYzPdAoO6YFNB",
"funding": "credit",
"last4": "4242",
"name": None,
"three_d_secure": "optional",
"tokenization_method": None,
},
"client_secret": "src_client_secret_ENg5dyB1KTXCAEJGJQWEf67X",
"created": 1548046215,
"currency": None,
"flow": "none",
"livemode": False,
"metadata": {"djstripe_test_fake_id": "src_fakefakefakefakefake0002"},
"owner": {
"address": {
"city": None,
"country": None,
"line1": None,
"line2": None,
"postal_code": "90210",
"state": None,
},
"email": None,
"name": None,
"phone": None,
"verified_address": None,
"verified_email": None,
"verified_name": None,
"verified_phone": None,
},
"statement_descriptor": None,
"status": "consumed",
"type": "card",
"usage": "reusable",
}
)
FAKE_PAYMENT_INTENT_I = load_fixture("payment_intent_pi_fakefakefakefakefake0001.json")
FAKE_PAYMENT_INTENT_II = deepcopy(FAKE_PAYMENT_INTENT_I)
FAKE_PAYMENT_INTENT_II["customer"] = "cus_4UbFSo9tl62jqj" # FAKE_CUSTOMER_II
FAKE_PAYMENT_INTENT_DESTINATION_CHARGE = load_fixture(
"payment_intent_pi_destination_charge.json"
)
class PaymentMethodDict(dict):
def detach(self):
self.pop("customer")
return self
FAKE_PAYMENT_METHOD_I = PaymentMethodDict(
load_fixture("payment_method_pm_fakefakefakefake0001.json")
)
FAKE_PAYMENT_METHOD_II = deepcopy(FAKE_PAYMENT_METHOD_I)
FAKE_PAYMENT_METHOD_II["customer"] = "cus_4UbFSo9tl62jqj" # FAKE_CUSTOMER_II
# FAKE_CARD, but accessed as a PaymentMethod
FAKE_CARD_AS_PAYMENT_METHOD = PaymentMethodDict(
load_fixture("payment_method_card_fakefakefakefakefake0001.json")
)
# TODO - add to regenerate_test_fixtures and replace this with a JSON fixture
FAKE_SETUP_INTENT_I = {
"id": "seti_fakefakefakefake0001",
"object": "setup_intent",
"cancellation_reason": None,
"payment_method_types": ["card"],
"status": "requires_payment_method",
"usage": "off_session",
"payment_method": None,
"on_behalf_of": None,
"customer": None,
}
FAKE_SETUP_INTENT_II = {
"application": None,
"cancellation_reason": None,
"client_secret": "seti_1J0g0WJSZQVUcJYgWE2XSi1K_secret_Jdxw2mOaIEHBdE6eTsfJ2IfmamgNJaF",
"created": 1623301244,
"customer": "cus_6lsBvm5rJ0zyHc",
"description": None,
"id": "seti_1J0g0WJSZQVUcJYgWE2XSi1K",
"last_setup_error": None,
"latest_attempt": "setatt_1J0g0WJSZQVUcJYgsrFgwxVh",
"livemode": False,
"mandate": None,
"metadata": {},
"next_action": None,
"object": "setup_intent",
"on_behalf_of": None,
"payment_method": "pm_fakefakefakefake0001",
"payment_method_options": {"card": {"request_three_d_secure": "automatic"}},
"payment_method_types": ["card"],
"single_use_mandate": None,
"status": "succeeded",
"usage": "off_session",
}
FAKE_SETUP_INTENT_DESTINATION_CHARGE = load_fixture(
"setup_intent_pi_destination_charge.json"
)
# TODO - add to regenerate_test_fixtures and replace this with a JSON fixture
# (will need to use a different payment_intent fixture)
FAKE_SESSION_I = {
"id": "cs_test_OAgNmy75Td25OeREvKUs8XZ7SjMPO9qAplqHO1sBaEjOg9fYbaeMh2nA",
"object": "checkout.session",
"billing_address_collection": None,
"cancel_url": "https://example.com/cancel",
"client_reference_id": None,
"customer": "cus_6lsBvm5rJ0zyHc",
"customer_email": None,
"display_items": [
{
"amount": 1500,
"currency": "usd",
"custom": {
"description": "Comfortable cotton t-shirt",
"images": None,
"name": "T-shirt",
},
"quantity": 2,
"type": "custom",
}
],
"livemode": False,
"locale": None,
"mode": None,
"payment_intent": FAKE_PAYMENT_INTENT_I["id"],
"payment_method_types": ["card"],
"setup_intent": None,
"submit_type": None,
"subscription": None,
"success_url": "https://example.com/success",
"metadata": {},
}
class ChargeDict(StripeItem):
def __init__(self, *args, **kwargs):
"""Match Stripe's behavior: return a stripe iterable on `charge.refunds`."""
super().__init__(*args, **kwargs)
self.refunds = StripeList(self.refunds)
def refund(self, amount=None, reason=None):
self.update({"refunded": True, "amount_refunded": amount})
return self
def capture(self):
self.update({"captured": True})
return self
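# Hedged usage sketch of ChargeDict (fake id, minimal fields): refund() and
# capture() mutate the fake in place, so deepcopy module-level fixtures before
# calling them in tests.
_example_charge = ChargeDict({"id": "ch_example0001", "refunds": {"data": []}})
assert _example_charge.refund(amount=100)["amount_refunded"] == 100
assert _example_charge.capture()["captured"] is True
del _example_charge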
FAKE_CHARGE = ChargeDict(load_fixture("charge_ch_fakefakefakefakefake0001.json"))
FAKE_CHARGE_II = ChargeDict(
{
"id": "ch_16ag432eZvKYlo2CGDe6lvVs",
"object": "charge",
"amount": 3000,
"amount_captured": 0,
"amount_refunded": 0,
"application_fee": None,
"application_fee_amount": None,
"balance_transaction": FAKE_BALANCE_TRANSACTION["id"],
"billing_details": {
"address": {
"city": None,
"country": "US",
"line1": None,
"line2": None,
"postal_code": "92082",
"state": None,
},
"email": "[email protected]",
"name": "John Foo",
"phone": None,
},
"calculated_statement_descriptor": "Stripe",
"captured": False,
"created": 1439788903,
"currency": "usd",
"customer": "cus_4UbFSo9tl62jqj",
"description": None,
"destination": None,
"dispute": None,
"disputed": False,
"failure_code": "expired_card",
"failure_message": "Your card has expired.",
"fraud_details": {},
"invoice": "in_16af5A2eZvKYlo2CJjANLL81",
"livemode": False,
"metadata": {},
"on_behalf_of": None,
"order": None,
"outcome": {
"network_status": "declined_by_network",
"reason": "expired_card",
"risk_level": "normal",
"risk_score": 1,
"seller_message": "The bank returned the decline code `expired_card`.",
"type": "issuer_declined",
},
"paid": False,
"payment_intent": FAKE_PAYMENT_INTENT_II["id"],
"payment_method": FAKE_CARD_AS_PAYMENT_METHOD["id"],
"payment_method_details": {
"card": {
"brand": "visa",
"checks": {
"address_line1_check": None,
"address_postal_code_check": None,
"cvc_check": None,
},
"country": "US",
"exp_month": 6,
"exp_year": 2021,
"fingerprint": "88PuXw9tEmvYe69o",
"funding": "credit",
"installments": None,
"last4": "4242",
"network": "visa",
"three_d_secure": None,
"wallet": None,
},
"type": "card",
},
"receipt_email": None,
"receipt_number": None,
"receipt_url": None,
"refunded": False,
"refunds": {
"object": "list",
"total_count": 0,
"has_more": False,
"url": "/v1/charges/ch_16ag432eZvKYlo2CGDe6lvVs/refunds",
"data": [],
},
"review": None,
"shipping": None,
"source": deepcopy(FAKE_CARD_II),
"source_transfer": None,
"statement_descriptor": None,
"statement_descriptor_suffix": None,
"status": "failed",
"transfer_data": None,
"transfer_group": None,
}
)
FAKE_CHARGE_REFUNDED = deepcopy(FAKE_CHARGE)
FAKE_CHARGE_REFUNDED = FAKE_CHARGE_REFUNDED.refund(
amount=FAKE_CHARGE_REFUNDED["amount"]
)
FAKE_REFUND = {
"id": "re_1E0he8KatMEEd8456454S01Vc",
"object": "refund",
"amount": FAKE_CHARGE_REFUNDED["amount_refunded"],
"balance_transaction": "txn_1E0he8KaGRDEd998TDswMZuN",
"charge": FAKE_CHARGE_REFUNDED["id"],
"created": 1549425864,
"currency": "usd",
"metadata": {},
"reason": None,
"receipt_number": None,
"source_transfer_reversal": None,
"status": "succeeded",
"transfer_reversal": None,
}
# Balance transaction associated with the refund
FAKE_BALANCE_TRANSACTION_REFUND = {
"id": "txn_1E0he8KaGRDEd998TDswMZuN",
"amount": -1 * FAKE_CHARGE_REFUNDED["amount_refunded"],
"available_on": 1549425864,
"created": 1549425864,
"currency": "usd",
"description": "REFUND FOR CHARGE (Payment for invoice G432DF1C-0028)",
"exchange_rate": None,
"fee": 0,
"fee_details": [],
"net": -1 * FAKE_CHARGE_REFUNDED["amount_refunded"],
"object": "balance_transaction",
"source": FAKE_REFUND["id"],
"status": "available",
"type": "refund",
}
FAKE_CHARGE_REFUNDED["refunds"].update(
{"total_count": 1, "data": [deepcopy(FAKE_REFUND)]}
)
FAKE_COUPON = {
"id": "fake-coupon-1",
"object": "coupon",
"amount_off": None,
"created": 1490157071,
"currency": None,
"duration": "once",
"duration_in_months": None,
"livemode": False,
"max_redemptions": None,
"metadata": {},
"percent_off": 1,
"redeem_by": None,
"times_redeemed": 0,
"valid": True,
}
FAKE_DISPUTE_CHARGE = load_fixture("dispute_ch_fakefakefakefake01.json")
FAKE_DISPUTE_BALANCE_TRANSACTION = load_fixture("dispute_txn_fakefakefakefake01.json")
# case when a dispute gets closed and the funds get reinstated (full)
FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_FULL = deepcopy(
FAKE_DISPUTE_BALANCE_TRANSACTION
)
FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_FULL["amount"] = (
-1 * FAKE_DISPUTE_BALANCE_TRANSACTION["amount"]
)
FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_FULL["fee"] = (
-1 * FAKE_DISPUTE_BALANCE_TRANSACTION["fee"]
)
FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_FULL["net"] = (
-1 * FAKE_DISPUTE_BALANCE_TRANSACTION["net"]
)
FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_FULL["fee_details"][0]["amount"] = (
-1 * FAKE_DISPUTE_BALANCE_TRANSACTION["fee_details"][0]["amount"]
)
# case when a dispute gets closed and the funds get reinstated (partial)
FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_PARTIAL = deepcopy(
FAKE_DISPUTE_BALANCE_TRANSACTION
)
FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_PARTIAL["amount"] = (
-0.9 * FAKE_DISPUTE_BALANCE_TRANSACTION["amount"]
)
FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_PARTIAL["fee"] = (
-0.9 * FAKE_DISPUTE_BALANCE_TRANSACTION["fee"]
)
FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_PARTIAL["net"] = (
-0.9 * FAKE_DISPUTE_BALANCE_TRANSACTION["net"]
)
FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_PARTIAL["fee_details"][0]["amount"] = (
-0.9 * FAKE_DISPUTE_BALANCE_TRANSACTION["fee_details"][0]["amount"]
)
FAKE_DISPUTE_PAYMENT_INTENT = load_fixture("dispute_pi_fakefakefakefake01.json")
FAKE_DISPUTE_PAYMENT_METHOD = load_fixture("dispute_pm_fakefakefakefake01.json")
# case when dispute gets created
FAKE_DISPUTE_I = load_fixture("dispute_dp_fakefakefakefake01.json")
# case when funds get withdrawn from platform account due to dispute
FAKE_DISPUTE_II = load_fixture("dispute_dp_fakefakefakefake02.json")
# case when dispute gets updated
FAKE_DISPUTE_III = deepcopy(FAKE_DISPUTE_II)
FAKE_DISPUTE_III["evidence"]["receipt"] = "file_4hshrsKatMEEd6736724HYAXyj"
# case when dispute gets closed
FAKE_DISPUTE_IV = deepcopy(FAKE_DISPUTE_II)
FAKE_DISPUTE_IV["evidence"]["receipt"] = "file_4hshrsKatMEEd6736724HYAXyj"
FAKE_DISPUTE_IV["status"] = "won"
# case when dispute funds get reinstated (partial)
FAKE_DISPUTE_V_PARTIAL = load_fixture("dispute_dp_funds_reinstated_full.json")
FAKE_DISPUTE_V_PARTIAL["balance_transactions"][
1
] = FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_PARTIAL
# case when dispute funds get reinstated (full)
FAKE_DISPUTE_V_FULL = load_fixture("dispute_dp_funds_reinstated_full.json")
FAKE_DISPUTE_V_FULL["balance_transactions"][
1
] = FAKE_DISPUTE_BALANCE_TRANSACTION_REFUND_FULL
FAKE_PRODUCT = load_fixture("product_prod_fake1.json")
FAKE_PLAN = load_fixture("plan_gold21323.json")
FAKE_PLAN_II = load_fixture("plan_silver41294.json")
for plan in (FAKE_PLAN, FAKE_PLAN_II):
# sanity check
assert plan["product"] == FAKE_PRODUCT["id"]
FAKE_TIER_PLAN = {
"id": "tier21323",
"object": "plan",
"active": True,
"amount": None,
"created": 1386247539,
"currency": "usd",
"interval": "month",
"interval_count": 1,
"livemode": False,
"metadata": {},
"nickname": "New plan name",
"product": FAKE_PRODUCT["id"],
"trial_period_days": None,
"usage_type": "licensed",
"tiers_mode": "graduated",
"tiers": [
{"flat_amount": 4900, "unit_amount": 1000, "up_to": 5},
{"flat_amount": None, "unit_amount": 900, "up_to": None},
],
}
FAKE_PLAN_METERED = {
"id": "plan_fakemetered",
"billing_scheme": "per_unit",
"object": "plan",
"active": True,
"aggregate_usage": "sum",
"amount": 200,
"collection_method": "per_unit",
"created": 1552632817,
"currency": "usd",
"interval": "month",
"interval_count": 1,
"livemode": False,
"metadata": {},
"nickname": "Sum Metered Plan",
"product": FAKE_PRODUCT["id"],
"tiers": None,
"tiers_mode": None,
"transform_usage": None,
"trial_period_days": None,
"usage_type": "metered",
}
FAKE_PRICE = load_fixture("price_gold21323.json")
FAKE_PRICE_II = load_fixture("price_silver41294.json")
for price in (FAKE_PRICE, FAKE_PRICE_II):
# sanity check
assert price["product"] == FAKE_PRODUCT["id"]
FAKE_PRICE_TIER = {
"active": True,
"billing_scheme": "tiered",
"created": 1386247539,
"currency": "usd",
"id": "price_tier21323",
"livemode": False,
"lookup_key": None,
"metadata": {},
"nickname": "New price name",
"object": "price",
"product": FAKE_PRODUCT["id"],
"recurring": {
"aggregate_usage": None,
"interval": "month",
"interval_count": 1,
"trial_period_days": None,
"usage_type": "licensed",
},
"tiers": [
{
"flat_amount": 4900,
"flat_amount_decimal": "4900",
"unit_amount": 1000,
"unit_amount_decimal": "1000",
"up_to": 5,
},
{
"flat_amount": None,
"flat_amount_decimal": None,
"unit_amount": 900,
"unit_amount_decimal": "900",
"up_to": None,
},
],
"tiers_mode": "graduated",
"transform_quantity": None,
"type": "recurring",
"unit_amount": None,
"unit_amount_decimal": None,
}
FAKE_PRICE_METERED = {
"active": True,
"billing_scheme": "per_unit",
"created": 1552632817,
"currency": "usd",
"id": "price_fakemetered",
"livemode": False,
"lookup_key": None,
"metadata": {},
"nickname": "Sum Metered Price",
"object": "price",
"product": FAKE_PRODUCT["id"],
"recurring": {
"aggregate_usage": "sum",
"interval": "month",
"interval_count": 1,
"trial_period_days": None,
"usage_type": "metered",
},
"tiers_mode": None,
"transform_quantity": None,
"type": "recurring",
"unit_amount": 200,
"unit_amount_decimal": "200",
}
FAKE_PRICE_ONETIME = {
"active": True,
"billing_scheme": "per_unit",
"created": 1552632818,
"currency": "usd",
"id": "price_fakeonetime",
"livemode": False,
"lookup_key": None,
"metadata": {},
"nickname": "One-Time Price",
"object": "price",
"product": FAKE_PRODUCT["id"],
"recurring": None,
"tiers_mode": None,
"transform_quantity": None,
"type": "one_time",
"unit_amount": 2000,
"unit_amount_decimal": "2000",
}
class SubscriptionDict(StripeItem):
def __init__(self, *args, **kwargs):
"""Match Stripe's behavior: return a stripe iterable on `subscription.items`."""
super().__init__(*args, **kwargs)
self["items"] = StripeList(self["items"])
def __setattr__(self, name, value):
if type(value) == datetime:
value = datetime_to_unix(value)
# Special case for price and plan
if name == "price":
for price in [
FAKE_PRICE,
FAKE_PRICE_II,
FAKE_PRICE_TIER,
FAKE_PRICE_METERED,
]:
if value == price["id"]:
value = price
elif name == "plan":
for plan in [FAKE_PLAN, FAKE_PLAN_II, FAKE_TIER_PLAN, FAKE_PLAN_METERED]:
if value == plan["id"]:
value = plan
self[name] = value
def delete(self, **kwargs):
if "at_period_end" in kwargs:
self["cancel_at_period_end"] = kwargs["at_period_end"]
return self
def save(self, idempotency_key=None):
return self
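# Note (illustrative): SubscriptionDict.__setattr__ expands known plan/price ids
# into their full fixture dicts, mirroring how Stripe expands these fields, e.g.
#   subscription = deepcopy(FAKE_SUBSCRIPTION)
#   subscription.plan = FAKE_PLAN_II["id"]  # now the full plan dict, not the id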
FAKE_SUBSCRIPTION = SubscriptionDict(
load_fixture("subscription_sub_fakefakefakefakefake0001.json")
)
FAKE_SUBSCRIPTION_NOT_PERIOD_CURRENT = deepcopy(FAKE_SUBSCRIPTION)
FAKE_SUBSCRIPTION_NOT_PERIOD_CURRENT.update(
{"current_period_end": 1441907581, "current_period_start": 1439229181}
)
FAKE_SUBSCRIPTION_CANCELED = deepcopy(FAKE_SUBSCRIPTION)
FAKE_SUBSCRIPTION_CANCELED["status"] = "canceled"
FAKE_SUBSCRIPTION_CANCELED["canceled_at"] = 1440907580
FAKE_SUBSCRIPTION_CANCELED_AT_PERIOD_END = deepcopy(FAKE_SUBSCRIPTION)
FAKE_SUBSCRIPTION_CANCELED_AT_PERIOD_END["canceled_at"] = 1440907580
FAKE_SUBSCRIPTION_CANCELED_AT_PERIOD_END["cancel_at_period_end"] = True
FAKE_SUBSCRIPTION_II = SubscriptionDict(
load_fixture("subscription_sub_fakefakefakefakefake0002.json")
)
FAKE_SUBSCRIPTION_III = SubscriptionDict(
load_fixture("subscription_sub_fakefakefakefakefake0003.json")
)
FAKE_SUBSCRIPTION_MULTI_PLAN = SubscriptionDict(
load_fixture("subscription_sub_fakefakefakefakefake0004.json")
)
FAKE_SUBSCRIPTION_METERED = SubscriptionDict(
{
"id": "sub_1rn1dp7WgjMtx9",
"object": "subscription",
"application_fee_percent": None,
"collection_method": "charge_automatically",
"cancel_at_period_end": False,
"canceled_at": None,
"current_period_end": 1441907581,
"current_period_start": 1439229181,
"customer": "cus_6lsBvm5rJ0zyHc",
"discount": None,
"ended_at": None,
"metadata": {"djstripe_test_fake_id": "sub_fakefakefakefakefake0005"},
"items": {
"data": [
{
"created": 1441907581,
"id": "si_UXYmKmJp6aWTw6",
"metadata": {},
"object": "subscription_item",
"plan": deepcopy(FAKE_PLAN_METERED),
"subscription": "sub_1rn1dp7WgjMtx9",
}
]
},
"plan": deepcopy(FAKE_PLAN_METERED),
"quantity": 1,
"start": 1439229181,
"start_date": 1439229181,
"status": "active",
"tax_percent": None,
"trial_end": None,
"trial_start": None,
}
)
FAKE_SUBSCRIPTION_ITEM_METERED = {
"id": "si_JiphMAMFxZKW8s",
"object": "subscription_item",
"metadata": {},
"billing_thresholds": "",
"created": 1441907581,
"plan": deepcopy(FAKE_PLAN_METERED),
"price": deepcopy(FAKE_PRICE_METERED),
"quantity": 1,
"subscription": FAKE_SUBSCRIPTION_METERED["id"],
"tax_rates": [],
}
FAKE_SUBSCRIPTION_ITEM_MULTI_PLAN = {
"id": "si_JiphMAMFxZKW8s",
"object": "subscription_item",
"metadata": {},
"billing_thresholds": "",
"created": 1441907581,
"plan": deepcopy(FAKE_PLAN),
"price": deepcopy(FAKE_PRICE),
"quantity": 1,
"subscription": FAKE_SUBSCRIPTION_MULTI_PLAN["id"],
"tax_rates": [],
}
FAKE_SUBSCRIPTION_ITEM_TAX_RATES = {
"id": "si_JiphMAMFxZKW8s",
"object": "subscription_item",
"metadata": {},
"billing_thresholds": "",
"created": 1441907581,
"plan": deepcopy(FAKE_PLAN_II),
"price": deepcopy(FAKE_PRICE_II),
"quantity": 1,
"subscription": FAKE_SUBSCRIPTION_II["id"],
"tax_rates": [
{
"id": "txr_fakefakefakefakefake0001",
"object": "tax_rate",
"active": True,
"created": 1593225980,
"description": None,
"display_name": "VAT",
"inclusive": True,
"jurisdiction": "Example1",
"livemode": False,
"metadata": {"djstripe_test_fake_id": "txr_fakefakefakefakefake0001"},
"percentage": 15.0,
}
],
}
FAKE_SUBSCRIPTION_SCHEDULE = {
"id": "sub_sched_1Hm7q6Fz0jfFqjGs2OxOSCzD",
"object": "subscription_schedule",
"canceled_at": None,
"completed_at": None,
"created": 1605056974,
"current_phase": None,
"customer": "cus_4UbFSo9tl62jqj", # FAKE_CUSTOMER_II
"default_settings": {
"billing_cycle_anchor": "automatic",
"billing_thresholds": None,
"collection_method": "charge_automatically",
"default_payment_method": None,
"default_source": None,
"invoice_settings": None,
"transfer_data": None,
},
"end_behavior": "release",
"livemode": False,
"metadata": {},
"phases": [
{
"add_invoice_items": [],
"application_fee_percent": None,
"billing_cycle_anchor": None,
"billing_thresholds": None,
"collection_method": None,
"coupon": None,
"default_payment_method": None,
"default_tax_rates": [],
"end_date": 1637195591,
"invoice_settings": None,
"plans": [
{
"billing_thresholds": None,
"plan": FAKE_PLAN_II["id"],
"price": FAKE_PRICE_II["id"],
"quantity": None,
"tax_rates": [],
}
],
"prorate": True,
"proration_behavior": "create_prorations",
"start_date": 1605659591,
"tax_percent": None,
"transfer_data": None,
"trial_end": None,
}
],
"released_at": None,
"released_subscription": None,
"renewal_interval": None,
"status": "not_started",
"subscription": None,
}
class Sources(object):
def __init__(self, card_fakes):
self.card_fakes = card_fakes
def create(self, source, api_key=None):
for fake_card in self.card_fakes:
if fake_card["id"] == source:
return fake_card
def retrieve(self, id, expand=None): # noqa
for fake_card in self.card_fakes:
if fake_card["id"] == id:
return fake_card
def list(self, **kwargs):
return StripeList(data=self.card_fakes)
def convert_source_dict(data):
if data:
source_type = data["object"]
if source_type == "card":
data = CardDict(data)
elif source_type == "bank_account":
data = BankAccountDict(data)
elif source_type == "source":
data = SourceDict(data)
else:
raise ValueError("Unknown source type: {}".format(source_type))
return data
class CustomerDict(dict):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self["default_source"] = convert_source_dict(self["default_source"])
for n, d in enumerate(self["sources"].get("data", [])):
self["sources"]["data"][n] = convert_source_dict(d)
def save(self, idempotency_key=None):
return self
def delete(self):
return self
@property
def sources(self):
return Sources(card_fakes=self["sources"]["data"])
def create_for_user(self, user):
from djstripe.models import Customer
stripe_customer = Customer.sync_from_stripe_data(self)
stripe_customer.subscriber = user
stripe_customer.save()
return stripe_customer
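# Hedged usage sketch (comments only): tests typically bind a fake customer to a
# Django user via ``FAKE_CUSTOMER.create_for_user(user)``, which round-trips the
# fixture through Customer.sync_from_stripe_data and sets subscriber=user; the
# ``user`` object here is assumed to be a saved Django user instance.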
FAKE_CUSTOMER = CustomerDict(load_fixture("customer_cus_6lsBvm5rJ0zyHc.json"))
# Customer with multiple subscriptions (all licensed usagetype)
FAKE_CUSTOMER_II = CustomerDict(load_fixture("customer_cus_4UbFSo9tl62jqj.json"))
# Customer with a Source (instead of Card) as default_source
FAKE_CUSTOMER_III = CustomerDict(load_fixture("customer_cus_4QWKsZuuTHcs7X.json"))
# Customer with a Bank Account as default_source
FAKE_CUSTOMER_IV = CustomerDict(
load_fixture("customer_cus_example_with_bank_account.json")
)
FAKE_DISCOUNT_CUSTOMER = {
"object": "discount",
"coupon": deepcopy(FAKE_COUPON),
"customer": FAKE_CUSTOMER["id"],
"start": 1493206114,
"end": None,
"subscription": None,
}
class InvoiceDict(StripeItem):
def __init__(self, *args, **kwargs):
"""Match Stripe's behavior: return a stripe iterable on `invoice.lines`."""
super().__init__(*args, **kwargs)
self.lines = StripeList(self.lines)
def pay(self):
return self
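# Note (illustrative): like ChargeDict, InvoiceDict wraps ``lines`` in a
# StripeList at construction time, and pay() is a no-op mock returning self.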
FAKE_INVOICE = InvoiceDict(load_fixture("invoice_in_fakefakefakefakefake0001.json"))
FAKE_INVOICE_IV = InvoiceDict(load_fixture("invoice_in_fakefakefakefakefake0004.json"))
FAKE_INVOICE_II = InvoiceDict(
{
"id": "in_16af5A2eZvKYlo2CJjANLL81",
"object": "invoice",
"amount_due": 3000,
"amount_paid": 0,
"amount_remaining": 3000,
"application_fee_amount": None,
"attempt_count": 1,
"attempted": True,
"auto_advance": True,
"collection_method": "charge_automatically",
"charge": FAKE_CHARGE_II["id"],
"currency": "usd",
"customer": "cus_4UbFSo9tl62jqj",
"created": 1439785128,
"description": None,
"discount": None,
"due_date": None,
"ending_balance": 0,
"lines": {
"data": [
{
"id": FAKE_SUBSCRIPTION_III["id"],
"object": "line_item",
"amount": 2000,
"currency": "usd",
"description": None,
"discountable": True,
"livemode": True,
"metadata": {},
"period": {"start": 1442469907, "end": 1445061907},
"plan": deepcopy(FAKE_PLAN),
"proration": False,
"quantity": 1,
"subscription": None,
"type": "subscription",
}
],
"total_count": 1,
"object": "list",
"url": "/v1/invoices/in_16af5A2eZvKYlo2CJjANLL81/lines",
},
"livemode": False,
"metadata": {},
"next_payment_attempt": 1440048103,
"number": "XXXXXXX-0002",
"paid": False,
"period_end": 1439784771,
"period_start": 1439698371,
"receipt_number": None,
"starting_balance": 0,
"statement_descriptor": None,
"subscription": FAKE_SUBSCRIPTION_III["id"],
"subtotal": 3000,
"tax": None,
"tax_percent": None,
"total": 3000,
"webhooks_delivered_at": 1439785139,
}
)
FAKE_INVOICE_III = InvoiceDict(
{
"id": "in_16Z9dP2eZvKYlo2CgFHgFx2Z",
"object": "invoice",
"amount_due": 0,
"amount_paid": 0,
"amount_remaining": 0,
"application_fee_amount": None,
"attempt_count": 0,
"attempted": True,
"auto_advance": True,
"collection_method": "charge_automatically",
"charge": None,
"created": 1439425915,
"currency": "usd",
"customer": "cus_6lsBvm5rJ0zyHc",
"description": None,
"discount": None,
"due_date": None,
"ending_balance": 20,
"lines": {
"data": [
{
"id": FAKE_SUBSCRIPTION["id"],
"object": "line_item",
"amount": 2000,
"currency": "usd",
"description": None,
"discountable": True,
"livemode": True,
"metadata": {},
"period": {"start": 1442111228, "end": 1444703228},
"plan": deepcopy(FAKE_PLAN),
"proration": False,
"quantity": 1,
"subscription": None,
"type": "subscription",
}
],
"total_count": 1,
"object": "list",
"url": "/v1/invoices/in_16Z9dP2eZvKYlo2CgFHgFx2Z/lines",
},
"livemode": False,
"metadata": {},
"next_payment_attempt": None,
"number": "XXXXXXX-0003",
"paid": False,
"period_end": 1439424571,
"period_start": 1436746171,
"receipt_number": None,
"starting_balance": 0,
"statement_descriptor": None,
"subscription": FAKE_SUBSCRIPTION["id"],
"subtotal": 20,
"tax": None,
"tax_percent": None,
"total": 20,
"webhooks_delivered_at": 1439426955,
}
)
FAKE_INVOICE_METERED_SUBSCRIPTION_USAGE = deepcopy(FAKE_SUBSCRIPTION_METERED)
FAKE_INVOICE_METERED_SUBSCRIPTION_USAGE["customer"] = FAKE_CUSTOMER_II["id"]
FAKE_SUBSCRIPTION_ITEM = {
"id": "si_JiphMAMFxZKW8s",
"object": "subscription_item",
"metadata": {},
"billing_thresholds": "",
"created": 1441907581,
"plan": deepcopy(FAKE_PLAN_METERED),
"price": deepcopy(FAKE_PRICE_METERED),
"quantity": 1,
"subscription": FAKE_INVOICE_METERED_SUBSCRIPTION_USAGE["id"],
"tax_rates": [],
}
FAKE_INVOICE_METERED_SUBSCRIPTION = InvoiceDict(
{
"id": "in_1JGGM6JSZQVUcJYgpWqfBOIl",
"livemode": False,
"created": 1439425915,
"metadata": {},
"description": "",
"amount_due": "1.05",
"amount_paid": "1.05",
"amount_remaining": "0.00",
"application_fee_amount": None,
"attempt_count": 1,
"attempted": True,
"auto_advance": False,
"collection_method": "charge_automatically",
"currency": "usd",
"customer": FAKE_CUSTOMER_II["id"],
"object": "invoice",
"charge": None,
"discount": None,
"due_date": None,
"ending_balance": 0,
"lines": {
"data": [
{
"amount": 2000,
"id": FAKE_INVOICE_METERED_SUBSCRIPTION_USAGE["id"],
"object": "line_item",
"currency": "usd",
"description": None,
"discountable": True,
"livemode": True,
"metadata": {},
"period": {"start": 1442111228, "end": 1444703228},
"plan": deepcopy(FAKE_PLAN_METERED),
"proration": False,
"quantity": 1,
"subscription": FAKE_INVOICE_METERED_SUBSCRIPTION_USAGE["id"],
"subscription_item": FAKE_SUBSCRIPTION_ITEM["id"],
"type": "subscription",
}
],
"total_count": 1,
"object": "list",
"url": "/v1/invoices/in_1JGGM6JSZQVUcJYgpWqfBOIl/lines",
},
"next_payment_attempt": None,
"number": "84DE1540-0004",
"paid": True,
"period_end": 1439424571,
"period_start": 1436746171,
"receipt_number": None,
"starting_balance": 0,
"statement_descriptor": None,
"subscription": FAKE_INVOICE_METERED_SUBSCRIPTION_USAGE["id"],
"subtotal": "1.00",
"tax": None,
"tax_percent": None,
"total": "1.00",
"webhooks_delivered_at": 1439426955,
}
)
FAKE_UPCOMING_INVOICE = InvoiceDict(
{
"id": "in",
"object": "invoice",
"amount_due": 2000,
"amount_paid": 0,
"amount_remaining": 2000,
"application_fee_amount": None,
"attempt_count": 1,
"attempted": False,
"collection_method": "charge_automatically",
"charge": None,
"created": 1439218864,
"currency": "usd",
"customer": FAKE_CUSTOMER["id"],
"description": None,
"default_tax_rates": [
{
"id": "txr_fakefakefakefakefake0001",
"object": "tax_rate",
"active": True,
"created": 1570921289,
"description": None,
"display_name": "VAT",
"inclusive": True,
"jurisdiction": "Example1",
"livemode": False,
"metadata": {"djstripe_test_fake_id": "txr_fakefakefakefakefake0001"},
"percentage": 15.0,
}
],
"discount": None,
"due_date": None,
"ending_balance": None,
"lines": {
"data": [
{
"id": FAKE_SUBSCRIPTION["id"],
"object": "line_item",
"amount": 2000,
"currency": "usd",
"description": None,
"discountable": True,
"livemode": True,
"metadata": {},
"period": {"start": 1441907581, "end": 1444499581},
"plan": deepcopy(FAKE_PLAN),
"proration": False,
"quantity": 1,
"subscription": None,
"tax_amounts": [
{
"amount": 261,
"inclusive": True,
"tax_rate": "txr_fakefakefakefakefake0001",
}
],
"tax_rates": [],
"type": "subscription",
}
],
"total_count": 1,
"object": "list",
"url": "/v1/invoices/in_fakefakefakefakefake0001/lines",
},
"livemode": False,
"metadata": {},
"next_payment_attempt": 1439218689,
"number": None,
"paid": False,
"period_end": 1439218689,
"period_start": 1439132289,
"receipt_number": None,
"starting_balance": 0,
"statement_descriptor": None,
"subscription": FAKE_SUBSCRIPTION["id"],
"subtotal": 2000,
"tax": 261,
"tax_percent": None,
"total": 2000,
"total_tax_amounts": [
{
"amount": 261,
"inclusive": True,
"tax_rate": "txr_fakefakefakefakefake0001",
}
],
"webhooks_delivered_at": 1439218870,
}
)
FAKE_TAX_RATE_EXAMPLE_1_VAT = load_fixture("tax_rate_txr_fakefakefakefakefake0001.json")
FAKE_TAX_RATE_EXAMPLE_2_SALES = load_fixture(
"tax_rate_txr_fakefakefakefakefake0002.json"
)
FAKE_TAX_ID = load_fixture("tax_id_txi_fakefakefakefakefake0001.json")
FAKE_EVENT_TAX_ID_CREATED = {
"id": "evt_16YKQi2eZvKYlo2CT2oe5ff3",
"object": "event",
"api_version": "2020-08-27",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_TAX_ID)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_ZoH080M8fny6yR",
"type": "customer.tax_id.created",
}
FAKE_TAX_ID_UPDATED = deepcopy(FAKE_TAX_ID)
FAKE_TAX_ID_UPDATED["verification"] = {
"status": "verified",
"verified_address": None,
"verified_name": "Test",
}
FAKE_EVENT_TAX_ID_UPDATED = {
"id": "evt_1J6Fy3JSZQVUcJYgnddjnMzx",
"object": "event",
"api_version": "2020-08-27",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_TAX_ID_UPDATED)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_ZoH080M8fny6yR",
"type": "customer.tax_id.updated",
}
FAKE_EVENT_TAX_ID_DELETED = deepcopy(FAKE_EVENT_TAX_ID_UPDATED)
FAKE_EVENT_TAX_ID_DELETED["type"] = "customer.tax_id.deleted"
FAKE_INVOICEITEM = {
"id": "ii_16XVTY2eZvKYlo2Cxz5n3RaS",
"object": "invoiceitem",
"amount": 2000,
"currency": "usd",
"customer": FAKE_CUSTOMER_II["id"],
"date": 1439033216,
"description": "One-time setup fee",
"discountable": True,
"discounts": [],
"invoice": FAKE_INVOICE_II["id"],
"livemode": False,
"metadata": {"key1": "value1", "key2": "value2"},
"period": {"start": 1439033216, "end": 1439033216},
"plan": None,
"price": None,
"proration": False,
"quantity": None,
"subscription": None,
"subscription_item": None,
"tax_rates": [],
"unit_amount": 2000,
"unit_amount_decimal": "2000",
}
FAKE_INVOICEITEM_II = {
"id": "ii_16XVTY2eZvKYlo2Cxz5n3RaS",
"object": "invoiceitem",
"amount": 2000,
"currency": "usd",
"customer": FAKE_CUSTOMER["id"],
"date": 1439033216,
"description": "One-time setup fee",
"discountable": True,
"discounts": [],
"invoice": FAKE_INVOICE["id"],
"livemode": False,
"metadata": {"key1": "value1", "key2": "value2"},
"period": {"start": 1439033216, "end": 1439033216},
"plan": None,
"price": None,
"proration": False,
"quantity": None,
"subscription": None,
"subscription_item": None,
"tax_rates": [],
"unit_amount": 2000,
"unit_amount_decimal": "2000",
}
# Invoice item with tax_rates
# TODO generate this
FAKE_INVOICEITEM_III = {
"id": "ii_16XVTY2eZvKYlo2Cxz5n3RaS",
"object": "invoiceitem",
"amount": 2000,
"currency": "usd",
"customer": FAKE_CUSTOMER_II["id"],
"date": 1439033216,
"description": "One-time setup fee",
"discountable": True,
"discounts": [],
"invoice": FAKE_INVOICE_II["id"],
"livemode": False,
"metadata": {"key1": "value1", "key2": "value2"},
"period": {"start": 1439033216, "end": 1439033216},
"plan": None,
"price": None,
"proration": False,
"quantity": None,
"subscription": None,
"subscription_item": None,
"tax_rates": [FAKE_TAX_RATE_EXAMPLE_1_VAT],
"unit_amount": 2000,
"unit_amount_decimal": "2000",
}
FAKE_TRANSFER = {
"id": "tr_16Y9BK2eZvKYlo2CR0ySu1BA",
"object": "transfer",
"amount": 100,
"amount_reversed": 0,
"application_fee_amount": None,
"balance_transaction": deepcopy(FAKE_BALANCE_TRANSACTION_II),
"created": 1439185846,
"currency": "usd",
"description": "Test description - 1439185984",
"destination": FAKE_STANDARD_ACCOUNT["id"],
"destination_payment": "py_16Y9BKFso9hLaeLueFmWAYUi",
"livemode": False,
"metadata": {},
"recipient": None,
"reversals": {
"object": "list",
"total_count": 0,
"has_more": False,
"url": "/v1/transfers/tr_16Y9BK2eZvKYlo2CR0ySu1BA/reversals",
"data": [],
},
"reversed": False,
"source_transaction": None,
"source_type": "bank_account",
}
FAKE_TRANSFER_WITH_1_REVERSAL = {
"id": "tr_16Y9BK2eZvKYlo2CR0ySu1BA",
"object": "transfer",
"amount": 100,
"amount_reversed": 0,
"application_fee_amount": None,
"balance_transaction": deepcopy(FAKE_BALANCE_TRANSACTION_II),
"created": 1439185846,
"currency": "usd",
"description": "Test description - 1439185984",
"destination": FAKE_STANDARD_ACCOUNT["id"],
"destination_payment": "py_16Y9BKFso9hLaeLueFmWAYUi",
"livemode": False,
"metadata": {},
"recipient": None,
"reversals": {
"object": "list",
"total_count": 1,
"has_more": False,
"url": "/v1/transfers/tr_16Y9BK2eZvKYlo2CR0ySu1BA/reversals",
"data": [
{
"id": "trr_1J5UlFJSZQVUcJYgb38m1OZO",
"object": "transfer_reversal",
"amount": 20,
"balance_transaction": deepcopy(FAKE_BALANCE_TRANSACTION_II),
"created": 1624449653,
"currency": "usd",
"destination_payment_refund": "pyr_1J5UlFR44xKqawmIBvFa6gW9",
"metadata": {},
"source_refund": None,
"transfer": deepcopy(FAKE_TRANSFER),
}
],
},
"reversed": False,
"source_transaction": None,
"source_type": "bank_account",
}
FAKE_USAGE_RECORD = {
"id": "mbur_1JPJz2JSZQVUcJYgK4otTE2V",
"livemode": False,
"object": "usage_record",
"quantity": 100,
"subscription_item": FAKE_SUBSCRIPTION_ITEM["id"],
"timestamp": 1629174774,
"action": "increment",
}
class UsageRecordSummaryDict(StripeItem):
def __init__(self, *args, **kwargs):
"""Match Stripe's behavior: return a stripe iterable on `invoice.lines`."""
super().__init__(*args, **kwargs)
FAKE_USAGE_RECORD_SUMMARY = UsageRecordSummaryDict(
load_fixture("usage_record_summary_sis_fakefakefakefakefake0001.json")
)
FAKE_ACCOUNT = {
"id": "acct_1032D82eZvKYlo2C",
"object": "account",
"business_profile": {
"name": "dj-stripe",
"support_email": "[email protected]",
"support_phone": None,
"support_url": "https://djstripe.com/support/",
"url": "https://djstripe.com",
},
"settings": {
"branding": {
"icon": "file_4hshrsKatMEEd6736724HYAXyj",
"logo": "file_1E3fssKatMEEd6736724HYAXyj",
"primary_color": "#092e20",
},
"dashboard": {"display_name": "dj-stripe", "timezone": "Etc/UTC"},
"payments": {"statement_descriptor": "DJSTRIPE"},
},
"charges_enabled": True,
"country": "US",
"default_currency": "usd",
"details_submitted": True,
"email": "[email protected]",
"payouts_enabled": True,
"type": "standard",
}
FAKE_FILEUPLOAD_LOGO = {
"created": 1550134074,
"filename": "logo_preview.png",
"id": "file_1E3fssKatMEEd6736724HYAXyj",
"links": {
"data": [
{
"created": 1550134074,
"expired": False,
"expires_at": 1850134074,
"file": "file_1E3fssKatMEEd6736724HYAXyj",
"id": "link_1E3fssKatMEEd673672V0JSH",
"livemode": False,
"metadata": {},
"object": "file_link",
"url": (
"https://files.stripe.com/links/fl_test_69vG4ISDx9Chjklasrf06BJeQo"
),
}
],
"has_more": False,
"object": "list",
"url": "/v1/file_links?file=file_1E3fssKatMEEd6736724HYAXyj",
},
"object": "file_upload",
"purpose": "business_logo",
"size": 6650,
"type": "png",
"url": "https://files.stripe.com/files/f_test_BTJFKcS7VDahgkjqw8EVNWlM",
}
FAKE_FILEUPLOAD_ICON = {
"created": 1550134074,
"filename": "icon_preview.png",
"id": "file_4hshrsKatMEEd6736724HYAXyj",
"links": {
"data": [
{
"created": 1550134074,
"expired": False,
"expires_at": 1850134074,
"file": "file_4hshrsKatMEEd6736724HYAXyj",
"id": "link_4jsdgsKatMEEd673672V0JSH",
"livemode": False,
"metadata": {},
"object": "file_link",
"url": (
"https://files.stripe.com/links/fl_test_69vG4ISDx9Chjklasrf06BJeQo"
),
}
],
"has_more": False,
"object": "list",
"url": "/v1/file_links?file=file_4hshrsKatMEEd6736724HYAXyj",
},
"object": "file_upload",
# Note that purpose="business_logo" for both icon and logo fields
"purpose": "business_logo",
"size": 6650,
"type": "png",
"url": "https://files.stripe.com/files/f_test_BTJFKcS7VDahgkjqw8EVNWlM",
}
FAKE_EVENT_FILE_CREATED = {
"id": "evt_1J5TusR44xKqawmIQVXSrGyf",
"object": "event",
"api_version": "2020-08-27",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_FILEUPLOAD_ICON)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_sTSstDDIOpKi2w",
"type": "file.created",
}
FAKE_EVENT_ACCOUNT_APPLICATION_DEAUTHORIZED = dict(
load_fixture("event_account_application_deauthorized.json")
)
FAKE_EVENT_ACCOUNT_APPLICATION_AUTHORIZED = dict(
load_fixture("event_account_application_authorized.json")
)
FAKE_EVENT_ACCOUNT_EXTERNAL_ACCOUNT_BANK_ACCOUNT_CREATED = dict(
load_fixture("event_external_account_bank_account_created.json")
)
FAKE_EVENT_ACCOUNT_EXTERNAL_ACCOUNT_CARD_CREATED = dict(
load_fixture("event_external_account_card_created.json")
)
FAKE_EVENT_ACCOUNT_EXTERNAL_ACCOUNT_BANK_ACCOUNT_DELETED = dict(
load_fixture("event_external_account_bank_account_deleted.json")
)
FAKE_EVENT_ACCOUNT_EXTERNAL_ACCOUNT_CARD_DELETED = dict(
load_fixture("event_external_account_card_deleted.json")
)
FAKE_EVENT_ACCOUNT_EXTERNAL_ACCOUNT_BANK_ACCOUNT_UPDATED = dict(
load_fixture("event_external_account_bank_account_updated.json")
)
FAKE_EVENT_ACCOUNT_EXTERNAL_ACCOUNT_CARD_UPDATED = dict(
load_fixture("event_external_account_card_updated.json")
)
FAKE_EVENT_STANDARD_ACCOUNT_UPDATED = dict(
load_fixture("event_account_updated_standard.json")
)
FAKE_EVENT_EXPRESS_ACCOUNT_UPDATED = dict(
load_fixture("event_account_updated_express.json")
)
FAKE_EVENT_CUSTOM_ACCOUNT_UPDATED = dict(
load_fixture("event_account_updated_custom.json")
)
# 2017-05-25 api changed request from id to object with id and idempotency_key
# issue #541
FAKE_EVENT_PLAN_REQUEST_IS_OBJECT = {
"id": "evt_1AcdbXXXXXXXXXXXXXXXXXXX",
"object": "event",
"api_version": "2017-06-05",
"created": 1499361420,
"data": {"object": FAKE_PLAN, "previous_attributes": {"name": "Plan anual test4"}},
"livemode": False,
"pending_webhooks": 1,
"request": {"id": "req_AyamqQWoi5AMR2", "idempotency_key": None},
"type": "plan.updated",
}
FAKE_EVENT_CHARGE_SUCCEEDED = {
"id": "evt_16YKQi2eZvKYlo2CT2oe5ff3",
"object": "event",
"api_version": "2016-03-07",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_CHARGE)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_6lsB7hkicwhaDj",
"type": "charge.succeeded",
}
FAKE_EVENT_TEST_CHARGE_SUCCEEDED = deepcopy(FAKE_EVENT_CHARGE_SUCCEEDED)
FAKE_EVENT_TEST_CHARGE_SUCCEEDED["id"] = TEST_EVENT_ID
FAKE_EVENT_CUSTOMER_CREATED = {
"id": "evt_38DHch3whaDvKYlo2CT2oe5ff3",
"object": "event",
"api_version": "2016-03-07",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_CUSTOMER)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_6l38DHch3whaDj",
"type": "customer.created",
}
FAKE_EVENT_CUSTOMER_UPDATED = deepcopy(FAKE_EVENT_CUSTOMER_CREATED)
FAKE_EVENT_CUSTOMER_UPDATED["type"] = "customer.updated"
FAKE_EVENT_CUSTOMER_DELETED = deepcopy(FAKE_EVENT_CUSTOMER_CREATED)
FAKE_EVENT_CUSTOMER_DELETED.update(
{"id": "evt_38DHch3whaDvKYlo2jksfsFFxy", "type": "customer.deleted"}
)
FAKE_EVENT_CUSTOMER_DISCOUNT_CREATED = {
"id": "evt_test_customer.discount.created",
"object": "event",
"api_version": "2018-05-21",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_DISCOUNT_CUSTOMER)},
"livemode": False,
"pending_webhooks": 1,
"request": {"id": "req_6l38DHch3whaDj", "idempotency_key": None},
"type": "customer.discount.created",
}
FAKE_EVENT_CUSTOMER_DISCOUNT_DELETED = {
"id": "AGBWvF5zBm4sMCsLLPZrw9XX",
"type": "customer.discount.deleted",
"api_version": "2017-02-14",
"created": 1439229084,
"object": "event",
"pending_webhooks": 0,
"request": "req_6l38DHch3whaDj",
"data": {"object": deepcopy(FAKE_DISCOUNT_CUSTOMER)},
}
FAKE_EVENT_CUSTOMER_SOURCE_CREATED = {
"id": "evt_DvKYlo38huDvKYlo2C7SXedrZk",
"object": "event",
"api_version": "2016-03-07",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_CARD)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_o3whaDvh3whaDj",
"type": "customer.source.created",
}
FAKE_EVENT_CUSTOMER_SOURCE_DELETED = deepcopy(FAKE_EVENT_CUSTOMER_SOURCE_CREATED)
FAKE_EVENT_CUSTOMER_SOURCE_DELETED.update(
{"id": "evt_DvKYlo38huDvKYlo2C7SXedrYk", "type": "customer.source.deleted"}
)
FAKE_EVENT_CUSTOMER_SOURCE_DELETED_DUPE = deepcopy(FAKE_EVENT_CUSTOMER_SOURCE_DELETED)
FAKE_EVENT_CUSTOMER_SOURCE_DELETED_DUPE.update({"id": "evt_DvKYlo38huDvKYlo2C7SXedzAk"})
FAKE_EVENT_CUSTOMER_SUBSCRIPTION_CREATED = {
"id": "evt_38DHch3wHD2eZvKYlCT2oe5ff3",
"object": "event",
"api_version": "2016-03-07",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_SUBSCRIPTION)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_6l87IHch3diaDj",
"type": "customer.subscription.created",
}
FAKE_EVENT_CUSTOMER_SUBSCRIPTION_DELETED = deepcopy(
FAKE_EVENT_CUSTOMER_SUBSCRIPTION_CREATED
)
FAKE_EVENT_CUSTOMER_SUBSCRIPTION_DELETED.update(
{"id": "evt_38DHch3wHD2eZvKYlCT2oeryaf", "type": "customer.subscription.deleted"}
)
FAKE_EVENT_DISPUTE_CREATED = {
"id": "evt_16YKQi2eZvKYlo2CT2oe5ff3",
"object": "event",
"api_version": "2017-08-15",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_DISPUTE_I)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_6lsB7hkicwhaDj",
"type": "charge.dispute.created",
}
FAKE_EVENT_DISPUTE_FUNDS_WITHDRAWN = {
"id": "evt_1JAyTxJSZQVUcJYgNk1Jqu8o",
"object": "event",
"api_version": "2020-08-27",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_DISPUTE_II)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_6lsB7hkicwhaDj",
"type": "charge.dispute.funds_withdrawn",
}
FAKE_EVENT_DISPUTE_UPDATED = {
"id": "evt_1JAyTxJSZQVUcJYgNk1Jqu8o",
"object": "event",
"api_version": "2020-08-27",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_DISPUTE_III)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_6lsB7hkicwhaDj",
"type": "charge.dispute.funds_withdrawn",
}
FAKE_EVENT_DISPUTE_CLOSED = {
"id": "evt_1JAyTxJSZQVUcJYgNk1Jqu8o",
"object": "event",
"api_version": "2020-08-27",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_DISPUTE_IV)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_6lsB7hkicwhaDj",
"type": "charge.dispute.closed",
}
FAKE_EVENT_DISPUTE_FUNDS_REINSTATED_FULL = {
"id": "evt_1JAyTxJSZQVUcJYgNk1Jqu8o",
"object": "event",
"api_version": "2020-08-27",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_DISPUTE_V_FULL)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_6lsB7hkicwhaDj",
"type": "charge.dispute.funds_reinstated",
}
FAKE_EVENT_DISPUTE_FUNDS_REINSTATED_PARTIAL = {
"id": "evt_1JAyTxJSZQVUcJYgNk1Jqu8o",
"object": "event",
"api_version": "2020-08-27",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_DISPUTE_V_PARTIAL)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_6lsB7hkicwhaDj",
"type": "charge.dispute.funds_reinstated",
}
FAKE_EVENT_SESSION_COMPLETED = {
"id": "evt_1JAyTxJSZQVUcJYgNk1Jqu8o",
"object": "event",
"api_version": "2020-08-27",
"created": 1439229084,
"data": {"object": deepcopy(FAKE_SESSION_I)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_6lsB7hkicwhaDj",
"type": "checkout.session.completed",
}
FAKE_EVENT_INVOICE_CREATED = {
"id": "evt_187IHD2eZvKYlo2C6YKQi2eZ",
"object": "event",
"api_version": "2016-03-07",
"created": 1462338623,
"data": {"object": deepcopy(FAKE_INVOICE)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_8O4sB7hkDobVT",
"type": "invoice.created",
}
FAKE_EVENT_INVOICE_DELETED = deepcopy(FAKE_EVENT_INVOICE_CREATED)
FAKE_EVENT_INVOICE_DELETED.update(
{"id": "evt_187IHD2eZvKYlo2Cjkjsr34H", "type": "invoice.deleted"}
)
FAKE_EVENT_INVOICE_UPCOMING = {
"id": "evt_187IHD2eZvKYlo2C6YKQi2bc",
"object": "event",
"api_version": "2017-02-14",
"created": 1501859641,
"data": {"object": deepcopy(FAKE_INVOICE)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_8O4sB7hkDobZA",
"type": "invoice.upcoming",
}
del FAKE_EVENT_INVOICE_UPCOMING["data"]["object"]["id"]
FAKE_EVENT_INVOICEITEM_CREATED = {
"id": "evt_187IHD2eZvKYlo2C7SXedrZk",
"object": "event",
"api_version": "2016-03-07",
"created": 1462338623,
"data": {"object": deepcopy(FAKE_INVOICEITEM)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_8O4Qbs2EDobDVT",
"type": "invoiceitem.created",
}
FAKE_EVENT_INVOICEITEM_DELETED = deepcopy(FAKE_EVENT_INVOICEITEM_CREATED)
FAKE_EVENT_INVOICEITEM_DELETED.update(
{"id": "evt_187IHD2eZvKYloJfdsnnfs34", "type": "invoiceitem.deleted"}
)
FAKE_EVENT_PAYMENT_METHOD_ATTACHED = {
"id": "evt_1FDOwDKatMEEd998o5FyxxAB",
"object": "event",
"api_version": "2019-08-14",
"created": 1567228549,
"data": {"object": deepcopy(FAKE_PAYMENT_METHOD_I)},
"livemode": False,
"pending_webhooks": 0,
"request": {"id": "req_9c9djVqxUZIKNr", "idempotency_key": None},
"type": "payment_method.attached",
}
FAKE_EVENT_PAYMENT_METHOD_DETACHED = {
"id": "evt_1FDOwDKatMEEd998o5Fdadfds",
"object": "event",
"api_version": "2019-08-14",
"created": 1567228549,
"data": {"object": deepcopy(FAKE_PAYMENT_METHOD_I)},
"livemode": False,
"pending_webhooks": 0,
"request": {"id": "req_9c9djVqxcxgdfg", "idempotency_key": None},
"type": "payment_method.detached",
}
FAKE_EVENT_PAYMENT_METHOD_DETACHED["data"]["object"]["customer"] = None
FAKE_EVENT_CARD_PAYMENT_METHOD_ATTACHED = {
"id": "evt_1FDOwDKatMEEd998o5Fghgfh",
"object": "event",
"api_version": "2019-08-14",
"created": 1567228549,
"data": {"object": deepcopy(FAKE_CARD_AS_PAYMENT_METHOD)},
"livemode": False,
"pending_webhooks": 0,
"request": {"id": "req_9c9djVqxUhgfh", "idempotency_key": None},
"type": "payment_method.attached",
}
FAKE_EVENT_CARD_PAYMENT_METHOD_DETACHED = {
"id": "evt_1FDOwDKatMEEd998o5435345",
"object": "event",
"api_version": "2019-08-14",
"created": 1567228549,
"data": {"object": deepcopy(FAKE_CARD_AS_PAYMENT_METHOD)},
"livemode": False,
"pending_webhooks": 0,
"request": {"id": "req_9c9djVqx6tgeg", "idempotency_key": None},
"type": "payment_method.detached",
}
# Note that the event from Stripe doesn't have customer = None
FAKE_EVENT_PLAN_CREATED = {
"id": "evt_1877X72eZvKYlo2CLK6daFxu",
"object": "event",
"api_version": "2016-03-07",
"created": 1462297325,
"data": {"object": deepcopy(FAKE_PLAN)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_8NtJXPttxSvFyM",
"type": "plan.created",
}
FAKE_EVENT_PLAN_DELETED = deepcopy(FAKE_EVENT_PLAN_CREATED)
FAKE_EVENT_PLAN_DELETED.update(
{"id": "evt_1877X72eZvKYl2jkds32jJFc", "type": "plan.deleted"}
)
FAKE_EVENT_PRICE_CREATED = {
"id": "evt_1HlZWCFz0jfFqjGsXOiPW10r",
"object": "event",
"api_version": "2020-03-02",
"created": 1604925044,
"data": {"object": deepcopy(FAKE_PRICE)},
"livemode": False,
"pending_webhooks": 0,
"request": {"id": "req_Nq7dDuP0HRrqcP", "idempotency_key": None},
"type": "price.created",
}
FAKE_EVENT_PRICE_UPDATED = {
"id": "evt_1HlZbxFz0jfFqjGsZwiHHf7h",
"object": "event",
"api_version": "2020-03-02",
"created": 1604925401,
"data": {
"object": FAKE_PRICE,
"previous_attributes": {"unit_amount": 2000, "unit_amount_decimal": "2000"},
},
"livemode": False,
"pending_webhooks": 0,
"request": {"id": "req_78pnxbwPMvOIwe", "idempotency_key": None},
"type": "price.updated",
}
FAKE_EVENT_PRICE_DELETED = deepcopy(FAKE_EVENT_PRICE_CREATED)
FAKE_EVENT_PRICE_DELETED.update(
{"id": "evt_1HlZelFz0jfFqjGs0F4BML2l", "type": "price.deleted"}
)
FAKE_EVENT_TRANSFER_CREATED = {
"id": "evt_16igNU2eZvKYlo2CYyMkYvet",
"object": "event",
"api_version": "2016-03-07",
"created": 1441696732,
"data": {"object": deepcopy(FAKE_TRANSFER)},
"livemode": False,
"pending_webhooks": 0,
"request": "req_6wZW9MskhYU15Y",
"type": "transfer.created",
}
FAKE_EVENT_TRANSFER_DELETED = deepcopy(FAKE_EVENT_TRANSFER_CREATED)
FAKE_EVENT_TRANSFER_DELETED.update(
{"id": "evt_16igNU2eZvKjklfsdjk232Mf", "type": "transfer.deleted"}
)
FAKE_TOKEN = {
"id": "tok_16YDIe2eZvKYlo2CPvqprIJd",
"object": "token",
"card": deepcopy(FAKE_CARD),
"client_ip": None,
"created": 1439201676,
"livemode": False,
"type": "card",
"used": False,
}
FAKE_EVENT_PAYMENT_INTENT_SUCCEEDED_DESTINATION_CHARGE = {
"id": "evt_1FG74XB7kbjcJ8Qq22i2BPdt",
"object": "event",
"api_version": "2019-05-16",
"created": 1567874857,
"data": {"object": deepcopy(FAKE_PAYMENT_INTENT_DESTINATION_CHARGE)},
"livemode": False,
"pending_webhooks": 1,
"request": {"id": "req_AJAmnJE4eiPIzb", "idempotency_key": None},
"type": "payment_intent.succeeded",
}
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_CREATED = {
"id": "evt_1Hm7q6Fz0jfFqjGsJSG4N91w",
"object": "event",
"api_version": "2020-03-02",
"created": 1605056974,
"data": {"object": deepcopy(FAKE_SUBSCRIPTION_SCHEDULE)},
"livemode": False,
"pending_webhooks": 0,
"request": {
"id": "req_Pttj3aW5RJwees",
"idempotency_key": "d2a77191-cc07-4c60-abab-5fb11357bd63",
},
"type": "subscription_schedule.created",
}
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_UPDATED = deepcopy(
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_CREATED
)
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_UPDATED["id"] = "sub_sched_1Hm86MFz0jfFqjGsc5iEdZee"
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_UPDATED["type"] = "subscription_schedule.updated"
# FAKE_EVENT_SUBSCRIPTION_SCHEDULE_UPDATED["data"]["object"]["released_at"] = 1605058030
# FAKE_EVENT_SUBSCRIPTION_SCHEDULE_UPDATED["data"]["object"]["status"] = "released"
# FAKE_EVENT_SUBSCRIPTION_SCHEDULE_UPDATED["data"]["previous_attributes"] = {
# "released_at": None,
# "status": "not_started",
# }
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_RELEASED = deepcopy(
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_CREATED
)
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_RELEASED["id"] = "evt_1Hm878Fz0jfFqjGsClU9gE79"
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_RELEASED["type"] = "subscription_schedule.released"
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_RELEASED["data"]["object"]["released_at"] = 1605058030
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_RELEASED["data"]["object"]["status"] = "released"
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_CANCELED = deepcopy(
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_CREATED
)
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_CANCELED["id"] = "evt_1Hm80YFz0jfFqjGs7kKvT7RE"
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_CANCELED["type"] = "subscription_schedule.canceled"
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_CANCELED["data"]["object"]["canceled_at"] = 1605057622
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_CANCELED["data"]["object"]["status"] = "canceled"
FAKE_EVENT_SUBSCRIPTION_SCHEDULE_CANCELED["data"]["previous_attributes"] = {
"released_at": None,
"status": "not_started",
}
| []
| []
| []
| [] | [] | python | 0 | 0 | |
tools/train.py | from __future__ import division
import argparse
import os
import os.path as osp
import time
import mmcv
import torch
from mmcv import Config
from mmcv.runner import init_dist
from mmdet import __version__
from mmdet.apis import set_random_seed, train_detector
from mmdet.datasets import build_dataset
from mmdet.models import build_detector
from mmdet.utils import get_root_logger
import warnings
warnings.filterwarnings("ignore")
def parse_args():
parser = argparse.ArgumentParser(description='Train a detector')
parser.add_argument('config', help='train config file path')
parser.add_argument('--work_dir', help='the dir to save logs and models')
parser.add_argument(
'--resume_from', help='the checkpoint file to resume from')
parser.add_argument(
'--validate',
action='store_true',
help='whether to evaluate the checkpoint during training')
parser.add_argument(
'--gpus',
type=int,
default=1,
help='number of gpus to use '
'(only applicable to non-distributed training)')
parser.add_argument('--seed', type=int, default=None, help='random seed')
parser.add_argument(
'--deterministic',
action='store_true',
help='whether to set deterministic options for CUDNN backend.')
parser.add_argument(
'--launcher',
choices=['none', 'pytorch', 'slurm', 'mpi'],
default='none',
help='job launcher')
parser.add_argument('--local_rank', type=int, default=0)
parser.add_argument(
'--autoscale-lr',
action='store_true',
help='automatically scale lr with the number of gpus')
args = parser.parse_args()
if 'LOCAL_RANK' not in os.environ:
os.environ['LOCAL_RANK'] = str(args.local_rank)
return args
def main():
args = parse_args()
cfg = Config.fromfile(args.config)
# set cudnn_benchmark
if cfg.get('cudnn_benchmark', False):
torch.backends.cudnn.benchmark = True
# update configs according to CLI args
if args.work_dir is not None:
cfg.work_dir = args.work_dir
if args.resume_from is not None:
cfg.resume_from = args.resume_from
cfg.gpus = args.gpus
if args.autoscale_lr:
# apply the linear scaling rule (https://arxiv.org/abs/1706.02677)
cfg.optimizer['lr'] = cfg.optimizer['lr'] * cfg.gpus / 8
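        # e.g. (illustrative numbers, not from any config in this repo): a base lr of
        # 0.02 tuned for 8 GPUs becomes 0.02 * 4 / 8 = 0.01 when training on 4 GPUs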
# init distributed env first, since logger depends on the dist info.
if args.launcher == 'none':
distributed = False
else:
distributed = True
init_dist(args.launcher, **cfg.dist_params)
# create work_dir
mmcv.mkdir_or_exist(osp.abspath(cfg.work_dir))
# init the logger before other steps
timestamp = time.strftime('%Y%m%d_%H%M%S', time.localtime())
log_file = osp.join(cfg.work_dir, '{}.log'.format(timestamp))
logger = get_root_logger(log_file=log_file, log_level=cfg.log_level)
# log some basic info
logger.info('Distributed training: {}'.format(distributed))
logger.info('MMDetection Version: {}'.format(__version__))
logger.info('Config:\n{}'.format(cfg.text))
# set random seeds
if args.seed is not None:
logger.info('Set random seed to {}, deterministic: {}'.format(
args.seed, args.deterministic))
set_random_seed(args.seed, deterministic=args.deterministic)
model = build_detector(
cfg.model, train_cfg=cfg.train_cfg, test_cfg=cfg.test_cfg)
datasets = [build_dataset(cfg.data.train)]
if len(cfg.workflow) == 2:
datasets.append(build_dataset(cfg.data.val))
if cfg.checkpoint_config is not None:
# save mmdet version, config file content and class names in
# checkpoints as meta data
cfg.checkpoint_config.meta = dict(
mmdet_version=__version__,
config=cfg.text,
CLASSES=datasets[0].CLASSES)
# add an attribute for visualization convenience
model.CLASSES = datasets[0].CLASSES
train_detector(
model,
datasets,
cfg,
distributed=distributed,
validate=args.validate,
timestamp=timestamp)
if __name__ == '__main__':
main()
| []
| []
| [
"LOCAL_RANK"
]
| [] | ["LOCAL_RANK"] | python | 1 | 0 | |
src/cmd/internal/obj/wasm/wasmobj.go | // Copyright 2018 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package wasm
import (
"bytes"
"cmd/internal/obj"
"cmd/internal/objabi"
"cmd/internal/sys"
"encoding/binary"
"fmt"
"io"
"math"
)
var Register = map[string]int16{
"PC_F": REG_PC_F,
"PC_B": REG_PC_B,
"SP": REG_SP,
"CTXT": REG_CTXT,
"g": REG_g,
"RET0": REG_RET0,
"RET1": REG_RET1,
"RET2": REG_RET2,
"RET3": REG_RET3,
"PAUSE": REG_PAUSE,
"R0": REG_R0,
"R1": REG_R1,
"R2": REG_R2,
"R3": REG_R3,
"R4": REG_R4,
"R5": REG_R5,
"R6": REG_R6,
"R7": REG_R7,
"R8": REG_R8,
"R9": REG_R9,
"R10": REG_R10,
"R11": REG_R11,
"R12": REG_R12,
"R13": REG_R13,
"R14": REG_R14,
"R15": REG_R15,
"F0": REG_F0,
"F1": REG_F1,
"F2": REG_F2,
"F3": REG_F3,
"F4": REG_F4,
"F5": REG_F5,
"F6": REG_F6,
"F7": REG_F7,
"F8": REG_F8,
"F9": REG_F9,
"F10": REG_F10,
"F11": REG_F11,
"F12": REG_F12,
"F13": REG_F13,
"F14": REG_F14,
"F15": REG_F15,
}
var registerNames []string
func init() {
obj.RegisterRegister(MINREG, MAXREG, rconv)
obj.RegisterOpcode(obj.ABaseWasm, Anames)
registerNames = make([]string, MAXREG-MINREG)
for name, reg := range Register {
registerNames[reg-MINREG] = name
}
}
func rconv(r int) string {
return registerNames[r-MINREG]
}
var unaryDst = map[obj.As]bool{
ASet: true,
ATee: true,
ACall: true,
ACallIndirect: true,
ACallImport: true,
ABr: true,
ABrIf: true,
ABrTable: true,
AI32Store: true,
AI64Store: true,
AF32Store: true,
AF64Store: true,
AI32Store8: true,
AI32Store16: true,
AI64Store8: true,
AI64Store16: true,
AI64Store32: true,
ACALLNORESUME: true,
}
var Linkwasm = obj.LinkArch{
Arch: sys.ArchWasm,
Init: instinit,
Preprocess: preprocess,
Assemble: assemble,
UnaryDst: unaryDst,
}
var (
morestack *obj.LSym
morestackNoCtxt *obj.LSym
gcWriteBarrier *obj.LSym
sigpanic *obj.LSym
deferreturn *obj.LSym
jmpdefer *obj.LSym
)
const (
/* mark flags */
WasmImport = 1 << 0
)
func instinit(ctxt *obj.Link) {
morestack = ctxt.Lookup("runtime.morestack")
morestackNoCtxt = ctxt.Lookup("runtime.morestack_noctxt")
gcWriteBarrier = ctxt.Lookup("runtime.gcWriteBarrier")
sigpanic = ctxt.LookupABI("runtime.sigpanic", obj.ABIInternal)
deferreturn = ctxt.LookupABI("runtime.deferreturn", obj.ABIInternal)
// jmpdefer is defined in assembly as ABI0, but what we're
// looking for is the *call* to jmpdefer from the Go function
// deferreturn, so we're looking for the ABIInternal version
// of jmpdefer that's called by Go.
jmpdefer = ctxt.LookupABI(`"".jmpdefer`, obj.ABIInternal)
}
func preprocess(ctxt *obj.Link, s *obj.LSym, newprog obj.ProgAlloc) {
appendp := func(p *obj.Prog, as obj.As, args ...obj.Addr) *obj.Prog {
if p.As != obj.ANOP {
p2 := obj.Appendp(p, newprog)
p2.Pc = p.Pc
p = p2
}
p.As = as
switch len(args) {
case 0:
p.From = obj.Addr{}
p.To = obj.Addr{}
case 1:
if unaryDst[as] {
p.From = obj.Addr{}
p.To = args[0]
} else {
p.From = args[0]
p.To = obj.Addr{}
}
case 2:
p.From = args[0]
p.To = args[1]
default:
panic("bad args")
}
return p
}
framesize := s.Func.Text.To.Offset
if framesize < 0 {
panic("bad framesize")
}
s.Func.Args = s.Func.Text.To.Val.(int32)
s.Func.Locals = int32(framesize)
if s.Func.Text.From.Sym.Wrapper() {
// if g._panic != nil && g._panic.argp == FP {
// g._panic.argp = bottom-of-frame
// }
//
// MOVD g_panic(g), R0
// Get R0
// I64Eqz
// Not
// If
// Get SP
// I64ExtendI32U
// I64Const $framesize+8
// I64Add
// I64Load panic_argp(R0)
// I64Eq
// If
// MOVD SP, panic_argp(R0)
// End
// End
gpanic := obj.Addr{
Type: obj.TYPE_MEM,
Reg: REGG,
Offset: 4 * 8, // g_panic
}
panicargp := obj.Addr{
Type: obj.TYPE_MEM,
Reg: REG_R0,
Offset: 0, // panic.argp
}
p := s.Func.Text
p = appendp(p, AMOVD, gpanic, regAddr(REG_R0))
p = appendp(p, AGet, regAddr(REG_R0))
p = appendp(p, AI64Eqz)
p = appendp(p, ANot)
p = appendp(p, AIf)
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, AI64ExtendI32U)
p = appendp(p, AI64Const, constAddr(framesize+8))
p = appendp(p, AI64Add)
p = appendp(p, AI64Load, panicargp)
p = appendp(p, AI64Eq)
p = appendp(p, AIf)
p = appendp(p, AMOVD, regAddr(REG_SP), panicargp)
p = appendp(p, AEnd)
p = appendp(p, AEnd)
}
if framesize > 0 {
p := s.Func.Text
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, AI32Const, constAddr(framesize))
p = appendp(p, AI32Sub)
p = appendp(p, ASet, regAddr(REG_SP))
p.Spadj = int32(framesize)
}
// Introduce resume points for CALL instructions
// and collect other explicit resume points.
numResumePoints := 0
explicitBlockDepth := 0
pc := int64(0) // pc is only incremented when necessary, this avoids bloat of the BrTable instruction
var tableIdxs []uint64
tablePC := int64(0)
base := ctxt.PosTable.Pos(s.Func.Text.Pos).Base()
for p := s.Func.Text; p != nil; p = p.Link {
prevBase := base
base = ctxt.PosTable.Pos(p.Pos).Base()
switch p.As {
case ABlock, ALoop, AIf:
explicitBlockDepth++
case AEnd:
if explicitBlockDepth == 0 {
panic("End without block")
}
explicitBlockDepth--
case ARESUMEPOINT:
if explicitBlockDepth != 0 {
panic("RESUME can only be used on toplevel")
}
p.As = AEnd
for tablePC <= pc {
tableIdxs = append(tableIdxs, uint64(numResumePoints))
tablePC++
}
numResumePoints++
pc++
case obj.ACALL:
if explicitBlockDepth != 0 {
panic("CALL can only be used on toplevel, try CALLNORESUME instead")
}
appendp(p, ARESUMEPOINT)
}
p.Pc = pc
// Increase pc whenever some pc-value table needs a new entry. Don't increase it
// more often to avoid bloat of the BrTable instruction.
// The "base != prevBase" condition detects inlined instructions. They are an
// implicit call, so entering and leaving this section affects the stack trace.
if p.As == ACALLNORESUME || p.As == obj.ANOP || p.As == ANop || p.Spadj != 0 || base != prevBase {
pc++
if p.To.Sym == sigpanic {
// The panic stack trace expects the PC at the call of sigpanic,
// not the next one. However, runtime.Caller subtracts 1 from the
// PC. To make both PC and PC-1 work (have the same line number),
// we advance the PC by 2 at sigpanic.
pc++
}
}
}
tableIdxs = append(tableIdxs, uint64(numResumePoints))
s.Size = pc + 1
if !s.Func.Text.From.Sym.NoSplit() {
p := s.Func.Text
if framesize <= objabi.StackSmall {
// small stack: SP <= stackguard
// Get SP
// Get g
// I32WrapI64
// I32Load $stackguard0
// I32GtU
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, AGet, regAddr(REGG))
p = appendp(p, AI32WrapI64)
p = appendp(p, AI32Load, constAddr(2*int64(ctxt.Arch.PtrSize))) // G.stackguard0
p = appendp(p, AI32LeU)
} else {
// large stack: SP-framesize <= stackguard-StackSmall
// SP <= stackguard+(framesize-StackSmall)
// Get SP
// Get g
// I32WrapI64
// I32Load $stackguard0
// I32Const $(framesize-StackSmall)
// I32Add
// I32GtU
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, AGet, regAddr(REGG))
p = appendp(p, AI32WrapI64)
p = appendp(p, AI32Load, constAddr(2*int64(ctxt.Arch.PtrSize))) // G.stackguard0
p = appendp(p, AI32Const, constAddr(int64(framesize)-objabi.StackSmall))
p = appendp(p, AI32Add)
p = appendp(p, AI32LeU)
}
// TODO(neelance): handle wraparound case
p = appendp(p, AIf)
p = appendp(p, obj.ACALL, constAddr(0))
if s.Func.Text.From.Sym.NeedCtxt() {
p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: morestack}
} else {
p.To = obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: morestackNoCtxt}
}
p = appendp(p, AEnd)
}
// Add Block instructions for resume points and BrTable to jump to selected resume point.
if numResumePoints > 0 {
p := s.Func.Text
p = appendp(p, ALoop) // entryPointLoop, used to jump between basic blocks
for i := 0; i < numResumePoints+1; i++ {
p = appendp(p, ABlock)
}
p = appendp(p, AGet, regAddr(REG_PC_B)) // read next basic block from PC_B
p = appendp(p, ABrTable, obj.Addr{Val: tableIdxs})
p = appendp(p, AEnd) // end of Block
for p.Link != nil {
p = p.Link
}
p = appendp(p, AEnd) // end of entryPointLoop
p = appendp(p, obj.AUNDEF)
}
p := s.Func.Text
currentDepth := 0
blockDepths := make(map[*obj.Prog]int)
for p != nil {
switch p.As {
case ABlock, ALoop, AIf:
currentDepth++
blockDepths[p] = currentDepth
case AEnd:
currentDepth--
}
switch p.As {
case ABr, ABrIf:
if p.To.Type == obj.TYPE_BRANCH {
blockDepth, ok := blockDepths[p.To.Val.(*obj.Prog)]
if !ok {
panic("label not at block")
}
p.To = constAddr(int64(currentDepth - blockDepth))
}
case obj.AJMP:
jmp := *p
p.As = obj.ANOP
if jmp.To.Type == obj.TYPE_BRANCH {
// jump to basic block
p = appendp(p, AI32Const, constAddr(jmp.To.Val.(*obj.Prog).Pc))
p = appendp(p, ASet, regAddr(REG_PC_B)) // write next basic block to PC_B
p = appendp(p, ABr, constAddr(int64(currentDepth-1))) // jump to beginning of entryPointLoop
break
}
// reset PC_B to function entry
p = appendp(p, AI32Const, constAddr(0))
p = appendp(p, ASet, regAddr(REG_PC_B))
// low-level WebAssembly call to function
switch jmp.To.Type {
case obj.TYPE_MEM:
p = appendp(p, ACall, jmp.To)
case obj.TYPE_NONE:
// (target PC is on stack)
p = appendp(p, AI32WrapI64)
p = appendp(p, AI32Const, constAddr(16)) // only needs PC_F bits (16-31), PC_B bits (0-15) are zero
p = appendp(p, AI32ShrU)
p = appendp(p, ACallIndirect)
default:
panic("bad target for JMP")
}
p = appendp(p, AReturn)
case obj.ACALL, ACALLNORESUME:
call := *p
p.As = obj.ANOP
pcAfterCall := call.Link.Pc
if call.To.Sym == sigpanic {
pcAfterCall-- // sigpanic expects to be called without advancing the pc
}
// jmpdefer manipulates the return address on the stack so deferreturn gets called repeatedly.
// Model this in WebAssembly with a loop.
if call.To.Sym == deferreturn {
p = appendp(p, ALoop)
}
// SP -= 8
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, AI32Const, constAddr(8))
p = appendp(p, AI32Sub)
p = appendp(p, ASet, regAddr(REG_SP))
// write return address to Go stack
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, AI64Const, obj.Addr{
Type: obj.TYPE_ADDR,
Name: obj.NAME_EXTERN,
Sym: s, // PC_F
Offset: pcAfterCall, // PC_B
})
p = appendp(p, AI64Store, constAddr(0))
// reset PC_B to function entry
p = appendp(p, AI32Const, constAddr(0))
p = appendp(p, ASet, regAddr(REG_PC_B))
// low-level WebAssembly call to function
switch call.To.Type {
case obj.TYPE_MEM:
p = appendp(p, ACall, call.To)
case obj.TYPE_NONE:
// (target PC is on stack)
p = appendp(p, AI32WrapI64)
p = appendp(p, AI32Const, constAddr(16)) // only needs PC_F bits (16-31), PC_B bits (0-15) are zero
p = appendp(p, AI32ShrU)
p = appendp(p, ACallIndirect)
default:
panic("bad target for CALL")
}
// gcWriteBarrier has no return value, it never unwinds the stack
if call.To.Sym == gcWriteBarrier {
break
}
// jmpdefer removes the frame of deferreturn from the Go stack.
// However, its WebAssembly function still returns normally,
// so we need to return from deferreturn without removing its
// stack frame (no RET), because the frame is already gone.
if call.To.Sym == jmpdefer {
p = appendp(p, AReturn)
break
}
// return value of call is on the top of the stack, indicating whether to unwind the WebAssembly stack
p = appendp(p, AIf)
if call.As == ACALLNORESUME && call.To.Sym != sigpanic { // sigpanic unwinds the stack, but it never resumes
// trying to unwind WebAssembly stack but call has no resume point, terminate with error
p = appendp(p, obj.AUNDEF)
} else {
// unwinding WebAssembly stack to switch goroutine, return 1
p = appendp(p, AI32Const, constAddr(1))
p = appendp(p, AReturn)
}
p = appendp(p, AEnd)
// jump to before the call if jmpdefer has reset the return address to the call's PC
if call.To.Sym == deferreturn {
p = appendp(p, AGet, regAddr(REG_PC_B))
p = appendp(p, AI32Const, constAddr(call.Pc))
p = appendp(p, AI32Eq)
p = appendp(p, ABrIf, constAddr(0))
p = appendp(p, AEnd) // end of Loop
}
case obj.ARET, ARETUNWIND:
ret := *p
p.As = obj.ANOP
if framesize > 0 {
// SP += framesize
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, AI32Const, constAddr(framesize))
p = appendp(p, AI32Add)
p = appendp(p, ASet, regAddr(REG_SP))
// TODO(neelance): This should theoretically set Spadj, but it only works without.
// p.Spadj = int32(-framesize)
}
if ret.To.Type == obj.TYPE_MEM {
// reset PC_B to function entry
p = appendp(p, AI32Const, constAddr(0))
p = appendp(p, ASet, regAddr(REG_PC_B))
// low-level WebAssembly call to function
p = appendp(p, ACall, ret.To)
p = appendp(p, AReturn)
break
}
// read return PC_F from Go stack
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, AI32Load16U, constAddr(2))
p = appendp(p, ASet, regAddr(REG_PC_F))
// read return PC_B from Go stack
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, AI32Load16U, constAddr(0))
p = appendp(p, ASet, regAddr(REG_PC_B))
// SP += 8
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, AI32Const, constAddr(8))
p = appendp(p, AI32Add)
p = appendp(p, ASet, regAddr(REG_SP))
if ret.As == ARETUNWIND {
// function needs to unwind the WebAssembly stack, return 1
p = appendp(p, AI32Const, constAddr(1))
p = appendp(p, AReturn)
break
}
// not unwinding the WebAssembly stack, return 0
p = appendp(p, AI32Const, constAddr(0))
p = appendp(p, AReturn)
}
p = p.Link
}
p = s.Func.Text
for p != nil {
switch p.From.Name {
case obj.NAME_AUTO:
p.From.Offset += int64(framesize)
case obj.NAME_PARAM:
p.From.Reg = REG_SP
p.From.Offset += int64(framesize) + 8 // parameters are after the frame and the 8-byte return address
}
switch p.To.Name {
case obj.NAME_AUTO:
p.To.Offset += int64(framesize)
case obj.NAME_PARAM:
p.To.Reg = REG_SP
p.To.Offset += int64(framesize) + 8 // parameters are after the frame and the 8-byte return address
}
switch p.As {
case AGet:
if p.From.Type == obj.TYPE_ADDR {
get := *p
p.As = obj.ANOP
switch get.From.Name {
case obj.NAME_EXTERN:
p = appendp(p, AI64Const, get.From)
case obj.NAME_AUTO, obj.NAME_PARAM:
p = appendp(p, AGet, regAddr(get.From.Reg))
if get.From.Reg == REG_SP {
p = appendp(p, AI64ExtendI32U)
}
if get.From.Offset != 0 {
p = appendp(p, AI64Const, constAddr(get.From.Offset))
p = appendp(p, AI64Add)
}
default:
panic("bad Get: invalid name")
}
}
case AI32Load, AI64Load, AF32Load, AF64Load, AI32Load8S, AI32Load8U, AI32Load16S, AI32Load16U, AI64Load8S, AI64Load8U, AI64Load16S, AI64Load16U, AI64Load32S, AI64Load32U:
if p.From.Type == obj.TYPE_MEM {
as := p.As
from := p.From
p.As = AGet
p.From = regAddr(from.Reg)
if from.Reg != REG_SP {
p = appendp(p, AI32WrapI64)
}
p = appendp(p, as, constAddr(from.Offset))
}
case AMOVB, AMOVH, AMOVW, AMOVD:
mov := *p
p.As = obj.ANOP
var loadAs obj.As
var storeAs obj.As
switch mov.As {
case AMOVB:
loadAs = AI64Load8U
storeAs = AI64Store8
case AMOVH:
loadAs = AI64Load16U
storeAs = AI64Store16
case AMOVW:
loadAs = AI64Load32U
storeAs = AI64Store32
case AMOVD:
loadAs = AI64Load
storeAs = AI64Store
}
appendValue := func() {
switch mov.From.Type {
case obj.TYPE_CONST:
p = appendp(p, AI64Const, constAddr(mov.From.Offset))
case obj.TYPE_ADDR:
switch mov.From.Name {
case obj.NAME_NONE, obj.NAME_PARAM, obj.NAME_AUTO:
p = appendp(p, AGet, regAddr(mov.From.Reg))
if mov.From.Reg == REG_SP {
p = appendp(p, AI64ExtendI32U)
}
p = appendp(p, AI64Const, constAddr(mov.From.Offset))
p = appendp(p, AI64Add)
case obj.NAME_EXTERN:
p = appendp(p, AI64Const, mov.From)
default:
panic("bad name for MOV")
}
case obj.TYPE_REG:
p = appendp(p, AGet, mov.From)
if mov.From.Reg == REG_SP {
p = appendp(p, AI64ExtendI32U)
}
case obj.TYPE_MEM:
p = appendp(p, AGet, regAddr(mov.From.Reg))
if mov.From.Reg != REG_SP {
p = appendp(p, AI32WrapI64)
}
p = appendp(p, loadAs, constAddr(mov.From.Offset))
default:
panic("bad MOV type")
}
}
switch mov.To.Type {
case obj.TYPE_REG:
appendValue()
if mov.To.Reg == REG_SP {
p = appendp(p, AI32WrapI64)
}
p = appendp(p, ASet, mov.To)
case obj.TYPE_MEM:
switch mov.To.Name {
case obj.NAME_NONE, obj.NAME_PARAM:
p = appendp(p, AGet, regAddr(mov.To.Reg))
if mov.To.Reg != REG_SP {
p = appendp(p, AI32WrapI64)
}
case obj.NAME_EXTERN:
p = appendp(p, AI32Const, obj.Addr{Type: obj.TYPE_ADDR, Name: obj.NAME_EXTERN, Sym: mov.To.Sym})
default:
panic("bad MOV name")
}
appendValue()
p = appendp(p, storeAs, constAddr(mov.To.Offset))
default:
panic("bad MOV type")
}
case ACallImport:
p.As = obj.ANOP
p = appendp(p, AGet, regAddr(REG_SP))
p = appendp(p, ACall, obj.Addr{Type: obj.TYPE_MEM, Name: obj.NAME_EXTERN, Sym: s})
p.Mark = WasmImport
}
p = p.Link
}
}
func constAddr(value int64) obj.Addr {
return obj.Addr{Type: obj.TYPE_CONST, Offset: value}
}
func regAddr(reg int16) obj.Addr {
return obj.Addr{Type: obj.TYPE_REG, Reg: reg}
}
// countRegisters returns the number of integer and float registers used by s.
// It does so by looking for the maximum R* and F* registers.
func countRegisters(s *obj.LSym) (numI, numF int16) {
for p := s.Func.Text; p != nil; p = p.Link {
var reg int16
switch p.As {
case AGet:
reg = p.From.Reg
case ASet:
reg = p.To.Reg
case ATee:
reg = p.To.Reg
default:
continue
}
if reg >= REG_R0 && reg <= REG_R15 {
if n := reg - REG_R0 + 1; numI < n {
numI = n
}
} else if reg >= REG_F0 && reg <= REG_F15 {
if n := reg - REG_F0 + 1; numF < n {
numF = n
}
}
}
return
}
func assemble(ctxt *obj.Link, s *obj.LSym, newprog obj.ProgAlloc) {
w := new(bytes.Buffer)
numI, numF := countRegisters(s)
// Function starts with declaration of locals: numbers and types.
switch s.Name {
// memchr and memcmp don't use the normal Go calling convention and need i32 variables.
case "memchr":
writeUleb128(w, 1) // number of sets of locals
writeUleb128(w, 3) // number of locals
w.WriteByte(0x7F) // i32
case "memcmp":
writeUleb128(w, 1) // number of sets of locals
writeUleb128(w, 2) // number of locals
w.WriteByte(0x7F) // i32
default:
numTypes := 0
if numI > 0 {
numTypes++
}
if numF > 0 {
numTypes++
}
writeUleb128(w, uint64(numTypes))
if numI > 0 {
writeUleb128(w, uint64(numI)) // number of locals
w.WriteByte(0x7E) // i64
}
if numF > 0 {
writeUleb128(w, uint64(numF)) // number of locals
w.WriteByte(0x7C) // f64
}
}
for p := s.Func.Text; p != nil; p = p.Link {
switch p.As {
case AGet:
if p.From.Type != obj.TYPE_REG {
panic("bad Get: argument is not a register")
}
reg := p.From.Reg
switch {
case reg >= REG_PC_F && reg <= REG_PAUSE:
w.WriteByte(0x23) // global.get
writeUleb128(w, uint64(reg-REG_PC_F))
case reg >= REG_R0 && reg <= REG_R15:
w.WriteByte(0x20) // local.get (i64)
writeUleb128(w, uint64(reg-REG_R0))
case reg >= REG_F0 && reg <= REG_F15:
w.WriteByte(0x20) // local.get (f64)
writeUleb128(w, uint64(numI+(reg-REG_F0)))
default:
panic("bad Get: invalid register")
}
continue
case ASet:
if p.To.Type != obj.TYPE_REG {
panic("bad Set: argument is not a register")
}
reg := p.To.Reg
switch {
case reg >= REG_PC_F && reg <= REG_PAUSE:
w.WriteByte(0x24) // global.set
writeUleb128(w, uint64(reg-REG_PC_F))
case reg >= REG_R0 && reg <= REG_F15:
if p.Link.As == AGet && p.Link.From.Reg == reg {
w.WriteByte(0x22) // local.tee
p = p.Link
} else {
w.WriteByte(0x21) // local.set
}
if reg <= REG_R15 {
writeUleb128(w, uint64(reg-REG_R0))
} else {
writeUleb128(w, uint64(numI+(reg-REG_F0)))
}
default:
panic("bad Set: invalid register")
}
continue
case ATee:
if p.To.Type != obj.TYPE_REG {
panic("bad Tee: argument is not a register")
}
reg := p.To.Reg
switch {
case reg >= REG_R0 && reg <= REG_R15:
w.WriteByte(0x22) // local.tee (i64)
writeUleb128(w, uint64(reg-REG_R0))
case reg >= REG_F0 && reg <= REG_F15:
w.WriteByte(0x22) // local.tee (f64)
writeUleb128(w, uint64(numI+(reg-REG_F0)))
default:
panic("bad Tee: invalid register")
}
continue
case ANot:
w.WriteByte(0x45) // i32.eqz
continue
case obj.AUNDEF:
w.WriteByte(0x00) // unreachable
continue
case obj.ANOP, obj.ATEXT, obj.AFUNCDATA, obj.APCDATA:
// ignore
continue
}
switch {
case p.As < AUnreachable || p.As > AF64ReinterpretI64:
panic(fmt.Sprintf("unexpected assembler op: %s", p.As))
case p.As < AEnd:
w.WriteByte(byte(p.As - AUnreachable + 0x00))
case p.As < ADrop:
w.WriteByte(byte(p.As - AEnd + 0x0B))
case p.As < AI32Load:
w.WriteByte(byte(p.As - ADrop + 0x1A))
default:
w.WriteByte(byte(p.As - AI32Load + 0x28))
}
switch p.As {
case ABlock, ALoop, AIf:
if p.From.Offset != 0 {
// block type, rarely used, e.g. for code compiled with emscripten
w.WriteByte(0x80 - byte(p.From.Offset))
continue
}
w.WriteByte(0x40)
case ABr, ABrIf:
if p.To.Type != obj.TYPE_CONST {
panic("bad Br/BrIf")
}
writeUleb128(w, uint64(p.To.Offset))
case ABrTable:
idxs := p.To.Val.([]uint64)
writeUleb128(w, uint64(len(idxs)-1))
for _, idx := range idxs {
writeUleb128(w, idx)
}
case ACall:
switch p.To.Type {
case obj.TYPE_CONST:
writeUleb128(w, uint64(p.To.Offset))
case obj.TYPE_MEM:
if p.To.Name != obj.NAME_EXTERN && p.To.Name != obj.NAME_STATIC {
fmt.Println(p.To)
panic("bad name for Call")
}
r := obj.Addrel(s)
r.Off = int32(w.Len())
r.Type = objabi.R_CALL
if p.Mark&WasmImport != 0 {
r.Type = objabi.R_WASMIMPORT
}
r.Sym = p.To.Sym
default:
panic("bad type for Call")
}
case ACallIndirect:
writeUleb128(w, uint64(p.To.Offset))
w.WriteByte(0x00) // reserved value
case AI32Const, AI64Const:
if p.From.Name == obj.NAME_EXTERN {
r := obj.Addrel(s)
r.Off = int32(w.Len())
r.Type = objabi.R_ADDR
r.Sym = p.From.Sym
r.Add = p.From.Offset
break
}
writeSleb128(w, p.From.Offset)
case AF64Const:
b := make([]byte, 8)
binary.LittleEndian.PutUint64(b, math.Float64bits(p.From.Val.(float64)))
w.Write(b)
case AI32Load, AI64Load, AF32Load, AF64Load, AI32Load8S, AI32Load8U, AI32Load16S, AI32Load16U, AI64Load8S, AI64Load8U, AI64Load16S, AI64Load16U, AI64Load32S, AI64Load32U:
if p.From.Offset < 0 {
panic("negative offset for *Load")
}
if p.From.Type != obj.TYPE_CONST {
panic("bad type for *Load")
}
if p.From.Offset > math.MaxUint32 {
ctxt.Diag("bad offset in %v", p)
}
writeUleb128(w, align(p.As))
writeUleb128(w, uint64(p.From.Offset))
case AI32Store, AI64Store, AF32Store, AF64Store, AI32Store8, AI32Store16, AI64Store8, AI64Store16, AI64Store32:
if p.To.Offset < 0 {
panic("negative offset")
}
if p.From.Offset > math.MaxUint32 {
ctxt.Diag("bad offset in %v", p)
}
writeUleb128(w, align(p.As))
writeUleb128(w, uint64(p.To.Offset))
case ACurrentMemory, AGrowMemory:
w.WriteByte(0x00)
}
}
w.WriteByte(0x0b) // end
s.P = w.Bytes()
}
func align(as obj.As) uint64 {
switch as {
case AI32Load8S, AI32Load8U, AI64Load8S, AI64Load8U, AI32Store8, AI64Store8:
return 0
case AI32Load16S, AI32Load16U, AI64Load16S, AI64Load16U, AI32Store16, AI64Store16:
return 1
case AI32Load, AF32Load, AI64Load32S, AI64Load32U, AI32Store, AF32Store, AI64Store32:
return 2
case AI64Load, AF64Load, AI64Store, AF64Store:
return 3
default:
panic("align: bad op")
}
}
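// Illustrative example (added for clarity, not part of the original source):
// the unsigned LEB128 encoding of 624485 is the byte sequence 0xE5 0x8E 0x26.
// Each output byte carries 7 bits of the value, and the high bit of a byte is
// set whenever another byte follows; writeSleb128 below uses the same scheme
// for signed values.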
func writeUleb128(w io.ByteWriter, v uint64) {
more := true
for more {
c := uint8(v & 0x7f)
v >>= 7
more = v != 0
if more {
c |= 0x80
}
w.WriteByte(c)
}
}
func writeSleb128(w io.ByteWriter, v int64) {
more := true
for more {
c := uint8(v & 0x7f)
s := uint8(v & 0x40)
v >>= 7
more = !((v == 0 && s == 0) || (v == -1 && s != 0))
if more {
c |= 0x80
}
w.WriteByte(c)
}
}
| []
| []
| []
| [] | [] | go | null | null | null |
third_party/com_github_tensorflow_tensorflow/py/python_configure.bzl | """Repository rule for Python autoconfiguration.
`python_configure` depends on the following environment variables:
* `PYTHON_BIN_PATH`: location of python binary.
* `PYTHON_LIB_PATH`: Location of python libraries.
"""
load(
"//third_party/com_github_tensorflow_tensorflow/remote_config:common.bzl",
"BAZEL_SH",
"PYTHON_BIN_PATH",
"PYTHON_LIB_PATH",
"TF_PYTHON_CONFIG_REPO",
"auto_config_fail",
"config_repo_label",
"execute",
"get_bash_bin",
"get_host_environ",
"get_python_bin",
"is_windows",
"raw_exec",
"read_dir",
)
def _genrule(src_dir, genrule_name, command, outs):
"""Returns a string with a genrule.
Genrule executes the given command and produces the given outputs.
"""
return (
"genrule(\n" +
' name = "' +
genrule_name + '",\n' +
" outs = [\n" +
outs +
"\n ],\n" +
' cmd = """\n' +
command +
'\n """,\n' +
")\n"
)
def _norm_path(path):
"""Returns a path with '/' and remove the trailing slash."""
path = path.replace("\\", "/")
if path[-1] == "/":
path = path[:-1]
return path
def _symlink_genrule_for_dir(
repository_ctx,
src_dir,
dest_dir,
genrule_name,
src_files = [],
dest_files = []):
"""Returns a genrule to symlink(or copy if on Windows) a set of files.
If src_dir is passed, files will be read from the given directory; otherwise
we assume files are in src_files and dest_files
"""
if src_dir != None:
src_dir = _norm_path(src_dir)
dest_dir = _norm_path(dest_dir)
files = "\n".join(read_dir(repository_ctx, src_dir))
# Create a list with the src_dir stripped to use for outputs.
dest_files = files.replace(src_dir, "").splitlines()
src_files = files.splitlines()
command = []
outs = []
for i in range(len(dest_files)):
if dest_files[i] != "":
# If we have only one file to link we do not want to use the dest_dir, as
# $(@D) will include the full path to the file.
dest = "$(@D)/" + dest_dir + dest_files[i] if len(dest_files) != 1 else "$(@D)/" + dest_files[i]
# Copy the headers to create a sandboxable setup.
cmd = "cp -f"
command.append(cmd + ' "%s" "%s"' % (src_files[i], dest))
outs.append(' "' + dest_dir + dest_files[i] + '",')
genrule = _genrule(
src_dir,
genrule_name,
" && ".join(command),
"\n".join(outs),
)
return genrule
def _get_python_lib(repository_ctx, python_bin):
"""Gets the python lib path."""
python_lib = get_host_environ(repository_ctx, PYTHON_LIB_PATH)
if python_lib != None:
return python_lib
# The interesting program to execute.
print_lib = [
"from __future__ import print_function",
"import site",
"import os",
"python_paths = []",
"if os.getenv('PYTHONPATH') is not None:",
" python_paths = os.getenv('PYTHONPATH').split(':')",
"try:",
" library_paths = site.getsitepackages()",
"except AttributeError:",
" from distutils.sysconfig import get_python_lib",
" library_paths = [get_python_lib()]",
"all_paths = set(python_paths + library_paths)",
"paths = []",
"for path in all_paths:",
" if os.path.isdir(path):",
" paths.append(path)",
"if len(paths) >=1:",
" print(paths[0])",
]
# The below script writes the above program to a file
# and executes it. This is to work around the limitation
# of not being able to upload files as part of execute.
cmd = "from os import linesep;"
cmd += "f = open('script.py', 'w');"
for line in print_lib:
cmd += "f.write(\"%s\" + linesep);" % line
cmd += "f.close();"
cmd += "from os import system;"
cmd += "system(\"%s script.py\");" % python_bin
result = execute(repository_ctx, [python_bin, "-c", cmd])
return result.stdout.strip()
def _check_python_lib(repository_ctx, python_lib):
"""Checks the python lib path."""
cmd = 'test -d "%s" -a -x "%s"' % (python_lib, python_lib)
result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd])
if result.return_code == 1:
auto_config_fail("Invalid python library path: %s" % python_lib)
def _check_python_bin(repository_ctx, python_bin):
"""Checks the python bin path."""
cmd = '[[ -x "%s" ]] && [[ ! -d "%s" ]]' % (python_bin, python_bin)
result = raw_exec(repository_ctx, [get_bash_bin(repository_ctx), "-c", cmd])
if result.return_code == 1:
auto_config_fail("--define %s='%s' is not executable. Is it the python binary?" % (
PYTHON_BIN_PATH,
python_bin,
))
def _get_python_include(repository_ctx, python_bin):
"""Gets the python include path."""
result = execute(
repository_ctx,
[
python_bin,
"-c",
"from __future__ import print_function;" +
"from distutils import sysconfig;" +
"print(sysconfig.get_python_inc())",
],
error_details = ("Is the Python binary path set up right? " +
"(See ./configure or " + PYTHON_BIN_PATH + ".) " +
"Is distutils installed?"),
error_msg = "Problem getting python include path.",
)
return result.stdout.splitlines()[0]
def _get_python_import_lib_name(repository_ctx, python_bin):
"""Get Python import library name (pythonXY.lib) on Windows."""
result = execute(
repository_ctx,
[
python_bin,
"-c",
"import sys;" +
'print("python" + str(sys.version_info[0]) + ' +
' str(sys.version_info[1]) + ".lib")',
],
error_details = ("Is the Python binary path set up right? " +
"(See ./configure or " + PYTHON_BIN_PATH + ".) "),
error_msg = "Problem getting python import library.",
)
return result.stdout.splitlines()[0]
def _create_local_python_repository(repository_ctx):
"""Creates the repository containing files set up to build with Python."""
# Resolve all labels before doing any real work. Resolving causes the
# function to be restarted with all previous state being lost. This
# can easily lead to a O(n^2) runtime in the number of labels.
build_tpl = repository_ctx.path(Label("//third_party/com_github_tensorflow_tensorflow/py:BUILD.tpl"))
python_bin = get_python_bin(repository_ctx)
_check_python_bin(repository_ctx, python_bin)
python_lib = _get_python_lib(repository_ctx, python_bin)
_check_python_lib(repository_ctx, python_lib)
python_include = _get_python_include(repository_ctx, python_bin)
python_include_rule = _symlink_genrule_for_dir(
repository_ctx,
python_include,
"python_include",
"python_include",
)
python_import_lib_genrule = ""
# To build Python C/C++ extension on Windows, we need to link to python import library pythonXY.lib
# See https://docs.python.org/3/extending/windows.html
if is_windows(repository_ctx):
python_include = _norm_path(python_include)
python_import_lib_name = _get_python_import_lib_name(repository_ctx, python_bin)
python_import_lib_src = python_include.rsplit("/", 1)[0] + "/libs/" + python_import_lib_name
python_import_lib_genrule = _symlink_genrule_for_dir(
repository_ctx,
None,
"",
"python_import_lib",
[python_import_lib_src],
[python_import_lib_name],
)
repository_ctx.template("BUILD", build_tpl, {
"%{PYTHON_BIN_PATH}": python_bin,
"%{PYTHON_INCLUDE_GENRULE}": python_include_rule,
"%{PYTHON_IMPORT_LIB_GENRULE}": python_import_lib_genrule,
})
def _create_remote_python_repository(repository_ctx, remote_config_repo):
"""Creates pointers to a remotely configured repo set up to build with Python.
"""
repository_ctx.template("BUILD", config_repo_label(remote_config_repo, ":BUILD"), {})
def _python_autoconf_impl(repository_ctx):
"""Implementation of the python_autoconf repository rule."""
if get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO) != None:
_create_remote_python_repository(
repository_ctx,
get_host_environ(repository_ctx, TF_PYTHON_CONFIG_REPO),
)
else:
_create_local_python_repository(repository_ctx)
_ENVIRONS = [
BAZEL_SH,
PYTHON_BIN_PATH,
PYTHON_LIB_PATH,
]
# remote_python_configure = repository_rule(
# attrs = {
# "environ": attr.string_dict(),
# },
# environ = _ENVIRONS,
# remotable = True,
# implementation = _create_local_python_repository,
# )
python_configure = repository_rule(
environ = _ENVIRONS + [TF_PYTHON_CONFIG_REPO],
implementation = _python_autoconf_impl,
)
"""Detects and configures the local Python.
Add the following to your WORKSPACE FILE:
```python
python_configure(name = "local_config_python")
```
Args:
name: A unique name for this workspace rule.
"""
| []
| []
| [
"PYTHONPATH"
]
| [] | ["PYTHONPATH"] | python | 1 | 0 | |
pkg/signature/kms/hashivault/client.go | //
// Copyright 2021 The Sigstore Authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package hashivault
import (
"context"
"crypto"
"encoding/base64"
"encoding/json"
"fmt"
"log"
"os"
"path/filepath"
"regexp"
"time"
"github.com/ReneKroon/ttlcache/v2"
vault "github.com/hashicorp/vault/api"
"github.com/mitchellh/go-homedir"
"github.com/pkg/errors"
"github.com/sigstore/sigstore/pkg/cryptoutils"
)
type hashivaultClient struct {
client *vault.Client
keyPath string
transitSecretEnginePath string
keyCache *ttlcache.Cache
}
var (
errReference = errors.New("kms specification should be in the format hashivault://<key>")
referenceRegex = regexp.MustCompile(`^hashivault://(?P<path>\w(([\w-.]+)?\w)?)$`)
prefixRegex = regexp.MustCompile("vault:v[0-9]+:")
)
const (
vaultV1DataPrefix = "vault:v1:"
// use a consistent key for cache lookups
cacheKey = "signer"
// ReferenceScheme schemes for various KMS services are copied from https://github.com/google/go-cloud/tree/master/secrets
ReferenceScheme = "hashivault://"
)
// ValidReference returns a non-nil error if the reference string is invalid
func ValidReference(ref string) error {
if !referenceRegex.MatchString(ref) {
return errReference
}
return nil
}
func parseReference(resourceID string) (keyPath string, err error) {
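	// Illustrative example: "hashivault://my-signing-key" yields keyPath "my-signing-key".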
i := referenceRegex.SubexpIndex("path")
v := referenceRegex.FindStringSubmatch(resourceID)
if len(v) < i+1 {
err = errors.Errorf("invalid vault format %q", resourceID)
return
}
keyPath = v[i]
return
}
func newHashivaultClient(keyResourceID string) (*hashivaultClient, error) {
keyPath, err := parseReference(keyResourceID)
if err != nil {
return nil, err
}
address := os.Getenv("VAULT_ADDR")
if address == "" {
return nil, errors.New("VAULT_ADDR is not set")
}
token := os.Getenv("VAULT_TOKEN")
if token == "" {
log.Printf("VAULT_TOKEN is not set, trying to read token from file at path ~/.vault-token")
homeDir, err := homedir.Dir()
if err != nil {
return nil, errors.Wrap(err, "get home directory")
}
tokenFromFile, err := os.ReadFile(filepath.Join(homeDir, ".vault-token"))
if err != nil {
return nil, errors.Wrap(err, "read .vault-token file")
}
token = string(tokenFromFile)
}
client, err := vault.NewClient(&vault.Config{
Address: address,
})
if err != nil {
return nil, errors.Wrap(err, "new vault client")
}
client.SetToken(token)
transitSecretEnginePath := os.Getenv("TRANSIT_SECRET_ENGINE_PATH")
if transitSecretEnginePath == "" {
transitSecretEnginePath = "transit"
}
hvClient := &hashivaultClient{
client: client,
keyPath: keyPath,
transitSecretEnginePath: transitSecretEnginePath,
keyCache: ttlcache.NewCache(),
}
hvClient.keyCache.SetLoaderFunction(hvClient.keyCacheLoaderFunction)
hvClient.keyCache.SkipTTLExtensionOnHit(true)
return hvClient, nil
}
func (h *hashivaultClient) keyCacheLoaderFunction(key string) (data interface{}, ttl time.Duration, err error) {
ttl = time.Second * 300
var pubKey crypto.PublicKey
pubKey, err = h.fetchPublicKey(context.Background())
if err != nil {
data = nil
return
}
data = pubKey
return data, ttl, err
}
func (h *hashivaultClient) fetchPublicKey(_ context.Context) (crypto.PublicKey, error) {
client := h.client.Logical()
keyResult, err := client.Read(fmt.Sprintf("/%s/keys/%s", h.transitSecretEnginePath, h.keyPath))
if err != nil {
return nil, errors.Wrap(err, "public key")
}
keysData, hasKeys := keyResult.Data["keys"]
latestVersion, hasVersion := keyResult.Data["latest_version"]
if !hasKeys || !hasVersion {
return nil, errors.New("Failed to read transit key keys: corrupted response")
}
keys, ok := keysData.(map[string]interface{})
if !ok {
return nil, errors.New("Failed to read transit key keys: Invalid keys map")
}
keyVersion := latestVersion.(json.Number)
keyData, ok := keys[string(keyVersion)]
if !ok {
return nil, errors.New("Failed to read transit key keys: corrupted response")
}
publicKeyPem, ok := keyData.(map[string]interface{})["public_key"]
if !ok {
return nil, errors.New("Failed to read transit key keys: corrupted response")
}
return cryptoutils.UnmarshalPEMToPublicKey([]byte(publicKeyPem.(string)))
}
func (h *hashivaultClient) public() (crypto.PublicKey, error) {
return h.keyCache.Get(cacheKey)
}
func (h hashivaultClient) sign(digest []byte, alg crypto.Hash) ([]byte, error) {
client := h.client.Logical()
signResult, err := client.Write(fmt.Sprintf("/%s/sign/%s%s", h.transitSecretEnginePath, h.keyPath, hashString(alg)), map[string]interface{}{
"input": base64.StdEncoding.Strict().EncodeToString(digest),
"prehashed": alg != crypto.Hash(0),
})
if err != nil {
return nil, errors.Wrap(err, "Transit: failed to sign payload")
}
encodedSignature, ok := signResult.Data["signature"]
if !ok {
return nil, errors.New("Transit: response corrupted in-transit")
}
return vaultDecode(encodedSignature)
}
func (h hashivaultClient) verify(sig, digest []byte, alg crypto.Hash) error {
client := h.client.Logical()
encodedSig := base64.StdEncoding.EncodeToString(sig)
vaultDataPrefix := os.Getenv("VAULT_KEY_PREFIX")
if vaultDataPrefix == "" {
vaultDataPrefix = vaultV1DataPrefix
}
result, err := client.Write(fmt.Sprintf("/%s/verify/%s/%s", h.transitSecretEnginePath, h.keyPath, hashString(alg)), map[string]interface{}{
"input": base64.StdEncoding.EncodeToString(digest),
"signature": fmt.Sprintf("%s%s", vaultDataPrefix, encodedSig),
})
if err != nil {
return errors.Wrap(err, "verify")
}
valid, ok := result.Data["valid"]
if !ok {
return errors.New("corrupted response")
}
	if isValid, ok := valid.(bool); !ok || !isValid {
return errors.New("Failed vault verification")
}
return nil
}
// Vault likes to prefix base64 data with a version prefix
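// (Illustrative: a response value such as "vault:v1:<base64 signature>" is
// stripped of the "vault:v<N>:" prefix before being base64-decoded.)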
func vaultDecode(data interface{}) ([]byte, error) {
encoded, ok := data.(string)
if !ok {
return nil, errors.New("Received non-string data")
}
return base64.StdEncoding.DecodeString(prefixRegex.ReplaceAllString(encoded, ""))
}
func hashString(h crypto.Hash) string {
var hashStr string
switch h {
case crypto.SHA224:
hashStr = "/sha2-224"
case crypto.SHA256:
hashStr = "/sha2-256"
case crypto.SHA384:
hashStr = "/sha2-384"
case crypto.SHA512:
hashStr = "/sha2-512"
default:
hashStr = ""
}
return hashStr
}
func (h hashivaultClient) createKey(typeStr string) (crypto.PublicKey, error) {
client := h.client.Logical()
if _, err := client.Write(fmt.Sprintf("/%s/keys/%s", h.transitSecretEnginePath, h.keyPath), map[string]interface{}{
"type": typeStr,
}); err != nil {
return nil, errors.Wrap(err, "Failed to create transit key")
}
return h.public()
}
| [
"\"VAULT_ADDR\"",
"\"VAULT_TOKEN\"",
"\"TRANSIT_SECRET_ENGINE_PATH\"",
"\"VAULT_KEY_PREFIX\""
]
| []
| [
"VAULT_ADDR",
"TRANSIT_SECRET_ENGINE_PATH",
"VAULT_TOKEN",
"VAULT_KEY_PREFIX"
]
| [] | ["VAULT_ADDR", "TRANSIT_SECRET_ENGINE_PATH", "VAULT_TOKEN", "VAULT_KEY_PREFIX"] | go | 4 | 0 | |
rbac/providers/ldap/delta_inbound_sync.py | # Copyright 2019 Contributors to Hyperledger Sawtooth
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ------------------------------------------------------------------------------
""" LDAP Delta Inbound Sync
"""
import os
import json
import time
from datetime import datetime, timezone
import rethinkdb as r
import ldap3
from rbac.providers.common import ldap_connector
from rbac.common.logs import get_logger
from rbac.providers.common.inbound_filters import (
inbound_user_filter,
inbound_group_filter,
)
from rbac.providers.common.db_queries import connect_to_db, save_sync_time
from rbac.providers.common.rbac_transactions import add_transaction
LDAP_DC = os.getenv("LDAP_DC")
LDAP_SERVER = os.getenv("LDAP_SERVER")
LDAP_USER = os.getenv("LDAP_USER")
LDAP_PASS = os.getenv("LDAP_PASS")
DELTA_SYNC_INTERVAL_SECONDS = int(os.getenv("DELTA_SYNC_INTERVAL_SECONDS", "3600"))
LOGGER = get_logger(__name__)
def fetch_ldap_data():
"""
    Gets entries for all (Users | Groups) in Active Directory changed since the last sync, saves the
    time of the sync, and inserts the data into RethinkDB.
"""
LOGGER.debug("Connecting to RethinkDB...")
connect_to_db()
LOGGER.debug("Successfully connected to RethinkDB")
last_sync = (
r.table("sync_tracker")
.filter({"provider_id": LDAP_DC})
.max("timestamp")
.coerce_to("object")
.run()
)
last_sync_time = last_sync["timestamp"]
last_sync_time_formatted = to_date_ldap_query(rethink_timestamp=last_sync_time)
search_filter = (
"(&(|(objectClass=person)(objectClass=group))(whenChanged>=%s))"
% last_sync_time_formatted
)
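    # e.g. with an illustrative sync time the resulting filter string is:
    # "(&(|(objectClass=person)(objectClass=group))(whenChanged>=20190601123045.0Z))"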
ldap_connection = ldap_connector.await_connection(LDAP_SERVER, LDAP_USER, LDAP_PASS)
ldap_connection.search(
search_base=LDAP_DC,
search_filter=search_filter,
attributes=ldap3.ALL_ATTRIBUTES,
)
parsed_last_sync_time = datetime.strptime(
last_sync_time.split("+")[0], "%Y-%m-%dT%H:%M:%S.%f"
).replace(tzinfo=timezone.utc)
insert_to_db(data_dict=ldap_connection.entries, when_changed=parsed_last_sync_time)
def to_date_ldap_query(rethink_timestamp):
"""
    Transforms a timestamp stored in RethinkDB into a string in the LDAP query format YYYYmmddHHMMSS.0Z.
"""
return datetime.strptime(
rethink_timestamp.split("+")[0], "%Y-%m-%dT%H:%M:%S.%f"
).strftime("%Y%m%d%H%M%S.0Z")
def insert_to_db(data_dict, when_changed):
"""Insert (Users | Groups) individually to RethinkDB from dict of data and begins delta sync timer."""
insertion_counter = 0
for entry in data_dict:
entry_to_insert = {}
entry_json = json.loads(entry.entry_to_json())
entry_attributes = entry_json["attributes"]
for attribute in entry_attributes:
if len(entry_attributes[attribute]) > 1:
entry_to_insert[attribute] = entry_attributes[attribute]
else:
entry_to_insert[attribute] = entry_attributes[attribute][0]
if entry.whenChanged.value > when_changed:
if "person" in entry.objectClass.value:
data_type = "user"
standardized_entry = inbound_user_filter(entry_to_insert, "ldap")
else:
data_type = "group"
standardized_entry = inbound_group_filter(entry_to_insert, "ldap")
entry_modified_timestamp = entry.whenChanged.value.strftime(
"%Y-%m-%dT%H:%M:%S.%f+00:00"
)
inbound_entry = {
"data": standardized_entry,
"data_type": data_type,
"sync_type": "delta",
"timestamp": entry_modified_timestamp,
"provider_id": LDAP_DC,
}
add_transaction(inbound_entry)
r.table("inbound_queue").insert(inbound_entry).run()
sync_source = "ldap-" + data_type
provider_id = LDAP_DC
save_sync_time(provider_id, sync_source, "delta", entry_modified_timestamp)
insertion_counter += 1
LOGGER.info("Inserted %s records into inbound_queue.", insertion_counter)
def inbound_delta_sync():
"""Runs the delta sync for data_type every DELTA_SYNC_INTERVAL_SECONDS."""
if LDAP_DC:
while True:
time.sleep(DELTA_SYNC_INTERVAL_SECONDS)
LOGGER.info("LDAP delta sync starting")
fetch_ldap_data()
LOGGER.info(
"LDAP delta sync completed, next delta sync will occur in %s seconds",
str(DELTA_SYNC_INTERVAL_SECONDS),
)
else:
LOGGER.info(
"LDAP Domain Controller is not provided, skipping LDAP delta syncs."
)
| []
| []
| [
"DELTA_SYNC_INTERVAL_SECONDS",
"LDAP_SERVER",
"LDAP_PASS",
"LDAP_DC",
"LDAP_USER"
]
| [] | ["DELTA_SYNC_INTERVAL_SECONDS", "LDAP_SERVER", "LDAP_PASS", "LDAP_DC", "LDAP_USER"] | python | 5 | 0 | |
source/conf.py | # -*- coding: utf-8 -*-
#
# armacode documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 08 13:12:12 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# Mock to ignore Modules
#import mock
#autodoc_mock_imports = ['Rhino', 'System', 'rhinoscriptsyntax', 'scriptcontext', 'clr', 'Grasshopper']
#for mod_name in autodoc_mock_imports :
# sys.modules[mod_name] = mock.Mock()
class Mock(object):
__all__ = []
def __init__(self, *args, **kwargs):
pass
def __call__(self, *args, **kwargs):
return Mock()
@classmethod
def __getattr__(cls, name):
if name in ('__file__', '__path__'):
return '/dev/null'
elif name[0] == name[0].upper():
mockType = type(name, (), {})
mockType.__module__ = __name__
return mockType
else:
return Mock()
MOCK_MODULES = ['Rhino', 'System', 'rhinoscriptsyntax', 'scriptcontext', 'clr', 'Grasshopper']
for mod_name in MOCK_MODULES:
sys.modules[mod_name] = Mock()
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..\\..\\..\\armacode\\Python'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.mathjax',
#'sphinx.ext.napoleon', #Required for google style docstrings Sphinx >1.3
#'sphinxcontrib.napoleon', #Required for google style docstrings
'sphinx.ext.ifconfig',
'sphinx.ext.viewcode',
'sphinx.ext.autodoc'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'armacode'
copyright = u'2014, Tony Ho'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = 'Unknown'
# The full version, including alpha/beta/rc tags.
release = 'Unknown'
# Auto detect version from Version file.
with open (os.getcwd()+"/../VERSION", "r") as myfile:
data = []
data.append(myfile.readline().strip("\r\n"))
release = data[0]
version = str.split(data[0])[0]
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = []
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
html_title = project + " Documentation"
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
html_favicon = "_static/ar-ma_favicon.ico"
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'armacode'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'armacode.tex', u'armacode Documentation',
u'Tony Ho', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'armacode', u'armacode Documentation',
[u'Tony Ho'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'armacode', u'armacode Documentation',
u'Tony Ho', 'armacode', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
# -- Options for Epub output ----------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'armacode'
epub_author = u'Tony Ho'
epub_publisher = u'Tony Ho'
epub_copyright = u'2014, Tony Ho'
# The basename for the epub file. It defaults to the project name.
#epub_basename = u'armacode'
# The HTML theme for the epub output. Since the default themes are not optimized
# for small screen space, using the same theme for HTML and epub output is
# usually not wise. This defaults to 'epub', a theme designed to save visual
# space.
#epub_theme = 'epub'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# A sequence of (type, uri, title) tuples for the guide element of content.opf.
#epub_guide = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
epub_exclude_files = ['search.html']
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Choose between 'default' and 'includehidden'.
#epub_tocscope = 'default'
# Fix unsupported image types using the PIL.
#epub_fix_images = False
# Scale large images.
#epub_max_image_width = 0
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#epub_show_urls = 'inline'
# If false, no index is generated.
#epub_use_index = True
# Napoleon settings
napoleon_google_docstring = True
napoleon_numpy_docstring = True
napoleon_include_private_with_doc = False
napoleon_include_special_with_doc = True
napoleon_use_admonition_for_examples = False
napoleon_use_admonition_for_notes = False
napoleon_use_admonition_for_references = False
napoleon_use_ivar = False
napoleon_use_param = True
napoleon_use_rtype = True
# Read the docs stuff
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
if not on_rtd:
#Choose a theme.
html_theme = "bootstrap" #Default theme
html_theme = "sphinx_rtd_theme" #ReadTheDocs, light theme
html_theme = "basic" #No Theme
if html_theme == "sphinx_rtd_theme":
print "Applying ReadTheDocs theme (sphinx_rtd_theme)"
import sphinx_rtd_theme
html_theme = "sphinx_rtd_theme"
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
html_theme_options = {
'canonical_url': '',
'analytics_id': '',
'logo_only': False,
'display_version': True,
'prev_next_buttons_location': 'bottom',
'style_external_links': False,
# 'vcs_pageview_mode': '',
# Toc options
'collapse_navigation': True,
'sticky_navigation': True,
'navigation_depth': 4,
'includehidden': True,
'titles_only': False
}
if html_theme == "bootstrap":
print "Applying AR-MA custom Bootstrap theme (sphinx_bootstrap_theme)"
#Bootstrap theme and bootswatch
#import sphinx_bootstrap_theme
html_theme = 'bootstrap'
html_theme_path = [".\\..\\theme"] #Customized ar-ma bootswatch
# (Optional) Logo. Should be small enough to fit the navbar (ideally 24x24).
# Path should be relative to the ``_static`` files directory.
html_logo = "_static\\ar-ma_logo.png"
# Theme options are theme-specific and customize the look and feel of a
# theme further.
html_theme_options = {
# Navigation bar title. (Default: ``project`` value)
#'navbar_title': "Demo",
# Tab name for entire site. (Default: "Site")
#'navbar_site_name': "Site",
# A list of tuples containing pages or urls to link to.
# Valid tuples should be in the following forms:
# (name, page) # a link to a page
# (name, "/aa/bb", 1) # a link to an arbitrary relative url
# (name, "http://example.com", True) # arbitrary absolute url
# Note the "1" or "True" value above as the third argument to indicate
# an arbitrary url.
# 'navbar_links': [
# ("Examples", "examples"),
# ("Link", "http://example.com", True),
# ],
# Render the next and previous page links in navbar. (Default: true)
'navbar_sidebarrel': True,
# Render the current pages TOC in the navbar. (Default: true)
'navbar_pagenav': True,
# Tab name for the current pages TOC. (Default: "Page")
#'navbar_pagenav_name': "Page",
# Global TOC depth for "site" navbar tab. (Default: 1)
# Switching to -1 shows all levels.
'globaltoc_depth': 2,
# Include hidden TOCs in Site navbar?
#
# Note: If this is "false", you cannot have mixed ``:hidden:`` and
# non-hidden ``toctree`` directives in the same page, or else the build
# will break.
#
# Values: "true" (default) or "false"
'globaltoc_includehidden': "true",
# HTML navbar class (Default: "navbar") to attach to <div> element.
# For black navbar, do "navbar navbar-inverse"
#'navbar_class': "navbar navbar-inverse",
# Fix navigation bar to top of page?
# Values: "true" (default) or "false"
'navbar_fixed_top': "true",
# Location of link to source.
# Options are "nav" (default), "footer" or anything else to exclude.
'source_link_position': "nav",
# Bootswatch (http://bootswatch.com/) theme.
#
# Options are nothing with "" (default) or the name of a valid theme
# such as "amelia" or "cosmo".
'bootswatch_theme': "ar-ma",
# Choose Bootstrap version.
# Values: "3" (default) or "2" (in quotes)
'bootstrap_version': "3"
}
| []
| []
| [
"READTHEDOCS"
]
| [] | ["READTHEDOCS"] | python | 1 | 0 | |
oscar/lib/python2.7/site-packages/tox/session.py | """
Automatically package and test a Python project against configurable
Python2 and Python3 based virtual environments. Environments are
setup by using virtualenv. Configuration is generally done through an
INI-style "tox.ini" file.
"""
from __future__ import with_statement
import tox
import py
import os
import sys
import subprocess
from tox._verlib import NormalizedVersion, IrrationalVersionError
from tox.venv import VirtualEnv
from tox.config import parseconfig
from tox.result import ResultLog
from subprocess import STDOUT
def now():
return py.std.time.time()
def prepare(args):
config = parseconfig(args)
if config.option.help:
show_help(config)
raise SystemExit(0)
elif config.option.helpini:
show_help_ini(config)
raise SystemExit(0)
return config
def main(args=None):
try:
config = prepare(args)
retcode = Session(config).runcommand()
raise SystemExit(retcode)
except KeyboardInterrupt:
raise SystemExit(2)
def show_help(config):
tw = py.io.TerminalWriter()
tw.write(config._parser._format_help())
tw.line()
tw.line("Environment variables", bold=True)
tw.line("TOXENV: comma separated list of environments "
"(overridable by '-e')")
tw.line("TOX_TESTENV_PASSENV: space-separated list of extra "
"environment variables to be passed into test command "
"environments")
def show_help_ini(config):
tw = py.io.TerminalWriter()
tw.sep("-", "per-testenv attributes")
for env_attr in config._testenv_attr:
tw.line("%-15s %-8s default: %s" %
(env_attr.name, "<" + env_attr.type + ">", env_attr.default), bold=True)
tw.line(env_attr.help)
tw.line()
class Action(object):
def __init__(self, session, venv, msg, args):
self.venv = venv
self.msg = msg
self.activity = msg.split(" ", 1)[0]
self.session = session
self.report = session.report
self.args = args
self.id = venv and venv.envconfig.envname or "tox"
self._popenlist = []
if self.venv:
self.venvname = self.venv.name
else:
self.venvname = "GLOB"
if msg == "runtests":
cat = "test"
else:
cat = "setup"
envlog = session.resultlog.get_envlog(self.venvname)
self.commandlog = envlog.get_commandlog(cat)
def __enter__(self):
self.report.logaction_start(self)
def __exit__(self, *args):
self.report.logaction_finish(self)
def setactivity(self, name, msg):
self.activity = name
self.report.verbosity0("%s %s: %s" % (self.venvname, name, msg), bold=True)
def info(self, name, msg):
self.report.verbosity1("%s %s: %s" % (self.venvname, name, msg), bold=True)
def _initlogpath(self, actionid):
if self.venv:
logdir = self.venv.envconfig.envlogdir
else:
logdir = self.session.config.logdir
try:
l = logdir.listdir("%s-*" % actionid)
except py.error.ENOENT:
logdir.ensure(dir=1)
l = []
num = len(l)
path = logdir.join("%s-%s.log" % (actionid, num))
f = path.open('w')
f.flush()
return f
def popen(self, args, cwd=None, env=None, redirect=True, returnout=False, ignore_ret=False):
stdout = outpath = None
resultjson = self.session.config.option.resultjson
if resultjson or redirect:
fout = self._initlogpath(self.id)
fout.write("actionid: %s\nmsg: %s\ncmdargs: %r\nenv: %s\n\n" % (
self.id, self.msg, args, env))
fout.flush()
self.popen_outpath = outpath = py.path.local(fout.name)
fin = outpath.open()
fin.read() # read the header, so it won't be written to stdout
stdout = fout
elif returnout:
stdout = subprocess.PIPE
if cwd is None:
# XXX cwd = self.session.config.cwd
cwd = py.path.local()
try:
popen = self._popen(args, cwd, env=env,
stdout=stdout, stderr=STDOUT)
except OSError as e:
self.report.error("invocation failed (errno %d), args: %s, cwd: %s" %
(e.errno, args, cwd))
raise
popen.outpath = outpath
popen.args = [str(x) for x in args]
popen.cwd = cwd
popen.action = self
self._popenlist.append(popen)
try:
self.report.logpopen(popen, env=env)
try:
if resultjson and not redirect:
assert popen.stderr is None # prevent deadlock
out = None
last_time = now()
while 1:
fin_pos = fin.tell()
# we have to read one byte at a time, otherwise there
# might be no output for a long time with slow tests
data = fin.read(1)
if data:
sys.stdout.write(data)
if '\n' in data or (now() - last_time) > 1:
# we flush on newlines or after 1 second to
# provide quick enough feedback to the user
# when printing a dot per test
sys.stdout.flush()
last_time = now()
elif popen.poll() is not None:
if popen.stdout is not None:
popen.stdout.close()
break
else:
py.std.time.sleep(0.1)
fin.seek(fin_pos)
fin.close()
else:
out, err = popen.communicate()
except KeyboardInterrupt:
self.report.keyboard_interrupt()
popen.wait()
raise KeyboardInterrupt()
ret = popen.wait()
finally:
self._popenlist.remove(popen)
if ret and not ignore_ret:
invoked = " ".join(map(str, popen.args))
if outpath:
self.report.error("invocation failed (exit code %d), logfile: %s" %
(ret, outpath))
out = outpath.read()
self.report.error(out)
if hasattr(self, "commandlog"):
self.commandlog.add_command(popen.args, out, ret)
raise tox.exception.InvocationError(
"%s (see %s)" % (invoked, outpath), ret)
else:
raise tox.exception.InvocationError("%r" % (invoked, ), ret)
if not out and outpath:
out = outpath.read()
if hasattr(self, "commandlog"):
self.commandlog.add_command(popen.args, out, ret)
return out
def _rewriteargs(self, cwd, args):
newargs = []
for arg in args:
if sys.platform != "win32" and isinstance(arg, py.path.local):
arg = cwd.bestrelpath(arg)
newargs.append(str(arg))
# subprocess does not always take kindly to .py scripts
# so adding the interpreter here.
if sys.platform == "win32":
ext = os.path.splitext(str(newargs[0]))[1].lower()
if ext == '.py' and self.venv:
newargs = [str(self.envconfig.envpython)] + newargs
return newargs
def _popen(self, args, cwd, stdout, stderr, env=None):
args = self._rewriteargs(cwd, args)
if env is None:
env = os.environ.copy()
return self.session.popen(args, shell=False, cwd=str(cwd),
universal_newlines=True,
stdout=stdout, stderr=stderr, env=env)
class Reporter(object):
actionchar = "-"
def __init__(self, session):
self.tw = py.io.TerminalWriter()
self.session = session
self._reportedlines = []
# self.cumulated_time = 0.0
def logpopen(self, popen, env):
""" log information about the action.popen() created process. """
cmd = " ".join(map(str, popen.args))
if popen.outpath:
self.verbosity1(" %s$ %s >%s" % (popen.cwd, cmd, popen.outpath,))
else:
self.verbosity1(" %s$ %s " % (popen.cwd, cmd))
def logaction_start(self, action):
msg = action.msg + " " + " ".join(map(str, action.args))
self.verbosity2("%s start: %s" % (action.venvname, msg), bold=True)
assert not hasattr(action, "_starttime")
action._starttime = now()
def logaction_finish(self, action):
duration = now() - action._starttime
# self.cumulated_time += duration
self.verbosity2("%s finish: %s after %.2f seconds" % (
action.venvname, action.msg, duration), bold=True)
def startsummary(self):
self.tw.sep("_", "summary")
def info(self, msg):
if self.session.config.option.verbosity >= 2:
self.logline(msg)
def using(self, msg):
if self.session.config.option.verbosity >= 1:
self.logline("using %s" % (msg,), bold=True)
def keyboard_interrupt(self):
self.error("KEYBOARDINTERRUPT")
# def venv_installproject(self, venv, pkg):
# self.logline("installing to %s: %s" % (venv.envconfig.envname, pkg))
def keyvalue(self, name, value):
if name.endswith(":"):
name += " "
self.tw.write(name, bold=True)
self.tw.write(value)
self.tw.line()
def line(self, msg, **opts):
self.logline(msg, **opts)
def good(self, msg):
self.logline(msg, green=True)
def warning(self, msg):
self.logline("WARNING:" + msg, red=True)
def error(self, msg):
self.logline("ERROR: " + msg, red=True)
def skip(self, msg):
self.logline("SKIPPED:" + msg, yellow=True)
def logline(self, msg, **opts):
self._reportedlines.append(msg)
self.tw.line("%s" % msg, **opts)
def verbosity0(self, msg, **opts):
if self.session.config.option.verbosity >= 0:
self.logline("%s" % msg, **opts)
def verbosity1(self, msg, **opts):
if self.session.config.option.verbosity >= 1:
self.logline("%s" % msg, **opts)
def verbosity2(self, msg, **opts):
if self.session.config.option.verbosity >= 2:
self.logline("%s" % msg, **opts)
# def log(self, msg):
# py.builtin.print_(msg, file=sys.stderr)
class Session:
""" (unstable API). the session object that ties
together configuration, reporting, venv creation, testing. """
def __init__(self, config, popen=subprocess.Popen, Report=Reporter):
self.config = config
self.popen = popen
self.resultlog = ResultLog()
self.report = Report(self)
self.make_emptydir(config.logdir)
config.logdir.ensure(dir=1)
# self.report.using("logdir %s" %(self.config.logdir,))
self.report.using("tox.ini: %s" % (self.config.toxinipath,))
self._spec2pkg = {}
self._name2venv = {}
try:
self.venvlist = [
self.getvenv(x)
for x in self.config.envlist
]
except LookupError:
raise SystemExit(1)
self._actions = []
def _makevenv(self, name):
envconfig = self.config.envconfigs.get(name, None)
if envconfig is None:
self.report.error("unknown environment %r" % name)
raise LookupError(name)
venv = VirtualEnv(envconfig=envconfig, session=self)
self._name2venv[name] = venv
return venv
def getvenv(self, name):
""" return a VirtualEnv controler object for the 'name' env. """
try:
return self._name2venv[name]
except KeyError:
return self._makevenv(name)
def newaction(self, venv, msg, *args):
action = Action(self, venv, msg, args)
self._actions.append(action)
return action
def runcommand(self):
self.report.using("tox-%s from %s" % (tox.__version__, tox.__file__))
if self.config.minversion:
minversion = NormalizedVersion(self.config.minversion)
toxversion = NormalizedVersion(tox.__version__)
if toxversion < minversion:
self.report.error(
"tox version is %s, required is at least %s" % (
toxversion, minversion))
raise SystemExit(1)
if self.config.option.showconfig:
self.showconfig()
elif self.config.option.listenvs:
self.showenvs()
else:
return self.subcommand_test()
def _copyfiles(self, srcdir, pathlist, destdir):
for relpath in pathlist:
src = srcdir.join(relpath)
if not src.check():
self.report.error("missing source file: %s" % (src,))
raise SystemExit(1)
target = destdir.join(relpath)
target.dirpath().ensure(dir=1)
src.copy(target)
def _makesdist(self):
setup = self.config.setupdir.join("setup.py")
if not setup.check():
raise tox.exception.MissingFile(setup)
action = self.newaction(None, "packaging")
with action:
action.setactivity("sdist-make", setup)
self.make_emptydir(self.config.distdir)
action.popen([sys.executable, setup, "sdist", "--formats=zip",
"--dist-dir", self.config.distdir, ],
cwd=self.config.setupdir)
try:
return self.config.distdir.listdir()[0]
except py.error.ENOENT:
# check if empty or comment only
data = []
with open(str(setup)) as fp:
for line in fp:
if line and line[0] == '#':
continue
data.append(line)
if not ''.join(data).strip():
self.report.error(
'setup.py is empty'
)
raise SystemExit(1)
self.report.error(
                'No dist directory found. Please check setup.py, e.g. with:\n'
' python setup.py sdist'
)
raise SystemExit(1)
def make_emptydir(self, path):
if path.check():
self.report.info(" removing %s" % path)
py.std.shutil.rmtree(str(path), ignore_errors=True)
path.ensure(dir=1)
def setupenv(self, venv):
if not venv.matching_platform():
venv.status = "platform mismatch"
return # we simply omit non-matching platforms
action = self.newaction(venv, "getenv", venv.envconfig.envdir)
with action:
venv.status = 0
envlog = self.resultlog.get_envlog(venv.name)
try:
status = venv.update(action=action)
except tox.exception.InvocationError:
status = sys.exc_info()[1]
if status:
commandlog = envlog.get_commandlog("setup")
commandlog.add_command(["setup virtualenv"], str(status), 1)
venv.status = status
self.report.error(str(status))
return False
commandpath = venv.getcommandpath("python")
envlog.set_python_info(commandpath)
return True
def finishvenv(self, venv):
action = self.newaction(venv, "finishvenv")
with action:
venv.finish()
return True
def developpkg(self, venv, setupdir):
action = self.newaction(venv, "developpkg", setupdir)
with action:
try:
venv.developpkg(setupdir, action)
return True
except tox.exception.InvocationError:
venv.status = sys.exc_info()[1]
return False
def installpkg(self, venv, path):
"""Install package in the specified virtual environment.
        :param tox.config.VenvConfig venv: Destination environment.
:param str path: Path to the distribution package.
:return: True if package installed otherwise False.
:rtype: bool
"""
self.resultlog.set_header(installpkg=py.path.local(path))
action = self.newaction(venv, "installpkg", path)
with action:
try:
venv.installpkg(path, action)
return True
except tox.exception.InvocationError:
venv.status = sys.exc_info()[1]
return False
def get_installpkg_path(self):
"""
:return: Path to the distribution
:rtype: py.path.local
"""
if not self.config.option.sdistonly and (self.config.sdistsrc or
self.config.option.installpkg):
path = self.config.option.installpkg
if not path:
path = self.config.sdistsrc
path = self._resolve_pkg(path)
self.report.info("using package %r, skipping 'sdist' activity " %
str(path))
else:
try:
path = self._makesdist()
except tox.exception.InvocationError:
v = sys.exc_info()[1]
self.report.error("FAIL could not package project - v = %r" %
v)
return
sdistfile = self.config.distshare.join(path.basename)
if sdistfile != path:
self.report.info("copying new sdistfile to %r" %
str(sdistfile))
try:
sdistfile.dirpath().ensure(dir=1)
except py.error.Error:
self.report.warning("could not copy distfile to %s" %
sdistfile.dirpath())
else:
path.copy(sdistfile)
return path
def subcommand_test(self):
if self.config.skipsdist:
self.report.info("skipping sdist step")
path = None
else:
path = self.get_installpkg_path()
if not path:
return 2
if self.config.option.sdistonly:
return
for venv in self.venvlist:
if self.setupenv(venv):
if venv.envconfig.usedevelop:
self.developpkg(venv, self.config.setupdir)
elif self.config.skipsdist or venv.envconfig.skip_install:
self.finishvenv(venv)
else:
self.installpkg(venv, path)
# write out version dependency information
action = self.newaction(venv, "envreport")
with action:
pip = venv.getcommandpath("pip")
output = venv._pcall([str(pip), "freeze"],
cwd=self.config.toxinidir,
action=action)
# the output contains a mime-header, skip it
output = output.split("\n\n")[-1]
packages = output.strip().split("\n")
action.setactivity("installed", ",".join(packages))
envlog = self.resultlog.get_envlog(venv.name)
envlog.set_installed(packages)
self.runtestenv(venv)
retcode = self._summary()
return retcode
def runtestenv(self, venv, redirect=False):
if not self.config.option.notest:
if venv.status:
return
venv.test(redirect=redirect)
else:
venv.status = "skipped tests"
def _summary(self):
self.report.startsummary()
retcode = 0
for venv in self.venvlist:
status = venv.status
if isinstance(status, tox.exception.InterpreterNotFound):
msg = " %s: %s" % (venv.envconfig.envname, str(status))
if self.config.option.skip_missing_interpreters:
self.report.skip(msg)
else:
retcode = 1
self.report.error(msg)
elif status == "platform mismatch":
msg = " %s: %s" % (venv.envconfig.envname, str(status))
self.report.skip(msg)
elif status and status == "ignored failed command":
msg = " %s: %s" % (venv.envconfig.envname, str(status))
self.report.good(msg)
elif status and status != "skipped tests":
msg = " %s: %s" % (venv.envconfig.envname, str(status))
self.report.error(msg)
retcode = 1
else:
if not status:
status = "commands succeeded"
self.report.good(" %s: %s" % (venv.envconfig.envname, status))
if not retcode:
self.report.good(" congratulations :)")
path = self.config.option.resultjson
if path:
path = py.path.local(path)
path.write(self.resultlog.dumps_json())
self.report.line("wrote json report at: %s" % path)
return retcode
def showconfig(self):
self.info_versions()
self.report.keyvalue("config-file:", self.config.option.configfile)
self.report.keyvalue("toxinipath: ", self.config.toxinipath)
self.report.keyvalue("toxinidir: ", self.config.toxinidir)
self.report.keyvalue("toxworkdir: ", self.config.toxworkdir)
self.report.keyvalue("setupdir: ", self.config.setupdir)
self.report.keyvalue("distshare: ", self.config.distshare)
self.report.keyvalue("skipsdist: ", self.config.skipsdist)
self.report.tw.line()
for envconfig in self.config.envconfigs.values():
self.report.line("[testenv:%s]" % envconfig.envname, bold=True)
for attr in self.config._parser._testenv_attr:
self.report.line(" %-15s = %s"
% (attr.name, getattr(envconfig, attr.name)))
def showenvs(self):
for env in self.config.envlist:
self.report.line("%s" % env)
def info_versions(self):
versions = ['tox-%s' % tox.__version__]
try:
version = py.process.cmdexec("virtualenv --version")
except py.process.cmdexec.Error:
versions.append("virtualenv-1.9.1 (vendored)")
else:
versions.append("virtualenv-%s" % version.strip())
self.report.keyvalue("tool-versions:", " ".join(versions))
def _resolve_pkg(self, pkgspec):
try:
return self._spec2pkg[pkgspec]
except KeyError:
self._spec2pkg[pkgspec] = x = self._resolvepkg(pkgspec)
return x
def _resolvepkg(self, pkgspec):
if not os.path.isabs(str(pkgspec)):
return pkgspec
p = py.path.local(pkgspec)
if p.check():
return p
if not p.dirpath().check(dir=1):
raise tox.exception.MissingDirectory(p.dirpath())
self.report.info("determining %s" % p)
candidates = p.dirpath().listdir(p.basename)
if len(candidates) == 0:
raise tox.exception.MissingDependency(pkgspec)
if len(candidates) > 1:
items = []
for x in candidates:
ver = getversion(x.basename)
if ver is not None:
items.append((ver, x))
else:
self.report.warning("could not determine version of: %s" %
str(x))
items.sort()
if not items:
raise tox.exception.MissingDependency(pkgspec)
return items[-1][1]
else:
return candidates[0]
_rex_getversion = py.std.re.compile(r"[\w_\-\+\.]+-(.*)(\.zip|\.tar\.gz)")
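# Illustrative example: a basename such as "mypackage-1.2.3.zip" matches the
# pattern above, and getversion() below returns NormalizedVersion("1.2.3").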
def getversion(basename):
m = _rex_getversion.match(basename)
if m is None:
return None
version = m.group(1)
try:
return NormalizedVersion(version)
except IrrationalVersionError:
return None
| []
| []
| []
| [] | [] | python | 0 | 0 | |
core/wsgi.py | """
WSGI config for DjChannelsDemo project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'core.settings')
application = get_wsgi_application()
| []
| []
| []
| [] | [] | python | 0 | 0 | |
backend/core/MlDiagnosis/django_api/diabetesEnv/diabetes_api/wsgi.py | """
WSGI config for diabetes_api project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'diabetes_api.settings')
application = get_wsgi_application()
| []
| []
| []
| [] | [] | python | 0 | 0 | |
indulgence/wsgi.py | """
WSGI config for indulgence project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "indulgence.settings")
application = get_wsgi_application()
| []
| []
| []
| [] | [] | python | 0 | 0 | |
Project/train/train.py | import argparse
import json
import os
import pickle
import sys
import sagemaker_containers
import pandas as pd
import torch
import torch.optim as optim
import torch.utils.data
from model import LSTMClassifier
def model_fn(model_dir):
"""Load the PyTorch model from the `model_dir` directory."""
print("Loading model.")
# First, load the parameters used to create the model.
model_info = {}
model_info_path = os.path.join(model_dir, 'model_info.pth')
with open(model_info_path, 'rb') as f:
model_info = torch.load(f)
print("model_info: {}".format(model_info))
# Determine the device and construct the model.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = LSTMClassifier(model_info['embedding_dim'], model_info['hidden_dim'], model_info['vocab_size'])
# Load the stored model parameters.
model_path = os.path.join(model_dir, 'model.pth')
with open(model_path, 'rb') as f:
model.load_state_dict(torch.load(f))
# Load the saved word_dict.
word_dict_path = os.path.join(model_dir, 'word_dict.pkl')
with open(word_dict_path, 'rb') as f:
model.word_dict = pickle.load(f)
model.to(device).eval()
print("Done loading model.")
return model
def _get_train_data_loader(batch_size, training_dir):
print("Get train data loader.")
train_data = pd.read_csv(os.path.join(training_dir, "train.csv"), header=None, names=None)
train_y = torch.from_numpy(train_data[[0]].values).float().squeeze()
train_X = torch.from_numpy(train_data.drop([0], axis=1).values).long()
train_ds = torch.utils.data.TensorDataset(train_X, train_y)
return torch.utils.data.DataLoader(train_ds, batch_size=batch_size)
def train(model, train_loader, epochs, optimizer, loss_fn, device):
"""
This is the training method that is called by the PyTorch training script. The parameters
passed are as follows:
model - The PyTorch model that we wish to train.
train_loader - The PyTorch DataLoader that should be used during training.
epochs - The total number of epochs to train for.
optimizer - The optimizer to use during training.
loss_fn - The loss function used for training.
device - Where the model and data should be loaded (gpu or cpu).
"""
for epoch in range(1, epochs + 1):
model.train()
total_loss = 0
for batch in train_loader:
batch_X, batch_y = batch
batch_X = batch_X.to(device)
batch_y = batch_y.to(device)
# TODO: Complete this train method to train the model provided.
optimizer.zero_grad()
prediction = model(batch_X)
loss = loss_fn(prediction, batch_y)
loss.backward()
optimizer.step()
total_loss += loss.data.item()
print("Epoch: {}, BCELoss: {}".format(epoch, total_loss / len(train_loader)))
if __name__ == '__main__':
# All of the model parameters and training parameters are sent as arguments when the script
# is executed. Here we set up an argument parser to easily access the parameters.
parser = argparse.ArgumentParser()
# Training Parameters
parser.add_argument('--batch-size', type=int, default=512, metavar='N',
help='input batch size for training (default: 512)')
parser.add_argument('--epochs', type=int, default=10, metavar='N',
help='number of epochs to train (default: 10)')
parser.add_argument('--seed', type=int, default=1, metavar='S',
help='random seed (default: 1)')
# Model Parameters
parser.add_argument('--embedding_dim', type=int, default=32, metavar='N',
help='size of the word embeddings (default: 32)')
parser.add_argument('--hidden_dim', type=int, default=100, metavar='N',
help='size of the hidden dimension (default: 100)')
parser.add_argument('--vocab_size', type=int, default=5000, metavar='N',
help='size of the vocabulary (default: 5000)')
# SageMaker Parameters
parser.add_argument('--hosts', type=list, default=json.loads(os.environ['SM_HOSTS']))
parser.add_argument('--current-host', type=str, default=os.environ['SM_CURRENT_HOST'])
parser.add_argument('--model-dir', type=str, default=os.environ['SM_MODEL_DIR'])
parser.add_argument('--data-dir', type=str, default=os.environ['SM_CHANNEL_TRAINING'])
parser.add_argument('--num-gpus', type=int, default=os.environ['SM_NUM_GPUS'])
args = parser.parse_args()
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
print("Using device {}.".format(device))
torch.manual_seed(args.seed)
# Load the training data.
train_loader = _get_train_data_loader(args.batch_size, args.data_dir)
# Build the model.
model = LSTMClassifier(args.embedding_dim, args.hidden_dim, args.vocab_size).to(device)
with open(os.path.join(args.data_dir, "word_dict.pkl"), "rb") as f:
model.word_dict = pickle.load(f)
print("Model loaded with embedding_dim {}, hidden_dim {}, vocab_size {}.".format(
args.embedding_dim, args.hidden_dim, args.vocab_size
))
# Train the model.
optimizer = optim.Adam(model.parameters())
loss_fn = torch.nn.BCELoss()
train(model, train_loader, args.epochs, optimizer, loss_fn, device)
# Save the parameters used to construct the model
model_info_path = os.path.join(args.model_dir, 'model_info.pth')
with open(model_info_path, 'wb') as f:
model_info = {
'embedding_dim': args.embedding_dim,
'hidden_dim': args.hidden_dim,
'vocab_size': args.vocab_size,
}
torch.save(model_info, f)
# Save the word_dict
word_dict_path = os.path.join(args.model_dir, 'word_dict.pkl')
with open(word_dict_path, 'wb') as f:
pickle.dump(model.word_dict, f)
# Save the model parameters
model_path = os.path.join(args.model_dir, 'model.pth')
with open(model_path, 'wb') as f:
torch.save(model.cpu().state_dict(), f)
| []
| []
| [
"SM_MODEL_DIR",
"SM_NUM_GPUS",
"SM_CURRENT_HOST",
"SM_CHANNEL_TRAINING",
"SM_HOSTS"
]
| [] | ["SM_MODEL_DIR", "SM_NUM_GPUS", "SM_CURRENT_HOST", "SM_CHANNEL_TRAINING", "SM_HOSTS"] | python | 5 | 0 | |
volttron/platform/aip.py | # -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
# Copyright (c) 2013, Battelle Memorial Institute
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation
# are those of the authors and should not be interpreted as representing
# official policies, either expressed or implied, of the FreeBSD
# Project.
#
# This material was prepared as an account of work sponsored by an
# agency of the United States Government. Neither the United States
# Government nor the United States Department of Energy, nor Battelle,
# nor any of their employees, nor any jurisdiction or organization that
# has cooperated in the development of these materials, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness or any
# information, apparatus, product, software, or process disclosed, or
# represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or
# service by trade name, trademark, manufacturer, or otherwise does not
# necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors
# expressed herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY
# operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
# pylint: disable=W0142
#}}}
'''Component for the instantiation and packaging of agents.'''
import contextlib
import errno
from fcntl import fcntl, F_GETFL, F_SETFL
import logging
import os
from os import O_NONBLOCK
import shutil
import signal
import subprocess
from subprocess import PIPE
import sys
import syslog
import uuid
import gevent
from gevent import select
import simplejson as jsonapi
from wheel.tool import unpack
import zmq
from . import messaging
from .messaging import topics
from .packages import UnpackedPackage
try:
from volttron.restricted import auth
from volttron.restricted import certs
from volttron.restricted.resmon import ResourceError
except ImportError:
auth = None
_log = logging.getLogger(__name__)
def process_wait(p):
timeout = 0.01
while True:
result = p.poll()
if result is not None:
return result
gevent.sleep(timeout)
if timeout < 0.5:
timeout *= 2
def gevent_readlines(fd):
fcntl(fd, F_SETFL, fcntl(fd, F_GETFL) | O_NONBLOCK)
data = []
while True:
select.select([fd], [], [])
buf = fd.read(4096)
if not buf:
break
parts = buf.split('\n')
if len(parts) < 2:
data.extend(parts)
else:
first, rest, data = (
''.join(data + parts[0:1]), parts[1:-1], parts[-1:])
yield first
for line in rest:
yield line
if any(data):
yield ''.join(data)
_level_map = {syslog.LOG_DEBUG: logging.DEBUG,
syslog.LOG_INFO: logging.INFO,
syslog.LOG_NOTICE: logging.INFO,
syslog.LOG_WARNING: logging.WARNING,
syslog.LOG_ERR: logging.ERROR,
syslog.LOG_CRIT: logging.CRITICAL,
syslog.LOG_ALERT: logging.CRITICAL,
syslog.LOG_EMERG: logging.CRITICAL,}
def _log_stream(name, agent, pid, level, stream):
log = logging.getLogger(name)
extra = {'processName': agent, 'process': pid}
for line in stream:
if line[0:1] == '{' and line[-1:] == '}':
try:
obj = jsonapi.loads(line)
record = logging.makeLogRecord(obj)
except Exception:
pass
else:
record.remote_name, record.name = record.name, name
record.__dict__.update(extra)
log.handle(record)
continue
if line[0:1] == '<' and line[2:3] == '>' and line[1:2].isdigit():
yield _level_map.get(int(line[1]), level), line[3:]
else:
yield level, line
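# Illustrative note: agent output may carry a syslog-style priority prefix. For
# example, a line such as "<4>low disk space" is yielded at logging.WARNING
# (syslog priority 4 == LOG_WARNING), while unprefixed lines keep the stream's
# default level.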
def log_stream(name, agent, pid, path, stream):
log = logging.getLogger(name)
extra = {'processName': agent, 'process': pid}
unset = {'thread': None, 'threadName': None, 'module': None}
for level, line in stream:
record = logging.LogRecord(name, level, path, 0, line, [], None)
record.__dict__.update(extra)
record.__dict__.update(unset)
log.handle(record)
class IgnoreErrno(object):
ignore = []
def __init__(self, errno, *more):
self.ignore = [errno]
self.ignore.extend(more)
def __enter__(self):
return
def __exit__(self, exc_type, exc_value, traceback):
try:
return exc_value.errno in self.ignore
except AttributeError:
pass
ignore_enoent = IgnoreErrno(errno.ENOENT)
class ExecutionEnvironment(object):
'''Environment reserved for agent execution.
Deleting ExecutionEnvironment objects should cause the process to
end and all resources to be returned to the system.
'''
def __init__(self):
self.process = None
def execute(self, *args, **kwargs):
try:
self.process = subprocess.Popen(*args, **kwargs)
except OSError as e:
if e.filename:
raise
raise OSError(*(e.args + (args[0],)))
def __call__(self, *args, **kwargs):
self.execute(*args, **kwargs)
class AIPplatform(object):
'''Manages the main workflow of receiving and sending agents.'''
def __init__(self, env, **kwargs):
self.env = env
self.agents = {}
def setup(self):
for path in [self.run_dir, self.config_dir, self.install_dir]:
if not os.path.exists(path):
os.makedirs(path, 0775)
def finish(self):
for exeenv in self.agents.itervalues():
if exeenv.process.poll() is None:
exeenv.process.send_signal(signal.SIGINT)
for exeenv in self.agents.itervalues():
if exeenv.process.poll() is None:
exeenv.process.terminate()
for exeenv in self.agents.itervalues():
if exeenv.process.poll() is None:
exeenv.process.kill()
def _sub_socket(self):
sock = messaging.Socket(zmq.SUB)
sock.connect(self.env.subscribe_address)
return sock
def _pub_socket(self):
sock = messaging.Socket(zmq.PUSH)
sock.connect(self.env.publish_address)
return sock
def shutdown(self):
with contextlib.closing(self._pub_socket()) as sock:
sock.send_message(topics.PLATFORM_SHUTDOWN,
{'reason': 'Received shutdown command'},
flags=zmq.NOBLOCK)
subscribe_address = property(lambda me: me.env.subscribe_address)
publish_address = property(lambda me: me.env.publish_address)
config_dir = property(lambda me: os.path.abspath(me.env.volttron_home))
install_dir = property(lambda me: os.path.join(me.config_dir, 'agents'))
run_dir = property(lambda me: os.path.join(me.config_dir, 'run'))
def autostart(self):
agents, errors = [], []
for agent_uuid, agent_name in self.list_agents().iteritems():
try:
priority = self._agent_priority(agent_uuid)
except EnvironmentError as exc:
errors.append((agent_uuid, str(exc)))
continue
if priority is not None:
agents.append((priority, agent_uuid))
agents.sort(reverse=True)
for _, agent_uuid in agents:
try:
self.start_agent(agent_uuid)
except Exception as exc:
errors.append((agent_uuid, str(exc)))
return errors
def land_agent(self, agent_wheel):
if auth is None:
raise NotImplementedError()
agent_uuid = self.install_agent(agent_wheel)
try:
self.start_agent(agent_uuid)
self.prioritize_agent(agent_uuid)
except:
self.remove_agent(agent_uuid)
raise
return agent_uuid
def install_agent(self, agent_wheel):
while True:
agent_uuid = str(uuid.uuid4())
if agent_uuid in self.agents:
continue
agent_path = os.path.join(self.install_dir, agent_uuid)
try:
os.mkdir(agent_path)
break
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
try:
if auth is not None and self.env.verify_agents:
unpacker = auth.VolttronPackageWheelFile(agent_wheel, certsobj=certs.Certs())
unpacker.unpack(dest=agent_path)
else:
unpack(agent_wheel, dest=agent_path)
except Exception:
shutil.rmtree(agent_path)
raise
return agent_uuid
def remove_agent(self, agent_uuid):
if agent_uuid not in os.listdir(self.install_dir):
raise ValueError('invalid agent')
self.stop_agent(agent_uuid)
self.agents.pop(agent_uuid, None)
shutil.rmtree(os.path.join(self.install_dir, agent_uuid))
def agent_name(self, agent_uuid):
agent_path = os.path.join(self.install_dir, agent_uuid)
for agent_name in os.listdir(agent_path):
dist_info = os.path.join(
agent_path, agent_name, agent_name + '.dist-info')
if os.path.exists(dist_info):
return agent_name
raise KeyError(agent_uuid)
def list_agents(self):
agents = {}
for agent_uuid in os.listdir(self.install_dir):
try:
agents[agent_uuid] = self.agent_name(agent_uuid)
except KeyError:
pass
return agents
def active_agents(self):
return {agent_uuid: execenv.name
for agent_uuid, execenv in self.agents.iteritems()}
def clear_status(self, clear_all=False):
remove = []
for agent_uuid, execenv in self.agents.iteritems():
if execenv.process.poll() is not None:
if clear_all:
remove.append(agent_uuid)
else:
path = os.path.join(self.install_dir, agent_uuid)
if not os.path.exists(path):
remove.append(agent_uuid)
for agent_uuid in remove:
self.agents.pop(agent_uuid, None)
def status_agents(self):
return [(agent_uuid, agent_name, self.agent_status(agent_uuid))
for agent_uuid, agent_name in self.active_agents().iteritems()]
def tag_agent(self, agent_uuid, tag):
tag_file = os.path.join(self.install_dir, agent_uuid, 'TAG')
if not tag:
with ignore_enoent:
os.unlink(tag_file)
else:
with open(tag_file, 'w') as file:
file.write(tag[:64])
def agent_tag(self, agent_uuid):
if '/' in agent_uuid or agent_uuid in ['.', '..']:
raise ValueError('invalid agent')
tag_file = os.path.join(self.install_dir, agent_uuid, 'TAG')
with ignore_enoent, open(tag_file, 'r') as file:
return file.readline(64)
def _agent_priority(self, agent_uuid):
autostart = os.path.join(self.install_dir, agent_uuid, 'AUTOSTART')
with ignore_enoent, open(autostart) as file:
return file.readline(100).strip()
def agent_priority(self, agent_uuid):
if '/' in agent_uuid or agent_uuid in ['.', '..']:
raise ValueError('invalid agent')
return self._agent_priority(agent_uuid)
def prioritize_agent(self, agent_uuid, priority='50'):
if '/' in agent_uuid or agent_uuid in ['.', '..']:
raise ValueError('invalid agent')
autostart = os.path.join(self.install_dir, agent_uuid, 'AUTOSTART')
if priority is None:
with ignore_enoent:
os.unlink(autostart)
else:
with open(autostart, 'w') as file:
file.write(priority.strip())
def _check_resources(self, resmon, execreqs, reserve=False):
hard_reqs = execreqs.get('hard_requirements', {})
failed_terms = resmon.check_hard_resources(hard_reqs)
if failed_terms:
msg = '\n'.join(' {}: {} ({})'.format(
term, hard_reqs[term], avail)
for term, avail in failed_terms.iteritems())
_log.error('hard resource requirements not met:\n%s', msg)
raise ValueError('hard resource requirements not met')
requirements = execreqs.get('requirements', {})
try:
if reserve:
return resmon.reserve_soft_resources(requirements)
else:
failed_terms = resmon.check_soft_resources(requirements)
if failed_terms:
errmsg = 'soft resource requirements not met'
else:
return
except ResourceError as exc:
errmsg, failed_terms = exc.args
msg = '\n'.join(' {}: {} ({})'.format(
term, requirements.get(term, '<unset>'), avail)
for term, avail in failed_terms.iteritems())
_log.error('%s:\n%s', errmsg, msg)
raise ValueError(errmsg)
def check_resources(self, execreqs):
resmon = getattr(self.env, 'resmon', None)
if resmon:
return self._check_resources(resmon, execreqs, reserve=False)
def _reserve_resources(self, resmon, execreqs):
return self._check_resources(resmon, execreqs, reserve=True)
def get_execreqs(self, agent_uuid):
name = self.agent_name(agent_uuid)
pkg = UnpackedPackage(os.path.join(self.install_dir, agent_uuid, name))
return self._read_execreqs(pkg.distinfo)
def _read_execreqs(self, dist_info):
execreqs_json = os.path.join(dist_info, 'execreqs.json')
try:
with ignore_enoent, open(execreqs_json) as file:
return jsonapi.load(file)
except Exception as exc:
msg = 'error reading execution requirements: {}: {}'.format(
execreqs_json, exc)
_log.error(msg)
raise ValueError(msg)
_log.warning('missing execution requirements: %s', execreqs_json)
return {}
def _launch_agent(self, agent_uuid, agent_path, name=None):
execenv = self.agents.get(agent_uuid)
if execenv and execenv.process.poll() is None:
_log.warning('request to start already running agent %s', agent_path)
raise ValueError('agent is already running')
pkg = UnpackedPackage(agent_path)
if auth is not None and self.env.verify_agents:
auth.UnpackedPackageVerifier(pkg.distinfo).verify()
metadata = pkg.metadata
try:
exports = metadata['extensions']['python.exports']
except KeyError:
try:
exports = metadata['exports']
except KeyError:
raise ValueError('no entry points exported')
try:
module = exports['volttron.agent']['launch']
except KeyError:
try:
module = exports['setuptools.installation']['eggsecutable']
except KeyError:
_log.error('no agent launch class specified in package %s', agent_path)
raise ValueError('no agent launch class specified in package')
config = os.path.join(pkg.distinfo, 'config')
tag = self.agent_tag(agent_uuid)
environ = os.environ.copy()
environ['PYTHONPATH'] = ':'.join([agent_path] + sys.path)
environ['PATH'] = (os.path.abspath(os.path.dirname(sys.executable)) +
':' + environ['PATH'])
if os.path.exists(config):
environ['AGENT_CONFIG'] = config
else:
environ.pop('AGENT_CONFIG', None)
if tag:
environ['AGENT_TAG'] = tag
else:
environ.pop('AGENT_TAG', None)
environ['AGENT_SUB_ADDR'] = self.subscribe_address
environ['AGENT_PUB_ADDR'] = self.publish_address
environ['AGENT_UUID'] = agent_uuid
module, _, func = module.partition(':')
if func:
code = '__import__({0!r}, fromlist=[{1!r}]).{1}()'.format(module, func)
argv = [sys.executable, '-c', code]
else:
argv = [sys.executable, '-m', module]
resmon = getattr(self.env, 'resmon', None)
if resmon is None:
execenv = ExecutionEnvironment()
else:
execreqs = self._read_execreqs(pkg.distinfo)
execenv = self._reserve_resources(resmon, execreqs)
execenv.name = name or agent_path
_log.info('starting agent %s', agent_path)
data_dir = os.path.join(os.path.dirname(pkg.distinfo),
'{}.agent-data'.format(pkg.package_name))
if not os.path.exists(data_dir):
os.mkdir(data_dir)
execenv.execute(argv, cwd=data_dir, env=environ, close_fds=True,
stdin=open(os.devnull), stdout=PIPE, stderr=PIPE)
self.agents[agent_uuid] = execenv
pid = execenv.process.pid
_log.info('agent %s has PID %s', agent_path, pid)
gevent.spawn(log_stream, 'agents.stderr', name, pid, argv[0],
_log_stream('agents.log', name, pid, logging.ERROR,
gevent_readlines(execenv.process.stderr)))
gevent.spawn(log_stream, 'agents.stdout', name, pid, argv[0],
((logging.INFO, line) for line in
gevent_readlines(execenv.process.stdout)))
def launch_agent(self, agent_path):
while True:
agent_uuid = str(uuid.uuid4())
if not (agent_uuid in self.agents or
os.path.exists(os.path.join(self.install_dir, agent_uuid))):
break
if not os.path.exists(agent_path):
msg = 'agent not found: {}'.format(agent_path)
_log.error(msg)
raise ValueError(msg)
self._launch_agent(agent_uuid, os.path.abspath(agent_path))
def agent_status(self, agent_uuid):
execenv = self.agents.get(agent_uuid)
if execenv is None:
return (None, None)
return (execenv.process.pid, execenv.process.poll())
def start_agent(self, agent_uuid):
name = self.agent_name(agent_uuid)
self._launch_agent(
agent_uuid, os.path.join(self.install_dir, agent_uuid, name), name)
def stop_agent(self, agent_uuid):
try:
execenv = self.agents[agent_uuid]
except KeyError:
return
if execenv.process.poll() is None:
execenv.process.send_signal(signal.SIGINT)
try:
return gevent.with_timeout(3, process_wait, execenv.process)
except gevent.Timeout:
execenv.process.terminate()
try:
return gevent.with_timeout(3, process_wait, execenv.process)
except gevent.Timeout:
execenv.process.kill()
try:
return gevent.with_timeout(3, process_wait, execenv.process)
except gevent.Timeout:
raise ValueError('process is unresponsive')
return execenv.process.poll()
| []
| []
| []
| [] | [] | python | 0 | 0 | |
examples/categories/categories.py | import sendgrid
import json
import os
sg = sendgrid.SendGridAPIClient(apikey=os.environ.get('SENDGRID_API_KEY'))
##################################################
# Retrieve all categories #
# GET /categories #
params = {'category': 'test_string', 'limit': 1, 'offset': 1}
response = sg.client.categories.get(query_params=params)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Retrieve Email Statistics for Categories #
# GET /categories/stats #
params = {'end_date': '2016-04-01', 'aggregated_by': 'day', 'limit': 1, 'offset': 1, 'start_date': '2016-01-01', 'categories': 'test_string'}
response = sg.client.categories.stats.get(query_params=params)
print(response.status_code)
print(response.body)
print(response.headers)
##################################################
# Retrieve sums of email stats for each category [Needs: Stats object defined, has category ID?] #
# GET /categories/stats/sums #
params = {'end_date': '2016-04-01', 'aggregated_by': 'day', 'limit': 1, 'sort_by_metric': 'test_string', 'offset': 1, 'start_date': '2016-01-01', 'sort_by_direction': 'asc'}
response = sg.client.categories.stats.sums.get(query_params=params)
print(response.status_code)
print(response.body)
print(response.headers)
| []
| []
| [
"SENDGRID_API_KEY"
]
| [] | ["SENDGRID_API_KEY"] | python | 1 | 0 | |
joeBot/JoeMakerBot.py | import os
from datetime import datetime
from dotenv import load_dotenv
from web3 import Web3
from joeBot import Constants, JoeSubGraph
from joeBot.Constants import ZERO_ADDRESS_256
from joeBot.Utils import readable
load_dotenv()
# web3
w3 = Web3(Web3.HTTPProvider(Constants.AVAX_RPC))
if not w3.isConnected():
print("Error web3 can't connect")
class JoeMaker:
def __init__(self):
# account
self.account = w3.eth.account.privateKeyToAccount((os.getenv("PRIVATE_KEY")))
# contracts
self.joeMakerV1 = w3.eth.contract(address=Constants.JOEMAKERV1_ADDRESS, abi=Constants.JOEMAKERV1_ABI)
self.joeMakerV2 = w3.eth.contract(address=Constants.JOEMAKERV2_ADDRESS, abi=Constants.JOEMAKERV2_ABI)
self.joeMakerV3 = w3.eth.contract(address=Constants.JOEMAKERV3_ADDRESS, abi=Constants.JOEMAKERV3_ABI)
# version actually used
self.joeMaker = self.joeMakerV3
def changeToVersion(self, version):
if version == "v1":
self.joeMaker = self.joeMakerV1
elif version == "v2":
self.joeMaker = self.joeMakerV2
elif version == "v3":
self.joeMaker = self.joeMakerV3
else:
raise ValueError
return "Now using JoeMaker{}".format(version.upper())
def setBridges(self, tokens, bridges):
"""
set bridges.
"""
errors = []
for token, bridge in zip(map(w3.toChecksumAddress, tokens), map(w3.toChecksumAddress, bridges)):
set_bridge = self.joeMaker.functions.setBridge(token, bridge)
try:
set_bridge.call()
try:
tx_hash = self.execContract(set_bridge)
w3.eth.wait_for_transaction_receipt(tx_hash)
except Exception as e:
errors.append("[{}] Error setting bridge:\n{} -> {}: {}".format(
datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S"),
token, bridge, e))
except Exception as e:
errors.append("[{}] Error setting bridge locally:\n{} -> {}: {}".format(
datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S"),
token, bridge, e))
return errors
def execContract(self, func_):
"""
        Sign and send the transactional contract function func_; returns the transaction hash.
"""
nonce = w3.eth.getTransactionCount(self.account.address)
gas = int(float(func_.estimateGas({'from': self.account.address})) * (1 + Constants.PREMIUM_PER_TRANSACTION))
if gas > Constants.MAX_GAS_PER_BLOCK:
raise Exception("Max gas per block reached")
construct_txn = func_.buildTransaction({'from': self.account.address, 'nonce': nonce, 'gas': gas})
signed = self.account.signTransaction(construct_txn)
tx_hash = w3.eth.sendRawTransaction(signed.rawTransaction)
return tx_hash.hex()
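    # Illustrative example: if func_.estimateGas() returned 200000 and
    # Constants.PREMIUM_PER_TRANSACTION were 0.1 (value assumed here; it is
    # defined in Constants), the transaction would be submitted with a gas limit
    # of 220000, provided that stays below Constants.MAX_GAS_PER_BLOCK.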
def _callConvertLocally(self, tokens0, tokens1):
safe_tokens0, safe_tokens1, error_on_pairs = [], [], []
for token0, token1 in zip(map(Web3.toChecksumAddress, tokens0), map(Web3.toChecksumAddress, tokens1)):
try:
self.joeMaker.functions.convert(token0, token1).call({"from": self.account.address})
safe_tokens0.append(token0)
safe_tokens1.append(token1)
except Exception as e:
print(e)
error_on_pairs.append(
"[{}] Error at convert locally:\n{} - {}: {}".format(
datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S"),
token0, token1, e))
return safe_tokens0, safe_tokens1, error_on_pairs
def _callConvertMultiple(self, groups_tokens0, groups_tokens1, error_on_pairs):
pairs, joe_bought_back, = [], []
pos = "Starts convertMultiple()"
for group_tokens0, group_tokens1 in zip(groups_tokens0, groups_tokens1):
call_convert_multiple = self.joeMaker.functions.convertMultiple(group_tokens0, group_tokens1)
try:
pos = "Sends convertMultiple()"
tx_hash = self.execContract(call_convert_multiple)
pos = "Waits for convertMultiple()"
transaction_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)
pos = "Decodes convertMultiple() receipt"
pairs, joe_bought_back = decodeTransactionReceipt(
transaction_receipt, group_tokens0, group_tokens1, pairs, joe_bought_back)
except Exception as e:
error_on_pairs.append(
"[{}] Error at {}:\n{}".format(
datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S"), pos, e))
self._callConvert(group_tokens0, group_tokens1, pairs, joe_bought_back, error_on_pairs)
return pairs, joe_bought_back, error_on_pairs
def _callConvert(self, tokens0, tokens1, pairs, joe_bought_back, error_on_pairs):
pos = "Starts convert()"
for token0, token1 in zip(tokens0, tokens1):
call_convert = self.joeMaker.functions.convert(token0, token1)
try:
pos = "Sends convert()"
tx_hash = self.execContract(call_convert)
pos = "Waits for convert()"
transaction_receipt = w3.eth.wait_for_transaction_receipt(tx_hash)
pos = "Decodes convert() receipt"
pairs, joe_bought_back = decodeTransactionReceipt(
transaction_receipt, token0, token1, pairs, joe_bought_back)
except Exception as e:
error_on_pairs.append(
"[{}] Error at {}:\n{} - {}: {}".format(datetime.utcnow().strftime("%d/%m/%Y %H:%M:%S"),
pos, token0, token1, e))
def callConvertMultiple(self, min_usd_value):
        # Gets the JoeMaker positions that are worth more than min_usd_value
tokens0, tokens1 = JoeSubGraph.getJoeMakerPostitions(min_usd_value, self.joeMaker.address)
print(tokens0, tokens1)
        # Gets the list of tokens that are safe to convert, i.e. those that don't revert locally
safe_tokens0, safe_tokens1, error_on_pairs = self._callConvertLocally(tokens0, tokens1)
        # Groups tokens into lists of 20 (getGroupsOf's default size) so a single transaction doesn't run out of gas
groups_tokens0, groups_tokens1 = getGroupsOf(safe_tokens0), getGroupsOf(safe_tokens1)
# return 0, 0, 0
# calls ConvertMultiple with the previously grouped tokens
pairs, joe_bought_back, error_on_pairs = self._callConvertMultiple(groups_tokens0, groups_tokens1,
error_on_pairs)
# return pairs, joe_bought_back, error_on_pairs
return pairs, joe_bought_back, error_on_pairs
def decodeTxHash(self, tx_hashs):
tx_hashs = tx_hashs.split()
joe_bought_back_last7d = JoeSubGraph.getJoeBuyBackLast7d()
JM_ADDRESS_256 = "0x000000000000000000000000{}".format(self.joeMaker.address[2:].lower())
nb_tokens, joe_bought_back, pairs = 0, [], []
for tx_hash in tx_hashs:
logs = w3.eth.wait_for_transaction_receipt(tx_hash)["logs"]
for i in range(len(logs)):
if len(logs[i]["topics"]) > 2:
if (logs[i]["topics"][1].hex() == JM_ADDRESS_256
and logs[i]["topics"][2].hex() == JM_ADDRESS_256 and len(logs[i]["data"]) == 64 * 2 + 2):
pairTokens = [getSymbolOf(token) for token in getPairTokens(logs[i].address)]
pairs.append("{} - {}".format(pairTokens[0], pairTokens[1]))
if logs[i]["topics"][2].hex() == ZERO_ADDRESS_256:
if logs[i - 1]["topics"][1].hex() == ZERO_ADDRESS_256:
shift = 1
else:
shift = 0
if i > 2:
joe_bought_back.append(int("0x" + logs[i - shift - 2]["data"][-64:], 16) / 1e18)
nb_tokens += 1
joe_bought_back.append(int("0x" + logs[-1]["data"][-64:], 16) / 1e18)
joe_price = JoeSubGraph.getJoePrice()
sum_ = sum(joe_bought_back)
message = ["{} : {} $JOE".format(pair, readable(amount, 2)) for pair, amount in
zip(pairs, joe_bought_back)]
message.append("Total buyback: {} $JOE worth ${}".format(readable(sum_, 2),
readable(sum_ * joe_price, 2)))
message.append("Last 7 days buyback: {} $JOE worth ${}".format(
readable(joe_bought_back_last7d + sum_, 2),
readable((joe_bought_back_last7d + sum_) * joe_price, 2)))
if JoeSubGraph.getJoeBuyBackLast7d(True)[-1] == 0:
JoeSubGraph.addJoeBuyBackToLast7d(sum_, True)
return message
def getGroupsOf(tokens, size=20):
groups, temp = [], []
for i, data in enumerate(tokens):
temp.append(Web3.toChecksumAddress(data))
if (i + 1) % size == 0:
groups.append(temp)
temp = []
if temp:
groups.append(temp)
return groups
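# Illustrative example: passing 45 token addresses with the default size of 20
# yields three groups of 20, 20 and 5 checksummed addresses.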
# cache
symbolOf = {}
pairTokens = {}
def getSymbolOf(tokenAddress):
global symbolOf
if tokenAddress not in symbolOf:
symbolOf[tokenAddress] = w3.eth.contract(address=tokenAddress,
abi=Constants.ERC20_ABI).functions.symbol().call()
return symbolOf[tokenAddress]
def getPairTokens(pairAddress):
global pairTokens
if pairAddress not in pairTokens:
pair_contract = w3.eth.contract(address=pairAddress, abi=Constants.PAIR_ABI)
pairTokens[pairAddress] = (pair_contract.functions.token0().call(), pair_contract.functions.token1().call())
return pairTokens[pairAddress]
def decodeTransactionReceipt(transaction_receipt, tokens0, tokens1, pairs, joe_bought_back):
logs = transaction_receipt["logs"]
nb_tokens = 0
for i in range(len(logs)):
if len(logs[i]["topics"]) > 2 and logs[i]["topics"][2].hex() == ZERO_ADDRESS_256:
if logs[i - 1]["topics"][1].hex() == ZERO_ADDRESS_256:
shift = 1
else:
shift = 0
if i > 2:
joe_bought_back.append(int("0x" + logs[i - shift - 2]["data"][-64:], 16) / 1e18)
try:
pairs.append("{} - {}".format(getSymbolOf(tokens0[nb_tokens]), getSymbolOf(tokens1[nb_tokens])))
except IndexError:
pairs.append("0x" + logs[i - shift - 1]["topics"][-1].hex()[-40:])
nb_tokens += 1
joe_bought_back.append(int("0x" + logs[-1]["data"][-64:], 16) / 1e18)
return pairs, joe_bought_back
# Only executed if you run main.py
if __name__ == '__main__':
joeMaker = JoeMaker()
print("\n".join(joeMaker.decodeTxHash("0xb52c9d72aa4862f9310b9eca93ace03216ced0ee9a5009493cfe309de3c6db87 0x1defaa7b2e8e5ea2ed37878ce30ffdaed02920f769e9968f0eb59293f0bc1335 0x81c9746823905676cdbfe8ccc943135e48ae51d551d8875b1b19b0873f3707f9 0x33b0b9d6d53b92acb22579572e7bab59960d135d4104d28e096570a3489b7330 0x2aec7944968bee3c34d66c5d0e13c922012124524756df235ea1d723c69622e1 0xdaa69a6a17760350ca47b81edcd6998e0bbd92951218a288d36b2c570010edf5 0x2f1585cdb6e3582deed34d0c32ff7323d75ff06cf814747814cb79db1b5851d0 0x00c43f125ae79b11d2162710eb9a342f4d52ca66ae21d1c5bc17b06c8f0fd746 0x711c043c369a3763db4f1d6e9768ffaf4f2e8b1a1541703d07e9c304a4c955e2 0x1b4bb2779dcac3189d96efcb693e5d69ed96702e04feb9f0707cf7884a2f93d3 0x3101ff25bdd333508e8db2cc7e749f1ab6bbd9826663a163470f75c04be229b2 0xe9a12bdfb950be5d66a2be807e130750259a0f0142505949cc1849a3cece64c5 0x2b6e22f76b64dba96bb05c5e91d250f0519841a7dd646895a8101244cf0aa5aa")))
# joeMaker.changeToVersion("v1")
# print(joeMaker.callConvertMultiple(6000))
# print(w3.eth.get_transaction_receipt("0xa51a19ae77462e16f4a6aceb8d2d8b938e86ef52c1e0c392df938d36565ad89d"))
# print(sum(JoeSubGraph.getJoeMakerPostitions(10000, joeMaker.joeMaker.address, True)[3]))
| []
| []
| [
"PRIVATE_KEY"
]
| [] | ["PRIVATE_KEY"] | python | 1 | 0 | |
sagemaker-python-sdk/pytorch_lstm_word_language_model/source/train.py | # Based on github.com/pytorch/examples/blob/master/word_language_model
import argparse
import math
import os
from shutil import copy
import time
import torch
import torch.nn as nn
import data
from rnn import RNNModel
parser = argparse.ArgumentParser(description='PyTorch Wikitext-2 RNN/LSTM Language Model')
# Hyperparameters sent by the client are passed as command-line arguments to the script.
parser.add_argument('--emsize', type=int, default=200,
help='size of word embeddings')
parser.add_argument('--nhid', type=int, default=200,
help='number of hidden units per layer')
parser.add_argument('--nlayers', type=int, default=2,
help='number of layers')
parser.add_argument('--lr', type=float, default=20,
help='initial learning rate')
parser.add_argument('--clip', type=float, default=0.25,
help='gradient clipping')
parser.add_argument('--epochs', type=int, default=40,
help='upper epoch limit')
parser.add_argument('--batch_size', type=int, default=20, metavar='N',
help='batch size')
parser.add_argument('--bptt', type=int, default=35,
help='sequence length')
parser.add_argument('--dropout', type=float, default=0.2,
help='dropout applied to layers (0 = no dropout)')
parser.add_argument('--tied', type=bool, default=False,
help='tie the word embedding and softmax weights')
parser.add_argument('--seed', type=int, default=1111,
help='random seed')
parser.add_argument('--log-interval', type=int, default=200, metavar='N',
help='report interval')
# Data and model checkpoint/output directories from the container environment
parser.add_argument('--model-dir', type=str, default=os.environ['SM_MODEL_DIR'])
parser.add_argument('--output-data-dir', type=str, default=os.environ['SM_OUTPUT_DATA_DIR'])
parser.add_argument('--data-dir', type=str, default=os.environ['SM_CHANNEL_TRAINING'])
args = parser.parse_args()
print(args)
model_path = os.path.join(args.model_dir, 'model.pth')
model_info_path = os.path.join(args.model_dir, 'model_info.pth')
checkpoint_path = os.path.join(args.output_data_dir, 'model.pth')
checkpoint_state_path = os.path.join(args.output_data_dir, 'model_info.pth')
# Set the random seed manually for reproducibility.
torch.manual_seed(args.seed)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
###############################################################################
# Load data
###############################################################################
print('Load data')
corpus = data.Corpus(args.data_dir)
# Starting from sequential data, batchify arranges the dataset into columns.
# For instance, with the alphabet as the sequence and batch size 4, we'd get
# ┌ a g m s ┐
# │ b h n t │
# │ c i o u │
# │ d j p v │
# │ e k q w │
# └ f l r x ┘.
# These columns are treated as independent by the model, which means that the
# dependence of e.g. 'g' on 'f' cannot be learned, but allows more efficient
# batch processing.
def batchify(data, bsz):
# Work out how cleanly we can divide the dataset into bsz parts.
nbatch = data.size(0) // bsz
# Trim off any extra elements that wouldn't cleanly fit (remainders).
data = data.narrow(0, 0, nbatch * bsz)
# Evenly divide the data across the bsz batches.
data = data.view(bsz, -1).t().contiguous()
return data.to(device)
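# Illustrative sizing (round numbers chosen for the example, not taken from the
# dataset): a 1,000,000-token stream with batch_size=20 becomes a 50,000 x 20
# tensor, and any trailing tokens that do not fill a full column are trimmed.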
print('Batchify dataset')
eval_batch_size = 10
train_data = batchify(corpus.train, args.batch_size)
val_data = batchify(corpus.valid, eval_batch_size)
test_data = batchify(corpus.test, eval_batch_size)
###############################################################################
# Build the model
###############################################################################
print('Build the model')
ntokens = len(corpus.dictionary)
rnn_type = 'LSTM'
model = RNNModel(rnn_type, ntokens, args.emsize, args.nhid, args.nlayers, args.dropout, args.tied).to(device)
criterion = nn.CrossEntropyLoss()
# Save the data into model dir to be used with the model later
for file_name in os.listdir(args.data_dir):
full_file_name = os.path.join(args.data_dir, file_name)
if os.path.isfile(full_file_name):
copy(full_file_name, args.model_dir)
# Save arguments used to create model for restoring the model later
with open(model_info_path, 'wb') as f:
model_info = {
'rnn_type': rnn_type,
'ntoken': ntokens,
'ninp': args.emsize,
'nhid': args.nhid,
'nlayers': args.nlayers,
'dropout': args.dropout,
'tie_weights': args.tied
}
torch.save(model_info, f)
###############################################################################
# Training code
###############################################################################
def repackage_hidden(h):
"""Wraps hidden states in new Tensors, to detach them from their history."""
if isinstance(h, torch.Tensor):
return h.detach()
else:
return tuple(repackage_hidden(v) for v in h)
# get_batch subdivides the source data into chunks of length args.bptt.
# If source is equal to the example output of the batchify function, with
# a bptt-limit of 2, we'd get the following two Variables for i = 0:
# ┌ a g m s ┐ ┌ b h n t ┐
# └ b h n t ┘ └ c i o u ┘
# Note that despite the name of the function, the subdivision of data is not
# done along the batch dimension (i.e. dimension 1), since that was handled
# by the batchify function. The chunks are along dimension 0, corresponding
# to the seq_len dimension in the LSTM.
def get_batch(source, i):
seq_len = min(args.bptt, len(source) - 1 - i)
data = source[i:i+seq_len]
target = source[i+1:i+1+seq_len].view(-1)
return data, target
def evaluate(data_source):
# Turn on evaluation mode which disables dropout.
model.eval()
total_loss = 0.
hidden = model.init_hidden(eval_batch_size)
with torch.no_grad():
for i in range(0, data_source.size(0) - 1, args.bptt):
data, targets = get_batch(data_source, i)
output, hidden = model(data, hidden)
output_flat = output.view(-1, ntokens)
total_loss += len(data) * criterion(output_flat, targets).item()
hidden = repackage_hidden(hidden)
return total_loss / len(data_source)
def train():
# Turn on training mode which enables dropout.
model.train()
total_loss = 0.
start_time = time.time()
hidden = model.init_hidden(args.batch_size)
for batch, i in enumerate(range(0, train_data.size(0) - 1, args.bptt)):
data, targets = get_batch(train_data, i)
# Starting each batch, we detach the hidden state from how it was previously produced.
# If we didn't, the model would try backpropagating all the way to start of the dataset.
hidden = repackage_hidden(hidden)
model.zero_grad()
output, hidden = model(data, hidden)
loss = criterion(output.view(-1, ntokens), targets)
loss.backward()
        # `clip_grad_norm_` helps prevent the exploding gradient problem in RNNs / LSTMs.
        torch.nn.utils.clip_grad_norm_(model.parameters(), args.clip)
for p in model.parameters():
p.data.add_(-lr, p.grad.data)
total_loss += loss.item()
if batch % args.log_interval == 0 and batch > 0:
cur_loss = total_loss / args.log_interval
elapsed = time.time() - start_time
print('| epoch {:3d} | {:5d}/{:5d} batches | lr {:02.2f} | ms/batch {:5.2f} | '
'loss {:5.2f} | ppl {:8.2f}'.format(
epoch, batch, len(train_data) // args.bptt, lr,
elapsed * 1000 / args.log_interval, cur_loss, math.exp(cur_loss)))
total_loss = 0
start_time = time.time()
# Loop over epochs.
lr = args.lr
best_state = None
print('Starting training.')
for epoch in range(1, args.epochs+1):
epoch_start_time = time.time()
train()
val_loss = evaluate(val_data)
print('-' * 89)
print('| end of epoch {:3d} | time: {:5.2f}s | valid loss {:5.2f} | '
'valid ppl {:8.2f}'.format(epoch, (time.time() - epoch_start_time),
val_loss, math.exp(val_loss)))
print('-' * 89)
# Save the model if the validation loss is the best we've seen so far.
if not best_state or val_loss < best_state['val_loss']:
best_state = {
'epoch': epoch,
'lr': lr,
'val_loss': val_loss,
'val_ppl': math.exp(val_loss),
}
print('Saving the best model: {}'.format(best_state))
with open(checkpoint_path, 'wb') as f:
torch.save(model.state_dict(), f)
with open(checkpoint_state_path, 'w') as f:
f.write('epoch {:3d} | lr: {:5.2f} | valid loss {:5.2f} | '
'valid ppl {:8.2f}'.format(epoch, lr, val_loss, math.exp(val_loss)))
else:
# Anneal the learning rate if no improvement has been seen in the validation dataset.
lr /= 4.0
# Load the best saved model.
with open(checkpoint_path, 'rb') as f:
model.load_state_dict(torch.load(f))
    # after loading, the RNN params are not a contiguous chunk of memory;
    # this makes them contiguous, which speeds up the forward pass
model.rnn.flatten_parameters()
# Run on test data.
test_loss = evaluate(test_data)
print('=' * 89)
print('| End of training | test loss {:5.2f} | test ppl {:8.2f}'.format(
test_loss, math.exp(test_loss)))
print('=' * 89)
# Move the best model to cpu and resave it
with open(model_path, 'wb') as f:
torch.save(model.cpu().state_dict(), f)
| []
| []
| [
"SM_CHANNEL_TRAINING",
"SM_MODEL_DIR",
"SM_OUTPUT_DATA_DIR"
]
| [] | ["SM_CHANNEL_TRAINING", "SM_MODEL_DIR", "SM_OUTPUT_DATA_DIR"] | python | 3 | 0 | |
ludwig/utils/misc.py | #! /usr/bin/env python
# coding=utf-8
# Copyright (c) 2019 Uber Technologies, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
import copy
import os
import random
import subprocess
import sys
from collections import OrderedDict, Mapping
import numpy
import ludwig.globals
def get_experiment_description(model_definition,
dataset_type='generic',
data_csv=None,
data_hdf5=None,
metadata_json=None,
data_train_csv=None,
data_validation_csv=None,
data_test_csv=None,
data_train_hdf5=None,
data_validation_hdf5=None,
data_test_hdf5=None,
random_seed=None):
description = OrderedDict()
description['ludwig_version'] = ludwig.globals.LUDWIG_VERSION
description['command'] = ' '.join(sys.argv)
try:
is_a_git_repo = subprocess.call(['git', 'branch'],
stderr=subprocess.STDOUT,
stdout=open(os.devnull, 'w')) == 0
if is_a_git_repo:
description['commit_hash'] = \
subprocess.check_output(['git', 'rev-parse', 'HEAD']).decode(
'utf-8')[:12]
except:
pass
description['dataset_type'] = dataset_type
if random_seed is not None:
description['random_seed'] = random_seed
if data_csv is not None:
description['input_data'] = data_csv
elif data_hdf5 is not None and metadata_json is not None:
description['input_data'] = data_hdf5
description['input_metadata'] = metadata_json
elif data_train_csv is not None:
description['input_data_train'] = data_train_csv
if data_validation_csv is not None:
description['input_data_validation'] = data_validation_csv
if data_test_csv is not None:
description['input_data_test'] = data_test_csv
elif data_train_hdf5 is not None and metadata_json is not None:
description['input_data_train'] = data_train_hdf5
if data_validation_hdf5 is not None:
description['input_data_validation'] = data_validation_hdf5
if data_test_hdf5 is not None:
description['input_data_test'] = data_test_hdf5
description['input_metadata'] = metadata_json
description['model_definition'] = model_definition
return description
def set_random_seed(random_seed):
os.environ['PYTHONHASHSEED'] = str(random_seed)
random.seed(random_seed)
numpy.random.seed(random_seed)
def merge_dict(dct, merge_dct):
""" Recursive dict merge. Inspired by :meth:``dict.update()``, instead of
updating only top-level keys, dict_merge recurses down into dicts nested
to an arbitrary depth, updating keys. The ``merge_dct`` is merged into
``dct``.
:param dct: dict onto which the merge is executed
:param merge_dct: dct merged into dct
:return: None
"""
dct = copy.deepcopy(dct)
for k, v in merge_dct.items():
if (k in dct and isinstance(dct[k], dict)
and isinstance(merge_dct[k], Mapping)):
dct[k] = merge_dict(dct[k], merge_dct[k])
else:
dct[k] = merge_dct[k]
return dct
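# Illustrative example (arbitrary values):
#   merge_dict({'a': {'b': 1}, 'x': 0}, {'a': {'c': 2}})
#   -> {'a': {'b': 1, 'c': 2}, 'x': 0}
# Neither argument is mutated, since the merge works on a deep copy of `dct`.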
def sum_dicts(dicts, dict_type=dict):
summed_dict = dict_type()
for d in dicts:
for key, value in d.items():
if key in summed_dict:
prev_value = summed_dict[key]
if isinstance(value, (dict, OrderedDict)):
summed_dict[key] = sum_dicts([prev_value, value],
dict_type=type(value))
elif isinstance(value, numpy.ndarray):
summed_dict[key] = numpy.concatenate((prev_value, value))
else:
summed_dict[key] = prev_value + value
else:
summed_dict[key] = value
return summed_dict
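# Illustrative example (arbitrary values):
#   sum_dicts([{'loss': 1.0, 'stats': {'n': 2}}, {'loss': 0.5, 'stats': {'n': 3}}])
#   -> {'loss': 1.5, 'stats': {'n': 5}}
# numpy arrays are concatenated rather than added, per the ndarray branch above.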
def get_from_registry(key, registry):
if hasattr(key, 'lower'):
key = key.lower()
if key in registry:
return registry[key]
else:
raise ValueError(
'Key {} not supported, available options: {}'.format(
key, registry.keys()
)
)
def set_default_value(dictionary, key, value):
if key not in dictionary:
dictionary[key] = value
| []
| []
| [
"PYTHONHASHSEED"
]
| [] | ["PYTHONHASHSEED"] | python | 1 | 0 | |
aiohttp/helpers.py | """Various helper functions"""
import asyncio
import base64
import binascii
import cgi
import dataclasses
import datetime
import enum
import functools
import netrc
import os
import platform
import re
import sys
import time
import warnings
import weakref
from collections import namedtuple
from contextlib import suppress
from email.utils import parsedate
from http.cookies import SimpleCookie
from math import ceil
from pathlib import Path
from types import TracebackType
from typing import (
Any,
Callable,
Dict,
Generator,
Generic,
Iterable,
Iterator,
List,
Mapping,
Optional,
Pattern,
Tuple,
Type,
TypeVar,
Union,
cast,
)
from urllib.parse import quote
from urllib.request import getproxies, proxy_bypass
import async_timeout
from multidict import CIMultiDict, MultiDict, MultiDictProxy
from typing_extensions import Protocol, final
from yarl import URL
from . import hdrs
from .log import client_logger
from .typedefs import PathLike # noqa
__all__ = ("BasicAuth", "ChainMapProxy", "ETag")
PY_38 = sys.version_info >= (3, 8)
PY_310 = sys.version_info >= (3, 10)
COOKIE_MAX_LENGTH = 4096
try:
from typing import ContextManager
except ImportError:
from typing_extensions import ContextManager
_T = TypeVar("_T")
_S = TypeVar("_S")
_SENTINEL = enum.Enum("_SENTINEL", "sentinel")
sentinel = _SENTINEL.sentinel
NO_EXTENSIONS = bool(os.environ.get("AIOHTTP_NO_EXTENSIONS")) # type: bool
# N.B. sys.flags.dev_mode is available on Python 3.7+, use getattr
# for compatibility with older versions
DEBUG = getattr(sys.flags, "dev_mode", False) or (
not sys.flags.ignore_environment and bool(os.environ.get("PYTHONASYNCIODEBUG"))
) # type: bool
CHAR = {chr(i) for i in range(0, 128)}
CTL = {chr(i) for i in range(0, 32)} | {
chr(127),
}
SEPARATORS = {
"(",
")",
"<",
">",
"@",
",",
";",
":",
"\\",
'"',
"/",
"[",
"]",
"?",
"=",
"{",
"}",
" ",
chr(9),
}
TOKEN = CHAR ^ CTL ^ SEPARATORS
class noop:
def __await__(self) -> Generator[None, None, None]:
yield
if PY_38:
iscoroutinefunction = asyncio.iscoroutinefunction
else:
def iscoroutinefunction(func: Any) -> bool:
while isinstance(func, functools.partial):
func = func.func
return asyncio.iscoroutinefunction(func)
json_re = re.compile(r"(?:application/|[\w.-]+/[\w.+-]+?\+)json$", re.IGNORECASE)
class BasicAuth(namedtuple("BasicAuth", ["login", "password", "encoding"])):
"""Http basic authentication helper."""
def __new__(
cls, login: str, password: str = "", encoding: str = "latin1"
) -> "BasicAuth":
if login is None:
raise ValueError("None is not allowed as login value")
if password is None:
raise ValueError("None is not allowed as password value")
if ":" in login:
raise ValueError('A ":" is not allowed in login (RFC 1945#section-11.1)')
return super().__new__(cls, login, password, encoding)
@classmethod
def decode(cls, auth_header: str, encoding: str = "latin1") -> "BasicAuth":
"""Create a BasicAuth object from an Authorization HTTP header."""
try:
auth_type, encoded_credentials = auth_header.split(" ", 1)
except ValueError:
raise ValueError("Could not parse authorization header.")
if auth_type.lower() != "basic":
raise ValueError("Unknown authorization method %s" % auth_type)
try:
decoded = base64.b64decode(
encoded_credentials.encode("ascii"), validate=True
).decode(encoding)
except binascii.Error:
raise ValueError("Invalid base64 encoding.")
try:
# RFC 2617 HTTP Authentication
# https://www.ietf.org/rfc/rfc2617.txt
# the colon must be present, but the username and password may be
# otherwise blank.
username, password = decoded.split(":", 1)
except ValueError:
raise ValueError("Invalid credentials.")
return cls(username, password, encoding=encoding)
@classmethod
def from_url(cls, url: URL, *, encoding: str = "latin1") -> Optional["BasicAuth"]:
"""Create BasicAuth from url."""
if not isinstance(url, URL):
raise TypeError("url should be yarl.URL instance")
if url.user is None:
return None
return cls(url.user, url.password or "", encoding=encoding)
def encode(self) -> str:
"""Encode credentials."""
creds = (f"{self.login}:{self.password}").encode(self.encoding)
return "Basic %s" % base64.b64encode(creds).decode(self.encoding)
def strip_auth_from_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
auth = BasicAuth.from_url(url)
if auth is None:
return url, None
else:
return url.with_user(None), auth
def netrc_from_env() -> Optional[netrc.netrc]:
"""Load netrc from file.
Attempt to load it from the path specified by the env-var
NETRC or in the default location in the user's home directory.
Returns None if it couldn't be found or fails to parse.
"""
netrc_env = os.environ.get("NETRC")
if netrc_env is not None:
netrc_path = Path(netrc_env)
else:
try:
home_dir = Path.home()
except RuntimeError as e: # pragma: no cover
# if pathlib can't resolve home, it may raise a RuntimeError
client_logger.debug(
"Could not resolve home directory when "
"trying to look for .netrc file: %s",
e,
)
return None
netrc_path = home_dir / (
"_netrc" if platform.system() == "Windows" else ".netrc"
)
try:
return netrc.netrc(str(netrc_path))
except netrc.NetrcParseError as e:
client_logger.warning("Could not parse .netrc file: %s", e)
except OSError as e:
# we couldn't read the file (doesn't exist, permissions, etc.)
if netrc_env or netrc_path.is_file():
# only warn if the environment wanted us to load it,
# or it appears like the default file does actually exist
client_logger.warning("Could not read .netrc file: %s", e)
return None
@dataclasses.dataclass(frozen=True)
class ProxyInfo:
proxy: URL
proxy_auth: Optional[BasicAuth]
def proxies_from_env() -> Dict[str, ProxyInfo]:
proxy_urls = {
k: URL(v)
for k, v in getproxies().items()
if k in ("http", "https", "ws", "wss")
}
netrc_obj = netrc_from_env()
stripped = {k: strip_auth_from_url(v) for k, v in proxy_urls.items()}
ret = {}
for proto, val in stripped.items():
proxy, auth = val
if proxy.scheme in ("https", "wss"):
client_logger.warning(
"%s proxies %s are not supported, ignoring", proxy.scheme.upper(), proxy
)
continue
if netrc_obj and auth is None:
auth_from_netrc = None
if proxy.host is not None:
auth_from_netrc = netrc_obj.authenticators(proxy.host)
if auth_from_netrc is not None:
# auth_from_netrc is a (`user`, `account`, `password`) tuple,
# `user` and `account` both can be username,
# if `user` is None, use `account`
*logins, password = auth_from_netrc
login = logins[0] if logins[0] else logins[-1]
auth = BasicAuth(cast(str, login), cast(str, password))
ret[proto] = ProxyInfo(proxy, auth)
return ret
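# Illustrative behaviour (hypothetical environment, not read here): with
# HTTP_PROXY=http://u:p@proxy.local:3128 set, proxies_from_env()["http"] is
# ProxyInfo(URL("http://proxy.local:3128"), BasicAuth("u", "p")) - the
# credentials are stripped from the URL and returned separately.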
def get_env_proxy_for_url(url: URL) -> Tuple[URL, Optional[BasicAuth]]:
"""Get a permitted proxy for the given URL from the env."""
if url.host is not None and proxy_bypass(url.host):
raise LookupError(f"Proxying is disallowed for `{url.host!r}`")
proxies_in_env = proxies_from_env()
try:
proxy_info = proxies_in_env[url.scheme]
except KeyError:
raise LookupError(f"No proxies found for `{url!s}` in the env")
else:
return proxy_info.proxy, proxy_info.proxy_auth
@dataclasses.dataclass(frozen=True)
class MimeType:
type: str
subtype: str
suffix: str
parameters: "MultiDictProxy[str]"
@functools.lru_cache(maxsize=56)
def parse_mimetype(mimetype: str) -> MimeType:
"""Parses a MIME type into its components.
mimetype is a MIME type string.
Returns a MimeType object.
Example:
>>> parse_mimetype('text/html; charset=utf-8')
MimeType(type='text', subtype='html', suffix='',
parameters={'charset': 'utf-8'})
"""
if not mimetype:
return MimeType(
type="", subtype="", suffix="", parameters=MultiDictProxy(MultiDict())
)
parts = mimetype.split(";")
params = MultiDict() # type: MultiDict[str]
for item in parts[1:]:
if not item:
continue
key, _, value = item.partition("=")
params.add(key.lower().strip(), value.strip(' "'))
fulltype = parts[0].strip().lower()
if fulltype == "*":
fulltype = "*/*"
mtype, _, stype = fulltype.partition("/")
stype, _, suffix = stype.partition("+")
return MimeType(
type=mtype, subtype=stype, suffix=suffix, parameters=MultiDictProxy(params)
)
def guess_filename(obj: Any, default: Optional[str] = None) -> Optional[str]:
name = getattr(obj, "name", None)
if name and isinstance(name, str) and name[0] != "<" and name[-1] != ">":
return Path(name).name
return default
not_qtext_re = re.compile(r"[^\041\043-\133\135-\176]")
QCONTENT = {chr(i) for i in range(0x20, 0x7F)} | {"\t"}
def quoted_string(content: str) -> str:
"""Return 7-bit content as quoted-string.
Format content into a quoted-string as defined in RFC5322 for
Internet Message Format. Notice that this is not the 8-bit HTTP
    format, but the 7-bit email format. Content must be in US-ASCII or
a ValueError is raised.
"""
if not (QCONTENT > set(content)):
raise ValueError(f"bad content for quoted-string {content!r}")
return not_qtext_re.sub(lambda x: "\\" + x.group(0), content)
def content_disposition_header(
disptype: str, quote_fields: bool = True, _charset: str = "utf-8", **params: str
) -> str:
"""Sets ``Content-Disposition`` header for MIME.
This is the MIME payload Content-Disposition header from RFC 2183
    and RFC 7578 section 4.2, not the HTTP Content-Disposition from
RFC 6266.
disptype is a disposition type: inline, attachment, form-data.
Should be valid extension token (see RFC 2183)
quote_fields performs value quoting to 7-bit MIME headers
    according to RFC 7578. Set quote_fields to False if the recipient
can take 8-bit file names and field values.
_charset specifies the charset to use when quote_fields is True.
params is a dict with disposition params.
"""
if not disptype or not (TOKEN > set(disptype)):
raise ValueError("bad content disposition type {!r}" "".format(disptype))
value = disptype
if params:
lparams = []
for key, val in params.items():
if not key or not (TOKEN > set(key)):
raise ValueError(
"bad content disposition parameter" " {!r}={!r}".format(key, val)
)
if quote_fields:
if key.lower() == "filename":
qval = quote(val, "", encoding=_charset)
lparams.append((key, '"%s"' % qval))
else:
try:
qval = quoted_string(val)
except ValueError:
qval = "".join(
(_charset, "''", quote(val, "", encoding=_charset))
)
lparams.append((key + "*", qval))
else:
lparams.append((key, '"%s"' % qval))
else:
qval = val.replace("\\", "\\\\").replace('"', '\\"')
lparams.append((key, '"%s"' % qval))
sparams = "; ".join("=".join(pair) for pair in lparams)
value = "; ".join((value, sparams))
return value
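# Illustrative example (arbitrary field and file names):
#   content_disposition_header("form-data", name="field", filename="na me.txt")
#   -> 'form-data; name="field"; filename="na%20me.txt"'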
def is_expected_content_type(
response_content_type: str, expected_content_type: str
) -> bool:
"""Checks if received content type is processable as an expected one.
Both arguments should be given without parameters.
"""
if expected_content_type == "application/json":
return json_re.match(response_content_type) is not None
return expected_content_type in response_content_type
class _TSelf(Protocol, Generic[_T]):
_cache: Dict[str, _T]
class reify(Generic[_T]):
"""Use as a class method decorator.
It operates almost exactly like
the Python `@property` decorator, but it puts the result of the
method it decorates into the instance dict after the first call,
effectively replacing the function it decorates with an instance
variable. It is, in Python parlance, a data descriptor.
"""
def __init__(self, wrapped: Callable[..., _T]) -> None:
self.wrapped = wrapped
self.__doc__ = wrapped.__doc__
self.name = wrapped.__name__
def __get__(self, inst: _TSelf[_T], owner: Optional[Type[Any]] = None) -> _T:
try:
try:
return inst._cache[self.name]
except KeyError:
val = self.wrapped(inst)
inst._cache[self.name] = val
return val
except AttributeError:
if inst is None:
return self
raise
def __set__(self, inst: _TSelf[_T], value: _T) -> None:
raise AttributeError("reified property is read-only")
reify_py = reify
try:
from ._helpers import reify as reify_c
if not NO_EXTENSIONS:
reify = reify_c # type: ignore[misc,assignment]
except ImportError:
pass
_ipv4_pattern = (
r"^(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}"
r"(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)$"
)
_ipv6_pattern = (
r"^(?:(?:(?:[A-F0-9]{1,4}:){6}|(?=(?:[A-F0-9]{0,4}:){0,6}"
r"(?:[0-9]{1,3}\.){3}[0-9]{1,3}$)(([0-9A-F]{1,4}:){0,5}|:)"
r"((:[0-9A-F]{1,4}){1,5}:|:)|::(?:[A-F0-9]{1,4}:){5})"
r"(?:(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])\.){3}"
r"(?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9])|(?:[A-F0-9]{1,4}:){7}"
r"[A-F0-9]{1,4}|(?=(?:[A-F0-9]{0,4}:){0,7}[A-F0-9]{0,4}$)"
r"(([0-9A-F]{1,4}:){1,7}|:)((:[0-9A-F]{1,4}){1,7}|:)|(?:[A-F0-9]{1,4}:){7}"
r":|:(:[A-F0-9]{1,4}){7})$"
)
_ipv4_regex = re.compile(_ipv4_pattern)
_ipv6_regex = re.compile(_ipv6_pattern, flags=re.IGNORECASE)
_ipv4_regexb = re.compile(_ipv4_pattern.encode("ascii"))
_ipv6_regexb = re.compile(_ipv6_pattern.encode("ascii"), flags=re.IGNORECASE)
def _is_ip_address(
regex: Pattern[str], regexb: Pattern[bytes], host: Optional[Union[str, bytes]]
) -> bool:
if host is None:
return False
if isinstance(host, str):
return bool(regex.match(host))
elif isinstance(host, (bytes, bytearray, memoryview)):
return bool(regexb.match(host))
else:
raise TypeError(f"{host} [{type(host)}] is not a str or bytes")
is_ipv4_address = functools.partial(_is_ip_address, _ipv4_regex, _ipv4_regexb)
is_ipv6_address = functools.partial(_is_ip_address, _ipv6_regex, _ipv6_regexb)
def is_ip_address(host: Optional[Union[str, bytes, bytearray, memoryview]]) -> bool:
return is_ipv4_address(host) or is_ipv6_address(host)
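# Illustrative checks: is_ip_address("127.0.0.1") and is_ip_address("::1") are
# True; is_ip_address("example.com") and is_ip_address(None) are False.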
def next_whole_second() -> datetime.datetime:
"""Return current time rounded up to the next whole second."""
return datetime.datetime.now(datetime.timezone.utc).replace(
microsecond=0
) + datetime.timedelta(seconds=0)
_cached_current_datetime = None # type: Optional[int]
_cached_formatted_datetime = ""
def rfc822_formatted_time() -> str:
global _cached_current_datetime
global _cached_formatted_datetime
now = int(time.time())
if now != _cached_current_datetime:
# Weekday and month names for HTTP date/time formatting;
# always English!
# Tuples are constants stored in codeobject!
_weekdayname = ("Mon", "Tue", "Wed", "Thu", "Fri", "Sat", "Sun")
_monthname = (
"", # Dummy so we can use 1-based month numbers
"Jan",
"Feb",
"Mar",
"Apr",
"May",
"Jun",
"Jul",
"Aug",
"Sep",
"Oct",
"Nov",
"Dec",
)
year, month, day, hh, mm, ss, wd, *tail = time.gmtime(now)
_cached_formatted_datetime = "%s, %02d %3s %4d %02d:%02d:%02d GMT" % (
_weekdayname[wd],
day,
_monthname[month],
year,
hh,
mm,
ss,
)
_cached_current_datetime = now
return _cached_formatted_datetime
def _weakref_handle(info: "Tuple[weakref.ref[object], str]") -> None:
ref, name = info
ob = ref()
if ob is not None:
with suppress(Exception):
getattr(ob, name)()
def weakref_handle(
ob: object, name: str, timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
if timeout is not None and timeout > 0:
when = loop.time() + timeout
if timeout >= 5:
when = ceil(when)
return loop.call_at(when, _weakref_handle, (weakref.ref(ob), name))
return None
def call_later(
cb: Callable[[], Any], timeout: float, loop: asyncio.AbstractEventLoop
) -> Optional[asyncio.TimerHandle]:
if timeout is not None and timeout > 0:
when = loop.time() + timeout
if timeout > 5:
when = ceil(when)
return loop.call_at(when, cb)
return None
class TimeoutHandle:
"""Timeout handle"""
def __init__(
self, loop: asyncio.AbstractEventLoop, timeout: Optional[float]
) -> None:
self._timeout = timeout
self._loop = loop
self._callbacks = (
[]
) # type: List[Tuple[Callable[..., None], Tuple[Any, ...], Dict[str, Any]]]
def register(
self, callback: Callable[..., None], *args: Any, **kwargs: Any
) -> None:
self._callbacks.append((callback, args, kwargs))
def close(self) -> None:
self._callbacks.clear()
def start(self) -> Optional[asyncio.Handle]:
timeout = self._timeout
if timeout is not None and timeout > 0:
when = self._loop.time() + timeout
if timeout >= 5:
when = ceil(when)
return self._loop.call_at(when, self.__call__)
else:
return None
def timer(self) -> "BaseTimerContext":
if self._timeout is not None and self._timeout > 0:
timer = TimerContext(self._loop)
self.register(timer.timeout)
return timer
else:
return TimerNoop()
def __call__(self) -> None:
for cb, args, kwargs in self._callbacks:
with suppress(Exception):
cb(*args, **kwargs)
self._callbacks.clear()
class BaseTimerContext(ContextManager["BaseTimerContext"]):
pass
class TimerNoop(BaseTimerContext):
def __enter__(self) -> BaseTimerContext:
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> None:
return
class TimerContext(BaseTimerContext):
"""Low resolution timeout context manager"""
def __init__(self, loop: asyncio.AbstractEventLoop) -> None:
self._loop = loop
self._tasks = [] # type: List[asyncio.Task[Any]]
self._cancelled = False
def __enter__(self) -> BaseTimerContext:
task = asyncio.current_task(loop=self._loop)
if task is None:
raise RuntimeError(
"Timeout context manager should be used " "inside a task"
)
if self._cancelled:
raise asyncio.TimeoutError from None
self._tasks.append(task)
return self
def __exit__(
self,
exc_type: Optional[Type[BaseException]],
exc_val: Optional[BaseException],
exc_tb: Optional[TracebackType],
) -> Optional[bool]:
if self._tasks:
self._tasks.pop()
if exc_type is asyncio.CancelledError and self._cancelled:
raise asyncio.TimeoutError from None
return None
def timeout(self) -> None:
if not self._cancelled:
for task in set(self._tasks):
task.cancel()
self._cancelled = True
def ceil_timeout(delay: Optional[float]) -> async_timeout.Timeout:
if delay is None or delay <= 0:
return async_timeout.timeout(None)
loop = asyncio.get_running_loop()
now = loop.time()
when = now + delay
if delay > 5:
when = ceil(when)
return async_timeout.timeout_at(when)
class HeadersMixin:
__slots__ = ("_content_type", "_content_dict", "_stored_content_type")
def __init__(self) -> None:
super().__init__()
self._content_type = None # type: Optional[str]
self._content_dict = None # type: Optional[Dict[str, str]]
self._stored_content_type: Union[str, _SENTINEL] = sentinel
def _parse_content_type(self, raw: str) -> None:
self._stored_content_type = raw
if raw is None:
# default value according to RFC 2616
self._content_type = "application/octet-stream"
self._content_dict = {}
else:
self._content_type, self._content_dict = cgi.parse_header(raw)
@property
def content_type(self) -> str:
"""The value of content part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_type # type: ignore[return-value]
@property
def charset(self) -> Optional[str]:
"""The value of charset part for Content-Type HTTP header."""
raw = self._headers.get(hdrs.CONTENT_TYPE) # type: ignore[attr-defined]
if self._stored_content_type != raw:
self._parse_content_type(raw)
return self._content_dict.get("charset") # type: ignore[union-attr]
@property
def content_length(self) -> Optional[int]:
"""The value of Content-Length HTTP header."""
content_length = self._headers.get( # type: ignore[attr-defined]
hdrs.CONTENT_LENGTH
)
if content_length is not None:
return int(content_length)
else:
return None
def set_result(fut: "asyncio.Future[_T]", result: _T) -> None:
if not fut.done():
fut.set_result(result)
def set_exception(fut: "asyncio.Future[_T]", exc: BaseException) -> None:
if not fut.done():
fut.set_exception(exc)
@final
class ChainMapProxy(Mapping[str, Any]):
__slots__ = ("_maps",)
def __init__(self, maps: Iterable[Mapping[str, Any]]) -> None:
self._maps = tuple(maps)
def __init_subclass__(cls) -> None:
raise TypeError(
"Inheritance class {} from ChainMapProxy "
"is forbidden".format(cls.__name__)
)
def __getitem__(self, key: str) -> Any:
for mapping in self._maps:
try:
return mapping[key]
except KeyError:
pass
raise KeyError(key)
def get(self, key: str, default: Any = None) -> Any:
return self[key] if key in self else default
def __len__(self) -> int:
# reuses stored hash values if possible
return len(set().union(*self._maps)) # type: ignore[arg-type]
def __iter__(self) -> Iterator[str]:
d = {} # type: Dict[str, Any]
for mapping in reversed(self._maps):
# reuses stored hash values if possible
d.update(mapping)
return iter(d)
def __contains__(self, key: object) -> bool:
return any(key in m for m in self._maps)
def __bool__(self) -> bool:
return any(self._maps)
def __repr__(self) -> str:
content = ", ".join(map(repr, self._maps))
return f"ChainMapProxy({content})"
class CookieMixin:
def __init__(self) -> None:
super().__init__()
self._cookies = SimpleCookie() # type: SimpleCookie[str]
@property
def cookies(self) -> "SimpleCookie[str]":
return self._cookies
def set_cookie(
self,
name: str,
value: str,
*,
expires: Optional[str] = None,
domain: Optional[str] = None,
max_age: Optional[Union[int, str]] = None,
path: str = "/",
secure: Optional[bool] = None,
httponly: Optional[bool] = None,
version: Optional[str] = None,
samesite: Optional[str] = None,
) -> None:
"""Set or update response cookie.
        Sets a new cookie or updates an existing one with the new value.
        Only parameters that are not None are updated.
"""
old = self._cookies.get(name)
if old is not None and old.coded_value == "":
# deleted cookie
self._cookies.pop(name, None)
self._cookies[name] = value
c = self._cookies[name]
if expires is not None:
c["expires"] = expires
elif c.get("expires") == "Thu, 01 Jan 1970 00:00:00 GMT":
del c["expires"]
if domain is not None:
c["domain"] = domain
if max_age is not None:
c["max-age"] = str(max_age)
elif "max-age" in c:
del c["max-age"]
c["path"] = path
if secure is not None:
c["secure"] = secure
if httponly is not None:
c["httponly"] = httponly
if version is not None:
c["version"] = version
if samesite is not None:
c["samesite"] = samesite
if DEBUG:
cookie_length = len(c.output(header="")[1:])
if cookie_length > COOKIE_MAX_LENGTH:
warnings.warn(
"The size of is too large, it might get ignored by the client.",
UserWarning,
stacklevel=2,
)
def del_cookie(
self, name: str, *, domain: Optional[str] = None, path: str = "/"
) -> None:
"""Delete cookie.
Creates new empty expired cookie.
"""
# TODO: do we need domain/path here?
self._cookies.pop(name, None)
self.set_cookie(
name,
"",
max_age=0,
expires="Thu, 01 Jan 1970 00:00:00 GMT",
domain=domain,
path=path,
)
def populate_with_cookies(
headers: "CIMultiDict[str]", cookies: "SimpleCookie[str]"
) -> None:
for cookie in cookies.values():
value = cookie.output(header="")[1:]
headers.add(hdrs.SET_COOKIE, value)
# https://tools.ietf.org/html/rfc7232#section-2.3
_ETAGC = r"[!#-}\x80-\xff]+"
_ETAGC_RE = re.compile(_ETAGC)
_QUOTED_ETAG = fr'(W/)?"({_ETAGC})"'
QUOTED_ETAG_RE = re.compile(_QUOTED_ETAG)
LIST_QUOTED_ETAG_RE = re.compile(fr"({_QUOTED_ETAG})(?:\s*,\s*|$)|(.)")
ETAG_ANY = "*"
@dataclasses.dataclass(frozen=True)
class ETag:
value: str
is_weak: bool = False
def validate_etag_value(value: str) -> None:
if value != ETAG_ANY and not _ETAGC_RE.fullmatch(value):
raise ValueError(
f"Value {value!r} is not a valid etag. Maybe it contains '\"'?"
)
def parse_http_date(date_str: Optional[str]) -> Optional[datetime.datetime]:
"""Process a date string, return a datetime object"""
if date_str is not None:
timetuple = parsedate(date_str)
if timetuple is not None:
with suppress(ValueError):
return datetime.datetime(*timetuple[:6], tzinfo=datetime.timezone.utc)
return None
| []
| []
| [
"PYTHONASYNCIODEBUG",
"AIOHTTP_NO_EXTENSIONS",
"NETRC"
]
| [] | ["PYTHONASYNCIODEBUG", "AIOHTTP_NO_EXTENSIONS", "NETRC"] | python | 3 | 0 | |
ripley/test/python/run_readWriteOnRipley.py |
##############################################################################
#
# Copyright (c) 2003-2018 by The University of Queensland
# http://www.uq.edu.au
#
# Primary Business: Queensland, Australia
# Licensed under the Apache License, version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
#
# Development until 2012 by Earth Systems Science Computational Center (ESSCC)
# Development 2012-2013 by School of Earth Sciences
# Development from 2014 by Centre for Geoscience Computing (GeoComp)
#
##############################################################################
from __future__ import print_function, division
__copyright__="""Copyright (c) 2003-2018 by The University of Queensland
http://www.uq.edu.au
Primary Business: Queensland, Australia"""
__license__="""Licensed under the Apache License, version 2.0
http://www.apache.org/licenses/LICENSE-2.0"""
__url__="https://launchpad.net/escript-finley"
import os
import numpy as np
import esys.escriptcore.utestselect as unittest
from esys.escriptcore.testing import *
from esys.escript import *
from esys.ripley import *
try:
RIPLEY_WORKDIR=os.environ['RIPLEY_WORKDIR']
except KeyError:
RIPLEY_WORKDIR='/tmp'
HAVE_UNZIP = hasFeature('unzip')
#NE=4 # number elements, must be even
#for x in [int(sqrt(mpiSize)),2,3,5,7,1]:
# NX=x
# NY=mpiSize//x
# if NX*NY == mpiSize:
# break
#
#for x in [(int(mpiSize**(1/3.)),int(mpiSize**(1/3.))),(2,3),(2,2),(1,2),(1,1)]:
# NXb=x[0]
# NYb=x[1]
# NZb=mpiSize//(x[0]*x[1])
# if NXb*NYb*NZb == mpiSize:
# break
mpiSize = getMPISizeWorld()
mpiRank = getMPIRankWorld()
def adjust(NE, ftype):
if ftype in (ContinuousFunction, Solution):
return [i+1 for i in NE]
return NE
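# e.g. adjust([9, 10], ContinuousFunction) -> [10, 11] (node counts are one more
# than the element counts), while element-based spaces such as ReducedFunction
# keep [9, 10] unchanged.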
class WriteBinaryGridTestBase(unittest.TestCase): #subclassing required
NX = 10*mpiSize-1
NZ = 10
def generateUniqueData(self, ftype):
dim = self.domain.getDim()
FSx=ftype(self.domain).getX()
NE = adjust(self.NE, ftype)
# normalise and scale range of values
x = [FSx[i]-inf(FSx[i]) for i in range(dim)]
x = [(NE[i]-1)*(x[i]/sup(x[i])) for i in range(dim)]
xMax = [int(sup(x[i]))+1 for i in range(dim)]
nvals=NE[0]*NE[1]
data = x[0] + xMax[0]*x[1]
if self.datatype == DATATYPE_INT32:
data += 0.05
if dim > 2:
data = data + xMax[0]*xMax[1]*x[2]
nvals*=NE[2]
grid = np.array(range(nvals), dtype=self.dtype).reshape(tuple(reversed(NE)))
return data, grid
def writeThenRead(self, data, ftype, fcode):
filename = os.path.join(RIPLEY_WORKDIR, "_wgrid%dd%s"%(self.domain.getDim(),fcode))
filename = filename + self.dtype.replace('<','L').replace('>','B')
self.domain.writeBinaryGrid(data, filename, self.byteorder, self.datatype)
MPIBarrierWorld()
result = np.fromfile(filename, dtype=self.dtype).reshape(
tuple(reversed(adjust(self.NE,ftype))))
return result
def test_writeGrid2D(self):
self.NE = [self.NX, self.NZ]
self.domain = Rectangle(self.NE[0], self.NE[1], d1=0)
for ftype,fcode in [(ReducedFunction,'RF'), (ContinuousFunction,'CF'), (Solution, 'Sol')]:
data, ref = self.generateUniqueData(ftype)
result = self.writeThenRead(data, ftype, fcode)
self.assertAlmostEqual(Lsup(ref-result), 0, delta=1e-9,
msg="Data doesn't match for "+str(ftype(self.domain)))
def test_writeGrid3D(self):
self.NE = [self.NX, self.NX, self.NZ]
self.domain = Brick(self.NE[0], self.NE[1], self.NE[2], d2=0)
for ftype,fcode in [(ReducedFunction,'RF'), (ContinuousFunction,'CF'), (Solution, 'Sol')]:
data, ref = self.generateUniqueData(ftype)
result = self.writeThenRead(data, ftype, fcode)
self.assertAlmostEqual(Lsup(ref-result), 0, delta=1e-9,
msg="Data doesn't match for "+str(ftype(self.domain)))
class Test_writeBinaryGridRipley_LITTLE_FLOAT32(WriteBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_LITTLE_ENDIAN
self.datatype = DATATYPE_FLOAT32
self.dtype = "<f4"
class Test_writeBinaryGridRipley_LITTLE_FLOAT64(WriteBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_LITTLE_ENDIAN
self.datatype = DATATYPE_FLOAT64
self.dtype = "<f8"
class Test_writeBinaryGridRipley_LITTLE_INT32(WriteBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_LITTLE_ENDIAN
self.datatype = DATATYPE_INT32
self.dtype = "<i4"
class Test_writeBinaryGridRipley_BIG_FLOAT32(WriteBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_BIG_ENDIAN
self.datatype = DATATYPE_FLOAT32
self.dtype = ">f4"
class Test_writeBinaryGridRipley_BIG_FLOAT64(WriteBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_BIG_ENDIAN
self.datatype = DATATYPE_FLOAT64
self.dtype = ">f8"
class Test_writeBinaryGridRipley_BIG_INT32(WriteBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_BIG_ENDIAN
self.datatype = DATATYPE_INT32
self.dtype = ">i4"
class ReadBinaryGridTestBase(unittest.TestCase): #subclassing required
"""
The reader tests work in several stages:
1) create numpy array and write to temporary file (ref)
2) call readBinaryGrid with that filename
3) write the resulting Data object using writeBinaryGrid (test)
4) read the result using numpy and compare (ref) and (test)
As such, it is important to note that a working writeBinaryGrid() method
is assumed!
"""
# set defaults which may be overridden in subclasses
NX = 10
NZ = 8
fspaces = [(ReducedFunction,'RF'), (ContinuousFunction,'CF')]
byteorder = BYTEORDER_NATIVE
datatype = DATATYPE_FLOAT64
dtype = "f8"
shape = ()
fill = -42.57
first = [0,0,0]
multiplier = [1,1,1]
reverse = [0,0,0]
def generateUniqueData(self, ftype):
dim = self.domain.getDim()
NE = adjust(self.Ndata, ftype)
nvals=NE[0]*NE[1]
if dim > 2:
nvals*=NE[2]
grid = np.array(range(nvals), dtype=self.dtype).reshape(tuple(reversed(NE)))
return grid
def write(self, data, filename):
self.domain.writeBinaryGrid(data, filename, self.byteorder, self.datatype)
def read(self, filename, ftype):
first = self.first[:self.domain.getDim()]
multiplier = self.multiplier[:self.domain.getDim()]
reverse = self.reverse[:self.domain.getDim()]
numValues=adjust(self.Ndata, ftype)
return readBinaryGrid(filename, ftype(self.domain),
shape=self.shape, fill=self.fill, byteOrder=self.byteorder,
dataType=self.datatype, first=first, numValues=numValues,
multiplier=multiplier, reverse=reverse)
def numpy2Data2Numpy(self, ref, ftype, fcode):
filename = os.path.join(RIPLEY_WORKDIR, "_rgrid%dd%s"%(self.domain.getDim(),fcode))
filename = filename + self.dtype.replace('<','L').replace('>','B')
if mpiRank == 0:
ref.tofile(filename)
MPIBarrierWorld()
# step 2 - read
data = self.read(filename, ftype)
MPIBarrierWorld()
# step 3 - write
self.write(data, filename) # overwrite is ok
MPIBarrierWorld()
result = np.fromfile(filename, dtype=self.dtype).reshape(
tuple(reversed(adjust(self.NE,ftype))))
return result
def test_readGrid2D(self):
if self.multiplier[0] == 1:
self.NE = [self.NX*mpiSize-1, self.NZ*self.multiplier[1]]
else:
self.NE = [self.NX*mpiSize*self.multiplier[0]-1, self.NZ*self.multiplier[1]]
self.domain = Rectangle(self.NE[0], self.NE[1], d0=mpiSize, d1=1)
for ftype,fcode in self.fspaces:
self.Ndata = [self.NX*mpiSize-1, self.NZ]
if ftype==ContinuousFunction:
self.Ndata[1] = self.NZ-1
# step 1 - generate
ref = self.generateUniqueData(ftype)
# step 2 & 3
result = self.numpy2Data2Numpy(ref, ftype, fcode)
# apply transformations to be able to compare
if self.reverse[0]:
result = result[...,::-1]
if self.reverse[1]:
result = result[::-1,:]
for i in range(2):
ref = np.repeat(ref, self.multiplier[i], axis=1-i)
# if domain larger than data: add column(s)/row(s) with fill value
fill=np.array(self.fill, dtype=ref.dtype)
realNE = adjust(self.NE,ftype)
for d in range(2):
excess = realNE[d]-ref.shape[1-d]
if excess > 0:
shape = list(ref.shape)
shape[1-d] = excess
extra = fill * np.ones(shape)
if self.reverse[d]:
ref = np.append(extra, ref, axis=1-d)
else:
ref = np.append(ref, extra, axis=1-d)
# step 4 - compare
self.assertAlmostEqual(Lsup(ref-result), 0, delta=1e-9,
msg="Data doesn't match for "+str(ftype(self.domain)))
def test_readGrid3D(self):
if self.multiplier[0] == 1:
self.NE = [self.NX*mpiSize-1, self.NX*self.multiplier[1], self.NZ*self.multiplier[2]]
else:
self.NE = [self.NX*mpiSize*self.multiplier[0]-1,
self.NX*self.multiplier[1], self.NZ*self.multiplier[2]]
self.domain = Brick(self.NE[0], self.NE[1], self.NE[2], d0=mpiSize, d1=1, d2=1)
for ftype,fcode in self.fspaces:
self.Ndata = [self.NX*mpiSize-1, self.NX, self.NZ]
if ftype==ContinuousFunction:
self.Ndata[1] = self.NX-1
self.Ndata[2] = self.NZ-1
# step 1 - generate
ref = self.generateUniqueData(ftype)
# step 2 & 3
result = self.numpy2Data2Numpy(ref, ftype, fcode)
# apply transformations to be able to compare
if self.reverse[0]:
result = result[...,::-1]
if self.reverse[1]:
result = result[...,::-1,:]
if self.reverse[2]:
result = result[::-1,:,:]
for i in range(3):
ref = np.repeat(ref, self.multiplier[i], axis=2-i)
# if domain larger than data: add column(s)/row(s) with fill value
fill=np.array(self.fill, dtype=ref.dtype)
realNE = adjust(self.NE,ftype)
for d in range(3):
excess = realNE[d]-ref.shape[2-d]
if excess > 0:
shape = list(ref.shape)
shape[2-d] = excess
extra = fill * np.ones(shape)
if self.reverse[d]:
ref = np.append(extra, ref, axis=2-d)
else:
ref = np.append(ref, extra, axis=2-d)
# step 4 - compare
self.assertAlmostEqual(Lsup(ref-result), 0, delta=1e-9,
msg="Data doesn't match for "+str(ftype(self.domain)))
# The following block tests the reader for different byte orders and data
# types with domain-filling data (i.e. multiplier=1, reverse=0 and N=NE)
class Test_readBinaryGridRipley_LITTLE_FLOAT32(ReadBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_LITTLE_ENDIAN
self.datatype = DATATYPE_FLOAT32
self.dtype = "<f4"
class Test_readBinaryGridRipley_LITTLE_FLOAT64(ReadBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_LITTLE_ENDIAN
self.datatype = DATATYPE_FLOAT64
self.dtype = "<f8"
class Test_readBinaryGridRipley_LITTLE_INT32(ReadBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_LITTLE_ENDIAN
self.datatype = DATATYPE_INT32
self.dtype = "<i4"
class Test_readBinaryGridRipley_BIG_FLOAT32(ReadBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_BIG_ENDIAN
self.datatype = DATATYPE_FLOAT32
self.dtype = ">f4"
class Test_readBinaryGridRipley_BIG_FLOAT64(ReadBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_BIG_ENDIAN
self.datatype = DATATYPE_FLOAT64
self.dtype = ">f8"
class Test_readBinaryGridRipley_BIG_INT32(ReadBinaryGridTestBase):
def setUp(self):
self.byteorder = BYTEORDER_BIG_ENDIAN
self.datatype = DATATYPE_INT32
self.dtype = ">i4"
@unittest.skip("reverseX not supported yet")
class Test_readBinaryGridRipley_reverseX(ReadBinaryGridTestBase):
def setUp(self):
self.reverse = [1,0,0]
@unittest.skip("reverseY not supported yet")
class Test_readBinaryGridRipley_reverseY(ReadBinaryGridTestBase):
def setUp(self):
self.reverse = [0,1,0]
class Test_readBinaryGridRipley_reverseZ(ReadBinaryGridTestBase):
def setUp(self):
self.reverse = [0,0,1]
class Test_readBinaryGridRipley_multiplierX(ReadBinaryGridTestBase):
def setUp(self):
self.multiplier = [2,1,1]
class Test_readBinaryGridRipley_multiplierY(ReadBinaryGridTestBase):
def setUp(self):
self.multiplier = [1,2,1]
class Test_readBinaryGridRipley_multiplierZ(ReadBinaryGridTestBase):
def setUp(self):
self.multiplier = [1,1,2]
class Test_readBinaryGridRipley_multiplierXYZ(ReadBinaryGridTestBase):
def setUp(self):
self.multiplier = [2,3,4]
@unittest.skipIf(getMPISizeWorld() > 1,
"Skipping compressed binary grid tests due to element stretching")
class Test_readBinaryGridZippedRipley(unittest.TestCase):
# constants
byteorder = BYTEORDER_NATIVE
datatype = DATATYPE_FLOAT64
def read(self, filename, FS, expected, zipped = False):
first = [0 for i in expected]
reverse = [0 for i in expected]
scale = [1 for i in expected]
if not zipped:
return readBinaryGrid(filename, FS, (), 50000,
self.byteorder, self.datatype, first, expected, scale, reverse)
if not HAVE_UNZIP:
raise unittest.SkipTest("unzip library not available (boost_iostreams)")
return ripleycpp._readBinaryGridFromZipped(filename, FS, (), 50000,
self.byteorder, self.datatype, first, expected, scale, reverse)
def test_readCompressed2D(self):
NE = [9, 10]
domain = Rectangle(NE[0], NE[1], d1=0)
for filename, ftype in [("RectRedF%s.grid.gz", ReducedFunction),
("RectConF%s.grid.gz", ContinuousFunction)]:
FS = ftype(domain)
filename = os.path.join("ref_data", filename%mpiSize)
unzipped = self.read(filename[:-3], FS, adjust(NE, ftype))
zipped = self.read(filename, FS, adjust(NE, ftype), True)
self.assertEqual(Lsup(zipped - unzipped), 0, "Data objects don't match for "+str(FS))
def test_readCompressed3D(self):
NE = [9, 9, 10]
domain = Brick(NE[0], NE[1], NE[2], d1=0, d2=0)
for filename, ftype in [("BrickRedF%s.grid.gz", ReducedFunction),
("BrickConF%s.grid.gz", ContinuousFunction)]:
FS = ftype(domain)
filename = os.path.join("ref_data", filename%mpiSize)
unzipped = self.read(filename[:-3], FS, adjust(NE, ftype))
zipped = self.read(filename, FS, adjust(NE, ftype), True)
self.assertEqual(Lsup(zipped - unzipped), 0, "Data objects don't match for "+str(FS))
if __name__ == '__main__':
run_tests(__name__, exit_on_failure=True)
| []
| []
| [
"RIPLEY_WORKDIR"
]
| [] | ["RIPLEY_WORKDIR"] | python | 1 | 0 | |
cmd/junction/main.go | package main
import (
"log"
"net"
"net/http"
"os"
"github.com/codegangsta/cli"
"github.com/travis-ci/junction/database"
junctionhttp "github.com/travis-ci/junction/http"
"github.com/travis-ci/junction/junction"
)
func main() {
app := cli.NewApp()
app.Name = "junction"
app.Usage = "Start the Junction HTTP server"
app.Action = runJunction
app.Flags = []cli.Flag{
cli.StringFlag{
Name: "addr",
Usage: "TCP address to listen on",
Value: func() string {
v := ":" + os.Getenv("PORT")
if v == ":" {
// Bind to a random port
v = ":0"
}
return v
}(),
EnvVar: "JUNCTION_ADDR",
},
cli.StringSliceFlag{
Name: "worker-token",
Usage: "List of tokens to use for workers",
EnvVar: "JUNCTION_WORKER_TOKENS",
},
cli.StringSliceFlag{
Name: "admin-token",
Usage: "List of tokens to use for admins",
EnvVar: "JUNCTION_ADMIN_TOKENS",
},
cli.StringFlag{
Name: "database-url",
Usage: "URL to Postgres database to connect to",
EnvVar: "JUNCTION_DATABASE_URL,DATABASE_URL",
},
cli.IntFlag{
Name: "database-max-pool-size",
Usage: "The maximum number of open connection to keep for the Postgres database",
Value: 10,
EnvVar: "JUNCTION_DATABASE_MAX_POOL_SIZE",
},
}
app.Run(os.Args)
}
func runJunction(c *cli.Context) {
database, err := database.NewPostgres(&database.PostgresConfig{
URL: c.String("database-url"),
MaxOpenConns: c.Int("database-max-pool-size"),
})
if err != nil {
log.Fatalf("Error initializing database: %s", err)
}
coreConfig := &junction.CoreConfig{
Database: database,
WorkerTokens: c.StringSlice("worker-token"),
AdminTokens: c.StringSlice("admin-token"),
}
core, err := junction.NewCore(coreConfig)
if err != nil {
log.Fatalf("Error initializing core: %s", err)
}
server := &http.Server{
Handler: junctionhttp.Handler(core),
}
listener, err := net.Listen("tcp", c.String("addr"))
if err != nil {
log.Fatalf("Error listening on TCP: %s", err)
}
log.Printf("Listening on %s", listener.Addr().String())
err = server.Serve(listener)
if err != nil {
log.Fatalf("Error serving on HTTP: %s", err)
}
}
| [
"\"PORT\""
]
| []
| [
"PORT"
]
| [] | ["PORT"] | go | 1 | 0 | |
src/pkg/reg/adapter/tencentcr/adapter.go | package tencentcr
import (
"errors"
"fmt"
"net/http"
"net/url"
"os"
"path"
"strconv"
"strings"
"github.com/docker/distribution/registry/client/auth/challenge"
commonhttp "github.com/goharbor/harbor/src/common/http"
"github.com/goharbor/harbor/src/lib/log"
adp "github.com/goharbor/harbor/src/pkg/reg/adapter"
"github.com/goharbor/harbor/src/pkg/reg/adapter/native"
"github.com/goharbor/harbor/src/pkg/reg/model"
"github.com/goharbor/harbor/src/pkg/registry/auth/bearer"
"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common"
"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/profile"
"github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/common/regions"
tcr "github.com/tencentcloud/tencentcloud-sdk-go/tencentcloud/tcr/v20190924"
)
var (
errInvalidTcrEndpoint error = errors.New("[tencent-tcr.newAdapter] Invalid TCR instance endpoint")
errPingTcrEndpointFailed error = errors.New("[tencent-tcr.newAdapter] Ping TCR instance endpoint failed")
)
func init() {
var envTcrQPSLimit, _ = strconv.Atoi(os.Getenv("TCR_QPS_LIMIT"))
if envTcrQPSLimit > 1 && envTcrQPSLimit < tcrQPSLimit {
tcrQPSLimit = envTcrQPSLimit
}
if err := adp.RegisterFactory(model.RegistryTypeTencentTcr, new(factory)); err != nil {
log.Errorf("failed to register factory for %s: %v", model.RegistryTypeTencentTcr, err)
return
}
log.Infof("the factory for adapter %s registered", model.RegistryTypeTencentTcr)
}
type factory struct{}
/**
* Implement Factory Interface
**/
var _ adp.Factory = &factory{}
// Create ...
func (f *factory) Create(r *model.Registry) (adp.Adapter, error) {
return newAdapter(r)
}
// AdapterPattern ...
func (f *factory) AdapterPattern() *model.AdapterPattern {
return getAdapterInfo()
}
func getAdapterInfo() *model.AdapterPattern {
return &model.AdapterPattern{}
}
type adapter struct {
*native.Adapter
registryID *string
regionName *string
tcrClient *tcr.Client
pageSize *int64
client *commonhttp.Client
registry *model.Registry
}
/**
* Implement Adapter Interface
**/
var _ adp.Adapter = &adapter{}
func newAdapter(registry *model.Registry) (a *adapter, err error) {
if !isSecretID(registry.Credential.AccessKey) {
err = errors.New("[tencent-tcr.newAdapter] Please use SecretId/SecretKey, NOT docker login Username/Password")
log.Debugf("[tencent-tcr.newAdapter] error=%v", err)
return
}
// Query TCR instance info via endpoint.
var registryURL *url.URL
registryURL, _ = url.Parse(registry.URL)
// only validate registryURL.Host in non-UT scenario
if os.Getenv("UTTEST") != "true" {
if strings.Index(registryURL.Host, ".tencentcloudcr.com") < 0 {
log.Errorf("[tencent-tcr.newAdapter] errInvalidTcrEndpoint=%v", err)
return nil, errInvalidTcrEndpoint
}
}
realm, service, err := ping(registry)
log.Debugf("[tencent-tcr.newAdapter] realm=%s, service=%s error=%v", realm, service, err)
if err != nil {
log.Errorf("[tencent-tcr.newAdapter] ping failed. error=%v", err)
return
}
// Create TCR API client
var tcrCredential = common.NewCredential(registry.Credential.AccessKey, registry.Credential.AccessSecret)
var cfp = profile.NewClientProfile()
var client *tcr.Client
// temp client used to get TCR instance info
client, err = tcr.NewClient(tcrCredential, regions.Guangzhou, cfp)
if err != nil {
return
}
var req = tcr.NewDescribeInstancesRequest()
req.AllRegion = common.BoolPtr(true)
req.Filters = []*tcr.Filter{
{
Name: common.StringPtr("RegistryName"),
Values: []*string{common.StringPtr(strings.ReplaceAll(registryURL.Host, ".tencentcloudcr.com", ""))},
},
}
var resp = tcr.NewDescribeInstancesResponse()
resp, err = client.DescribeInstances(req)
if err != nil {
log.Errorf("DescribeInstances error=%s", err.Error())
return
}
if *resp.Response.TotalCount == 0 {
err = fmt.Errorf("[tencent-tcr.newAdapter] Can not get TCR instance info. RequestId=%s", *resp.Response.RequestId)
return
}
var instanceInfo = resp.Response.Registries[0]
log.Debugf("[tencent-tcr.InstanceInfo] registry.URL=%s, host=%s, PublicDomain=%s, RegionName=%s, RegistryId=%s",
registry.URL, registryURL.Host, *instanceInfo.PublicDomain, *instanceInfo.RegionName, *instanceInfo.RegistryId)
// rebuild TCR SDK client
client = &tcr.Client{}
client.Init(*instanceInfo.RegionName).
WithCredential(tcrCredential).
WithProfile(cfp).
WithHttpTransport(newRateLimitedTransport(tcrQPSLimit, http.DefaultTransport))
if err != nil {
return
}
var credential = NewAuth(instanceInfo.RegistryId, client)
var transport = commonhttp.GetHTTPTransport(commonhttp.WithInsecure(registry.Insecure))
var authorizer = bearer.NewAuthorizer(realm, service, credential, transport)
return &adapter{
registry: registry,
registryID: instanceInfo.RegistryId,
regionName: instanceInfo.RegionName,
tcrClient: client,
pageSize: common.Int64Ptr(20),
client: commonhttp.NewClient(
&http.Client{
Transport: transport,
},
credential,
),
Adapter: native.NewAdapterWithAuthorizer(registry, authorizer),
}, nil
}
func ping(registry *model.Registry) (string, string, error) {
client := &http.Client{
Transport: commonhttp.GetHTTPTransport(commonhttp.WithInsecure(registry.Insecure)),
}
resp, err := client.Get(registry.URL + "/v2/")
log.Debugf("[tencent-tcr.ping] error=%v", err)
if err != nil {
return "", "", err
}
defer resp.Body.Close()
challenges := challenge.ResponseChallenges(resp)
for _, challenge := range challenges {
if challenge.Scheme == "bearer" {
return challenge.Parameters["realm"], challenge.Parameters["service"], nil
}
}
return "", "", fmt.Errorf("[tencent-tcr.ping] bearer auth scheme isn't supported: %v", challenges)
}
func (a *adapter) Info() (info *model.RegistryInfo, err error) {
info = &model.RegistryInfo{
Type: model.RegistryTypeTencentTcr,
SupportedResourceTypes: []string{
model.ResourceTypeImage,
model.ResourceTypeChart,
},
SupportedResourceFilters: []*model.FilterStyle{
{
Type: model.FilterTypeName,
Style: model.FilterStyleTypeText,
},
{
Type: model.FilterTypeTag,
Style: model.FilterStyleTypeText,
},
},
SupportedTriggers: []string{
model.TriggerTypeManual,
model.TriggerTypeScheduled,
},
}
return
}
func (a *adapter) PrepareForPush(resources []*model.Resource) (err error) {
log.Debugf("[tencent-tcr.PrepareForPush]")
for _, resource := range resources {
if resource == nil {
return errors.New("the resource cannot be null")
}
if resource.Metadata == nil {
return errors.New("[tencent-tcr.PrepareForPush] the metadata of resource cannot be null")
}
if resource.Metadata.Repository == nil {
return errors.New("[tencent-tcr.PrepareForPush] the namespace of resource cannot be null")
}
if len(resource.Metadata.Repository.Name) == 0 {
return errors.New("[tencent-tcr.PrepareForPush] the name of the namespace cannot be null")
}
var paths = strings.Split(resource.Metadata.Repository.Name, "/")
var namespace = paths[0]
var repository = path.Join(paths[1:]...)
log.Debugf("[tencent-tcr.PrepareForPush.createPrivateNamespace] namespace=%s", namespace)
err = a.createPrivateNamespace(namespace)
if err != nil {
return
}
log.Debugf("[tencent-tcr.PrepareForPush.createRepository] namespace=%s, repository=%s", namespace, repository)
err = a.createRepository(namespace, repository)
if err != nil {
return
}
return
}
return
}
| [
"\"TCR_QPS_LIMIT\"",
"\"UTTEST\""
]
| []
| [
"UTTEST",
"TCR_QPS_LIMIT"
]
| [] | ["UTTEST", "TCR_QPS_LIMIT"] | go | 2 | 0 | |
plc4go/internal/plc4go/spi/PlcDiscoverer.go | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package spi
import (
"github.com/apache/plc4x/plc4go/internal/plc4go/spi/options"
"github.com/apache/plc4x/plc4go/pkg/plc4go/model"
)
type PlcDiscoverer interface {
Discover(callback func(event model.PlcDiscoveryEvent), discoveryOptions ...options.WithDiscoveryOption) error
}
| []
| []
| []
| [] | [] | go | null | null | null |
src/main/java/dev/jbang/cli/Edit.java | package dev.jbang.cli;
import static dev.jbang.Settings.CP_SEPARATOR;
import static dev.jbang.cli.BaseBuildCommand.escapeOSArguments;
import static dev.jbang.util.Util.isWindows;
import static dev.jbang.util.Util.verboseMsg;
import static java.lang.System.out;
import java.io.File;
import java.io.IOException;
import java.nio.charset.Charset;
import java.nio.file.FileSystems;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.StandardWatchEventKinds;
import java.nio.file.WatchEvent;
import java.nio.file.WatchKey;
import java.nio.file.WatchService;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.Optional;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import dev.jbang.Cache;
import dev.jbang.Settings;
import dev.jbang.dependencies.DependencyUtil;
import dev.jbang.dependencies.JitPackUtil;
import dev.jbang.dependencies.MavenRepo;
import dev.jbang.net.EditorManager;
import dev.jbang.source.RefTarget;
import dev.jbang.source.RunContext;
import dev.jbang.source.ScriptSource;
import dev.jbang.source.Source;
import dev.jbang.util.ConsoleInput;
import dev.jbang.util.TemplateEngine;
import dev.jbang.util.Util;
import io.quarkus.qute.Template;
import picocli.CommandLine;
@CommandLine.Command(name = "edit", description = "Setup a temporary project to edit script in an IDE.")
public class Edit extends BaseScriptDepsCommand {
@CommandLine.Option(names = {
"--live" }, description = "Setup temporary project, regenerate project on dependency changes.")
boolean live;
@CommandLine.Option(names = {
"--open" }, description = "Opens editor/IDE on the temporary project.", defaultValue = "${JBANG_EDITOR:-}", preprocessor = StrictParameterPreprocessor.class)
Optional<String> editor;
@CommandLine.Option(names = { "--no-open" })
boolean noOpen;
@Override
public Integer doCall() throws IOException {
if (insecure) {
enableInsecure();
}
RunContext ctx = RunContext.create(null, null,
dependencyInfoMixin.getProperties(),
dependencyInfoMixin.getDependencies(),
dependencyInfoMixin.getClasspaths(),
forcejsh);
Source src = Source.forResource(scriptOrFile, ctx);
if (!(src instanceof ScriptSource)) {
throw new ExitException(EXIT_INVALID_INPUT, "You can only edit source files");
}
ScriptSource ssrc = (ScriptSource) src;
File project = createProjectForEdit(ssrc, ctx, false);
// err.println(project.getAbsolutePath());
if (!noOpen && editor.isPresent()) {
if (editor.get().isEmpty()) {
askAndInstallEditor();
}
if ("gitpod".equals(editor.get()) && System.getenv("GITPOD_WORKSPACE_URL") != null) {
info("Open this url to edit the project in your gitpod session:\n\n"
+ System.getenv("GITPOD_WORKSPACE_URL") + "#" + project.getAbsolutePath() + "\n\n");
} else {
List<String> optionList = new ArrayList<>();
optionList.add(editor.get());
optionList.add(project.getAbsolutePath());
String[] cmd;
final String editorCommand = escapeOSArguments(optionList).stream().collect(Collectors.joining(" "));
if (isWindows()) {
cmd = new String[] { "cmd", "/c", editorCommand };
} else {
cmd = new String[] { "sh", "-c", editorCommand };
}
info("Running `" + String.join(" ", cmd) + "`");
new ProcessBuilder(cmd).start();
}
}
if (!live) {
out.println(project.getAbsolutePath()); // quit(project.getAbsolutePath());
} else {
try (final WatchService watchService = FileSystems.getDefault().newWatchService()) {
File orginalFile = src.getResourceRef().getFile();
if (!orginalFile.exists()) {
throw new ExitException(2, "Cannot live edit " + src.getResourceRef().getOriginalResource());
}
Path watched = orginalFile.getAbsoluteFile().getParentFile().toPath();
watched.register(watchService,
StandardWatchEventKinds.ENTRY_MODIFY);
info("Watching for changes in " + watched);
while (true) {
final WatchKey wk = watchService.take();
for (WatchEvent<?> event : wk.pollEvents()) {
// we only register "ENTRY_MODIFY" so the context is always a Path.
// but relative to the watched directory
final Path changed = watched.resolve((Path) event.context());
verboseMsg("Changed file: " + changed.toString());
if (Files.isSameFile(orginalFile.toPath(), changed)) {
try {
// TODO only regenerate when dependencies changes.
info("Regenerating project.");
ctx = RunContext.empty();
src = Source.forResource(scriptOrFile, ctx);
createProjectForEdit((ScriptSource) src, ctx, true);
} catch (RuntimeException ee) {
warn("Error when re-generating project. Ignoring it, but state might be undefined: "
+ ee.getMessage());
}
}
}
// reset the key
boolean valid = wk.reset();
if (!valid) {
warn("edit-live file watch key no longer valid!");
}
}
} catch (InterruptedException e) {
warn("edit-live interrupted");
}
}
return EXIT_OK;
}
private void askAndInstallEditor() throws IOException {
File editorBinPath = EditorManager.getVSCodiumBinPath().toFile();
File dataPath = EditorManager.getVSCodiumDataPath().toFile();
Path editorPath = EditorManager.getVSCodiumPath();
editor = Optional.of(editorBinPath.getAbsolutePath());
if (!editorBinPath.exists()) {
String question = "You requested to open default editor but no default editor configured." +
"\n" +
"jbang can download and configure a visual studio code with Java support to use\n" +
"See https://vscodium.com for details\n" +
"\n" +
"Do you want jbang to download VSCodium for you into " + editorPath + " ? \n\n" +
"0) Yes, please." +
"\n\n" +
"Any other response will result in exit.\n";
ConsoleInput con = new ConsoleInput(
1,
10,
TimeUnit.SECONDS);
Util.infoMsg(question);
Util.infoMsg("Type in your choice (0) and hit enter. Times out after 10 seconds.");
String input = con.readLine();
boolean abort = true;
try {
int result = Integer.parseInt(input);
if (result == 0) {
abort = false;
}
} catch (NumberFormatException ef) {
Util.errorMsg("Could not parse answer as a number. Aborting");
}
if (abort)
throw new ExitException(10,
"No default editor configured and automatic download not accepted.\n Please try again accepting the download or use an explicit editor, i.e. `jbang edit --open=eclipse xyz.java`");
editorPath = EditorManager.downloadAndInstallEditor();
if (!dataPath.exists()) {
verboseMsg("Making portable data path " + dataPath.toString());
dataPath.mkdirs();
}
verboseMsg("Installing Java extensions...");
ProcessBuilder pb = new ProcessBuilder(editor.get(),
"--install-extension", "redhat.java",
"--install-extension", "vscjava.vscode-java-debug",
"--install-extension", "vscjava.vscode-java-test",
"--install-extension", "vscjava.vscode-java-dependency");
pb.inheritIO();
Process process = pb.start();
try {
int exit = process.waitFor();
if (exit > 0) {
throw new ExitException(EXIT_INTERNAL_ERROR,
"Could not install and setup extensions into vscodium. Aborting.");
}
} catch (InterruptedException e) {
e.printStackTrace();
}
}
}
/** Create Project to use for editing **/
File createProjectForEdit(ScriptSource src, RunContext ctx, boolean reload) throws IOException {
File originalFile = src.getResourceRef().getFile();
List<String> dependencies = ctx.collectAllDependenciesFor(src);
String cp = ctx.resolveClassPath(src);
List<String> resolvedDependencies = Arrays.asList(cp.split(CP_SEPARATOR));
File baseDir = Settings.getCacheDir(Cache.CacheClass.projects).toFile();
String name = originalFile.getName();
name = Util.unkebabify(name);
File tmpProjectDir = new File(baseDir, name + "_jbang_" +
Util.getStableID(originalFile.getAbsolutePath()));
tmpProjectDir.mkdirs();
tmpProjectDir = new File(tmpProjectDir, stripPrefix(name));
tmpProjectDir.mkdirs();
File srcDir = new File(tmpProjectDir, "src");
srcDir.mkdir();
Path srcFile = srcDir.toPath().resolve(name);
Util.createLink(srcFile, originalFile.toPath());
for (ScriptSource source : src.getAllSources()) {
File sfile = null;
if (source.getJavaPackage().isPresent()) {
File packageDir = new File(srcDir, source.getJavaPackage().get().replace(".", File.separator));
packageDir.mkdirs();
sfile = new File(packageDir, source.getResourceRef().getFile().getName());
} else {
sfile = new File(srcDir, source.getResourceRef().getFile().getName());
}
Path destFile = source.getResourceRef().getFile().toPath().toAbsolutePath();
Util.createLink(sfile.toPath(), destFile);
}
for (RefTarget ref : src.getAllFiles()) {
File target = ref.to(srcDir.toPath()).toFile();
target.getParentFile().mkdirs();
Util.createLink(target.toPath(), ref.getSource().getFile().toPath().toAbsolutePath());
}
// create build gradle
Optional<String> packageName = Util.getSourcePackage(
new String(Files.readAllBytes(srcFile), Charset.defaultCharset()));
String baseName = Util.getBaseName(name);
String fullClassName;
if (packageName.isPresent()) {
fullClassName = packageName.get() + "." + baseName;
} else {
fullClassName = baseName;
}
String templateName = "build.qute.gradle";
Path destination = new File(tmpProjectDir, "build.gradle").toPath();
TemplateEngine engine = TemplateEngine.instance();
// both collectDependencies and repositories are manipulated by
// resolveDependencies
List<MavenRepo> repositories = src.getAllRepositories();
if (repositories.isEmpty()) {
repositories.add(DependencyUtil.toMavenRepo("mavencentral"));
}
// Turn any URL dependencies into regular GAV coordinates
List<String> depIds = dependencies
.stream()
.map(JitPackUtil::ensureGAV)
.collect(Collectors.toList());
// And if we encountered URLs let's make sure the JitPack repo is available
if (!depIds.equals(dependencies)
&& !repositories.stream().anyMatch(r -> DependencyUtil.REPO_JITPACK.equals(r.getUrl()))) {
repositories.add(DependencyUtil.toMavenRepo(DependencyUtil.ALIAS_JITPACK));
}
renderTemplate(engine, depIds, fullClassName, baseName, resolvedDependencies, repositories,
templateName,
ctx.getArguments(),
destination);
// setup eclipse
templateName = ".qute.classpath";
destination = new File(tmpProjectDir, ".classpath").toPath();
renderTemplate(engine, dependencies, fullClassName, baseName, resolvedDependencies, repositories,
templateName,
ctx.getArguments(),
destination);
templateName = ".qute.project";
destination = new File(tmpProjectDir, ".project").toPath();
renderTemplate(engine, dependencies, fullClassName, baseName, resolvedDependencies, repositories,
templateName,
ctx.getArguments(),
destination);
templateName = "main.qute.launch";
destination = new File(tmpProjectDir, ".eclipse/" + baseName + ".launch").toPath();
destination.toFile().getParentFile().mkdirs();
renderTemplate(engine, dependencies, fullClassName, baseName, resolvedDependencies, repositories,
templateName,
ctx.getArguments(),
destination);
templateName = "main-port-4004.qute.launch";
destination = new File(tmpProjectDir, ".eclipse/" + baseName + "-port-4004.launch").toPath();
renderTemplate(engine, dependencies, fullClassName, baseName, resolvedDependencies, repositories,
templateName,
ctx.getArguments(),
destination);
// setup vscode
templateName = "launch.qute.json";
destination = new File(tmpProjectDir, ".vscode/launch.json").toPath();
if (isNeeded(reload, destination)) {
destination.toFile().getParentFile().mkdirs();
renderTemplate(engine, dependencies, fullClassName, baseName, resolvedDependencies, repositories,
templateName,
ctx.getArguments(),
destination);
}
// setup vscode
templateName = "README.qute.md";
destination = new File(tmpProjectDir, "README.md").toPath();
if (isNeeded(reload, destination)) {
destination.toFile().getParentFile().mkdirs();
renderTemplate(engine, dependencies, fullClassName, baseName, resolvedDependencies, repositories,
templateName,
ctx.getArguments(),
destination);
}
templateName = "settings.qute.json";
destination = new File(tmpProjectDir, ".vscode/settings.json").toPath();
if (isNeeded(reload, destination)) {
destination.toFile().getParentFile().mkdirs();
renderTemplate(engine, dependencies, fullClassName, baseName, resolvedDependencies, repositories,
templateName,
ctx.getArguments(),
destination);
}
// setup intellij - disabled for now as idea was not picking these up directly
/*
* templateName = "idea-port-4004.qute.xml"; destination = new
* File(tmpProjectDir, ".idea/runConfigurations/" + baseName +
* "-port-4004.xml").toPath(); destination.toFile().getParentFile().mkdirs();
* renderTemplate(engine, collectDependencies, baseName, resolvedDependencies,
* templateName, script.getArguments(), destination);
*
* templateName = "idea.qute.xml"; destination = new File(tmpProjectDir,
* ".idea/runConfigurations/" + baseName + ".xml").toPath();
* destination.toFile().getParentFile().mkdirs(); renderTemplate(engine,
* collectDependencies, baseName, resolvedDependencies, templateName,
* script.getArguments(), destination);
*/
return tmpProjectDir;
}
private boolean isNeeded(boolean reload, Path file) {
return !file.toFile().exists() && !reload;
}
private void renderTemplate(TemplateEngine engine, List<String> collectDependencies, String fullclassName,
String baseName,
List<String> resolvedDependencies, List<MavenRepo> repositories, String templateName,
List<String> userParams, Path destination)
throws IOException {
Template template = engine.getTemplate(templateName);
if (template == null)
throw new ExitException(1, "Could not locate template named: '" + templateName + "'");
String result = template
.data("repositories",
repositories.stream()
.map(MavenRepo::getUrl)
.filter(s -> !"".equals(s)))
.data("dependencies", collectDependencies)
.data("gradledependencies", gradleify(collectDependencies))
.data("baseName", baseName)
.data("fullClassName", fullclassName)
.data("classpath",
resolvedDependencies.stream()
.filter(t -> !t.isEmpty())
.collect(Collectors.toList()))
.data("userParams", String.join(" ", userParams))
.data("cwd", System.getProperty("user.dir"))
.render();
Util.writeString(destination, result);
}
private List<String> gradleify(List<String> collectDependencies) {
return collectDependencies.stream().map(item -> {
if (item.endsWith("@pom")) {
return "implementation platform ('" + item.substring(0, item.lastIndexOf("@pom")) + "')";
} else {
return "implementation '" + item + "'";
}
}).collect(Collectors.toList());
}
static String stripPrefix(String fileName) {
if (fileName.indexOf(".") > 0) {
return fileName.substring(0, fileName.lastIndexOf("."));
} else {
return fileName;
}
}
}
| [
"\"GITPOD_WORKSPACE_URL\"",
"\"GITPOD_WORKSPACE_URL\""
]
| []
| [
"GITPOD_WORKSPACE_URL"
]
| [] | ["GITPOD_WORKSPACE_URL"] | java | 1 | 0 | |
lime/scripts/qc_datasets/exam_smirnoff_fit.py | # =============================================================================
# imports
# =============================================================================
import os
import sys
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
import tensorflow as tf
tf.compat.v1.logging.set_verbosity(tf.compat.v1.logging.ERROR)
tf.autograph.set_verbosity(3)
from sklearn import metrics
import gin
import lime
import pandas as pd
import numpy as np
import qcportal as ptl
client = ptl.FractalClient()
from openforcefield.topology import Molecule
from openforcefield.topology import Topology
from openforcefield.typing.engines.smirnoff import ForceField
FF = ForceField('test_forcefields/smirnoff99Frosst.offxml')
import cmiles
from simtk import openmm
import random
HARTREE_TO_KJ_PER_MOL = 2625.5
BOHR_TO_NM = 0.0529177
HARTREE_PER_BOHR_TO_KJ_PER_MOL_PER_NM = 49614.77
TRANSLATION = {
6: 0,
7: 1,
8: 2,
16: 3,
15: 4,
9: 5,
17: 6,
35: 7,
53: 8,
1: 9
}
TRANSLATION_TO_ELEMENT = {
    0: 6,
    1: 7,
    2: 8,
    3: 16,
    4: 15,
    5: 9,
    6: 17,
    7: 35,
    8: 53,
    9: 1}
ds_qc = client.get_collection("OptimizationDataset", "OpenFF Full Optimization Benchmark 1")
ds_name = tf.data.Dataset.from_tensor_slices(list(ds_qc.data.records))
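# Sample optimization records from the QCArchive dataset above and, for every trajectory
# snapshot, yield the QM energy/gradient tensors together with the SMIRNOFF99Frosst
# parameters (angles, bonds, torsions, nonbonded) assigned by the generated OpenMM system.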
def data_generator():
for record_name in random.sample(list(ds_qc.data.records), 10):
try:
print(record_name, flush=True)
r = ds_qc.get_record(record_name, specification='default')
if r is not None:
traj = r.get_trajectory()
if traj is not None:
for snapshot in traj:
energy = tf.convert_to_tensor(
snapshot.properties.scf_total_energy * HARTREE_TO_KJ_PER_MOL,
dtype=tf.float32)
mol = snapshot.get_molecule()
# mol = snapshot.get_molecule().dict(encoding='json')
atoms = tf.convert_to_tensor(
[TRANSLATION[atomic_number] for atomic_number in mol.atomic_numbers],
dtype=tf.int64)
zeros = tf.zeros(
(
tf.shape(atoms, tf.int64)[0],
tf.shape(atoms, tf.int64)[0]
),
dtype=tf.float32)
adjacency_map = tf.tensor_scatter_nd_update(
tf.zeros(
(
tf.shape(atoms, tf.int64)[0],
tf.shape(atoms, tf.int64)[0]
),
dtype=tf.float32),
tf.convert_to_tensor(
np.array(mol.connectivity)[:, :2],
dtype=tf.int64),
tf.convert_to_tensor(
np.array(mol.connectivity)[:, 2],
dtype=tf.float32))
xyz = tf.convert_to_tensor(
mol.geometry * BOHR_TO_NM,
dtype=tf.float32)
jacobian = tf.convert_to_tensor(
snapshot.return_result\
* HARTREE_PER_BOHR_TO_KJ_PER_MOL_PER_NM,
dtype=tf.float32)
mol = cmiles.utils.load_molecule(mol.dict(encoding='json'))
top = Topology.from_molecules(Molecule.from_openeye(mol))
sys = FF.create_openmm_system(top)
angles = tf.convert_to_tensor(
[[x[0], x[1], x[2],
x[3]._value,
x[4]._value] for x in\
[sys.getForces(
)[0].getAngleParameters(idx)\
for idx in range(sys.getForces(
)[0].getNumAngles())]],
dtype=tf.float32)
bonds = tf.convert_to_tensor([[x[0], x[1],
x[2]._value,
x[3]._value] for x in\
[sys.getForces(
)[1].getBondParameters(idx)\
for idx in range(sys.getForces(
)[1].getNumBonds())]],
dtype=tf.float32)
torsions = tf.convert_to_tensor([
[x[0], x[1], x[2], x[3], x[4], x[5]._value, x[6]._value] for x in\
[sys.getForces(
)[3].getTorsionParameters(idx)\
for idx in range(sys.getForces(
)[3].getNumTorsions())]],
dtype=tf.float32)
particle_params = tf.convert_to_tensor([[
x[0]._value,
x[1]._value,
x[2]._value
] for x in\
[sys.getForces(
)[2].getParticleParameters(idx)\
for idx in range(sys.getForces(
)[2].getNumParticles())]])
yield(
atoms,
adjacency_map,
energy,
xyz,
jacobian,
angles,
bonds,
torsions,
particle_params,
sys)
except:
pass
# @tf.function
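# Evaluate a molecular-mechanics energy per molecule from the given parameters: harmonic
# bond/angle terms, a cosine-based torsion term, and Coulomb/Lennard-Jones pair terms with
# 1-2/1-3 exclusions and scaled 1-4 interactions.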
def params_to_potential(
q, sigma, epsilon,
e_l, e_k,
a_l, a_k,
t_l, t_k,
bond_idxs, angle_idxs, torsion_idxs,
coordinates,
atom_in_mol=tf.constant(False),
bond_in_mol=tf.constant(False),
attr_in_mol=tf.constant(False)):
n_atoms = tf.shape(q, tf.int64)[0]
n_angles = tf.shape(angle_idxs, tf.int64)[0]
n_torsions = tf.shape(torsion_idxs, tf.int64)[0]
n_bonds = tf.shape(bond_idxs, tf.int64)[0]
if tf.logical_not(tf.reduce_any(atom_in_mol)):
atom_in_mol = tf.tile(
[[True]],
[n_atoms, 1])
if tf.logical_not(tf.reduce_any(bond_in_mol)):
bond_in_mol = tf.tile(
[[True]],
[n_bonds, 1])
if tf.logical_not(tf.reduce_any(attr_in_mol)):
attr_in_mol = tf.constant([[True]])
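    # (n_atoms, n_atoms) mask that is 1 only for pairs of atoms belonging to the same molecule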
per_mol_mask = tf.stop_gradient(tf.matmul(
tf.where(
atom_in_mol,
tf.ones_like(atom_in_mol, dtype=tf.float32),
tf.zeros_like(atom_in_mol, dtype=tf.float32),
name='per_mol_mask_0'),
tf.transpose(
tf.where(
atom_in_mol,
tf.ones_like(atom_in_mol, dtype=tf.float32),
tf.zeros_like(atom_in_mol, dtype=tf.float32),
name='per_mol_mask_1'))))
distance_matrix = gin.deterministic.md.get_distance_matrix(
coordinates)
bond_distances = tf.gather_nd(
distance_matrix,
bond_idxs)
angle_angles = gin.deterministic.md.get_angles_cos(
coordinates,
angle_idxs)
torsion_dihedrals = gin.deterministic.md.get_dihedrals_cos(
coordinates,
torsion_idxs)
# (n_atoms, n_atoms)
q_pair = tf.multiply(
q,
tf.transpose(
q))
# (n_atoms, n_atoms)
sigma_pair = tf.math.multiply(
tf.constant(0.5, dtype=tf.float32),
tf.math.add(
sigma,
tf.transpose(sigma)))
# (n_atoms, n_atoms)
epsilon_pair = tf.math.sqrt(
tf.math.multiply(
epsilon,
tf.transpose(epsilon)))
u_bond = 0.5 * tf.math.multiply(
e_k,
tf.math.pow(
tf.math.subtract(
bond_distances,
e_l),
tf.constant(2, dtype=tf.float32)))
u_angle = 0.5 * tf.math.multiply(
a_k,
tf.math.pow(
tf.math.subtract(
tf.math.acos(angle_angles),
a_l),
tf.constant(2, dtype=tf.float32)))
u_dihedral = tf.math.multiply(
t_k,
tf.math.pow(
tf.math.subtract(
torsion_dihedrals,
t_l),
tf.constant(2, dtype=tf.float32)))
# (n_angles, n_atoms)
angle_is_connected_to_atoms = tf.reduce_any(
[
tf.equal(
tf.tile(
tf.expand_dims(
tf.range(n_atoms),
0),
[n_angles, 1]),
tf.tile(
tf.expand_dims(
angle_idxs[:, 0],
1),
[1, n_atoms])),
tf.equal(
tf.tile(
tf.expand_dims(
tf.range(n_atoms),
0),
[n_angles, 1]),
tf.tile(
tf.expand_dims(
angle_idxs[:, 1],
1),
[1, n_atoms])),
tf.equal(
tf.tile(
tf.expand_dims(
tf.range(n_atoms),
0),
[n_angles, 1]),
tf.tile(
tf.expand_dims(
angle_idxs[:, 2],
1),
[1, n_atoms]))
],
axis=0)
# (n_torsions, n_atoms)
torsion_is_connected_to_atoms = tf.reduce_any(
[
tf.equal(
tf.tile(
tf.expand_dims(
tf.range(n_atoms),
0),
[n_torsions, 1]),
tf.tile(
tf.expand_dims(
torsion_idxs[:, 0],
1),
[1, n_atoms])),
tf.equal(
tf.tile(
tf.expand_dims(
tf.range(n_atoms),
0),
[n_torsions, 1]),
tf.tile(
tf.expand_dims(
torsion_idxs[:, 1],
1),
[1, n_atoms])),
tf.equal(
tf.tile(
tf.expand_dims(
tf.range(n_atoms),
0),
[n_torsions, 1]),
tf.tile(
tf.expand_dims(
torsion_idxs[:, 2],
1),
[1, n_atoms])),
tf.equal(
tf.tile(
tf.expand_dims(
tf.range(n_atoms),
0),
[n_torsions, 1]),
tf.tile(
tf.expand_dims(
torsion_idxs[:, 3],
1),
[1, n_atoms]))
],
axis=0)
angle_in_mol = tf.greater(
tf.matmul(
tf.where(
angle_is_connected_to_atoms,
tf.ones_like(
angle_is_connected_to_atoms,
tf.int64),
tf.zeros_like(
angle_is_connected_to_atoms,
tf.int64)),
tf.where(
atom_in_mol,
tf.ones_like(
atom_in_mol,
tf.int64),
tf.zeros_like(
atom_in_mol,
tf.int64))),
tf.constant(0, dtype=tf.int64))
torsion_in_mol = tf.greater(
tf.matmul(
tf.where(
torsion_is_connected_to_atoms,
tf.ones_like(
torsion_is_connected_to_atoms,
tf.int64),
tf.zeros_like(
torsion_is_connected_to_atoms,
tf.int64)),
tf.where(
atom_in_mol,
tf.ones_like(
atom_in_mol,
tf.int64),
tf.zeros_like(
atom_in_mol,
tf.int64))),
tf.constant(0, dtype=tf.int64))
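    # Build the nonbonded pair mask: drop 1-2 (bonded) and 1-3 (angle end) pairs, zero the
    # diagonal, and keep only the upper triangle so each pair is counted once.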
u_pair_mask = tf.tensor_scatter_nd_update(
per_mol_mask,
bond_idxs,
tf.zeros(
shape=(
tf.shape(bond_idxs, tf.int32)[0]),
dtype=tf.float32))
u_pair_mask = tf.tensor_scatter_nd_update(
u_pair_mask,
tf.stack(
[
angle_idxs[:, 0],
angle_idxs[:, 2]
],
axis=1),
tf.zeros(
shape=(
tf.shape(angle_idxs, tf.int32)[0]),
dtype=tf.float32))
u_pair_mask = tf.linalg.set_diag(
u_pair_mask,
tf.zeros(
shape=tf.shape(u_pair_mask)[0],
dtype=tf.float32))
u_pair_mask = tf.linalg.band_part(
u_pair_mask,
0, -1)
_distance_matrix = tf.where(
tf.greater(
u_pair_mask,
tf.constant(0, dtype=tf.float32)),
distance_matrix,
tf.ones_like(distance_matrix))
_distance_matrix_inverse = tf.multiply(
u_pair_mask,
tf.pow(
tf.math.add(
_distance_matrix,
tf.constant(1e-5, dtype=tf.float32)),
tf.constant(-1, dtype=tf.float32)))
sigma_over_r = tf.multiply(
sigma_pair,
_distance_matrix_inverse)
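    # Coulomb term: 138.93 is the electrostatic constant in kJ*nm/(mol*e^2); 1-4 pairs
    # (torsion end atoms) are down-scaled to 0.833, i.e. roughly 1/1.2.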
u_coulomb = tf.multiply(
_distance_matrix_inverse,
tf.multiply(
138.93 * q_pair,
tf.tensor_scatter_nd_update(
tf.ones_like(q_pair),
tf.stack(
[
torsion_idxs[:, 0],
torsion_idxs[:, 3]
],
axis=1),
tf.constant(
0.833,
shape=(
tf.shape(torsion_idxs)[0],
),
dtype=tf.float32))))
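    # Lennard-Jones term: pairs closer than 0.1 nm are zeroed to avoid numerical blow-up,
    # and 1-4 pairs are scaled by 0.5.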
u_lj = tf.multiply(
tf.where(
tf.less(
_distance_matrix,
0.1),
tf.zeros_like(epsilon_pair),
tf.multiply(
epsilon_pair,
tf.tensor_scatter_nd_update(
tf.ones_like(epsilon_pair),
tf.stack(
[
torsion_idxs[:, 0],
torsion_idxs[:, 3]
],
axis=1),
tf.constant(
0.5,
shape=(
tf.shape(torsion_idxs)[0],
),
dtype=tf.float32)))),
tf.math.subtract(
tf.pow(
sigma_over_r,
tf.constant(12, dtype=tf.float32)),
tf.pow(
sigma_over_r,
tf.constant(6, dtype=tf.float32))))
# print(tf.reduce_sum(u_coulomb))
u_pair = u_coulomb + u_lj
u_bond_tot = tf.matmul(
tf.transpose(
tf.where(
bond_in_mol,
tf.ones_like(bond_in_mol, dtype=tf.float32),
tf.zeros_like(bond_in_mol, dtype=tf.float32))),
tf.expand_dims(
u_bond,
axis=1))
u_angle_tot = tf.matmul(
tf.transpose(
tf.where(
angle_in_mol,
tf.ones_like(angle_in_mol, dtype=tf.float32),
tf.zeros_like(angle_in_mol, dtype=tf.float32))),
tf.expand_dims(
u_angle,
axis=1))
u_dihedral_tot = tf.matmul(
tf.transpose(
tf.where(
torsion_in_mol,
tf.ones_like(torsion_in_mol, dtype=tf.float32),
tf.zeros_like(torsion_in_mol, dtype=tf.float32))),
tf.expand_dims(
u_dihedral,
axis=1))
u_pair_tot = tf.matmul(
tf.transpose(
tf.where(
atom_in_mol,
tf.ones_like(atom_in_mol, dtype=tf.float32),
tf.zeros_like(atom_in_mol, dtype=tf.float32))),
tf.reduce_sum(
u_pair,
axis=1,
keepdims=True))
u_tot = tf.squeeze(
u_pair_tot + u_bond_tot + u_angle_tot + u_dihedral_tot)
return u_tot
def data_loader(idx):
atoms_path = 'data/atoms/' + str(idx.numpy()) + '.npy'
adjacency_map_path = 'data/adjacency_map/' + str(idx.numpy()) + '.npy'
energy_path = 'data/energy/' + str(idx.numpy()) + '.npy'
atoms = tf.convert_to_tensor(
np.load(atoms_path))
adjacency_map = tf.convert_to_tensor(
np.load(adjacency_map_path))
energy = tf.convert_to_tensor(
np.load(energy_path))
return atoms, adjacency_map, energy
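# Accumulate one row per atom of [OpenMM forces | QM jacobian]; the dummy first row used to
# seed the concatenation is dropped before saving.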
traj = tf.ones(
shape=(1, 6),
dtype=tf.float32)
for atoms, adjacency_map, energy, xyz, jacobian, angles, bonds, torsions,\
particle_params, sys\
in data_generator():
'''
q, sigma, epsilon = tf.split(particle_params, 3, 1)
e_l = bonds[:, 2]
e_k = bonds[:, 3]
bond_idxs = tf.cast(bonds[:, :2], tf.int64)
a_l = angles[:, 3]
a_k = angles[:, 4]
angle_idxs = tf.cast(angles[:, :3], tf.int64)
# xyz = tf.Variable(xyz * BOHR_TO_ANGSTROM)
# jacobian = jacobian * HARTREE_PER_BOHR_TO_KCAL_PER_MOL_PER_ANGSTROM
xyz = tf.Variable(xyz)
with tf.GradientTape() as tape:
u = -params_to_potential(
q,
sigma,
epsilon,
e_l, e_k,
a_l, a_k,
tf.constant([0.0], dtype=tf.float32),
tf.constant([0.0], dtype=tf.float32),
bond_idxs, angle_idxs, tf.constant([[0, 0, 0, 0]], dtype=tf.int64),
xyz)
jacobian_hat = tape.gradient(u, xyz)
'''
for idx in range(sys.getNumForces()):
force = sys.getForce(idx)
force.setForceGroup(idx)
context = openmm.Context(sys, openmm.VerletIntegrator(0.001))
context.setPositions(xyz * 1.0)
force = sys.getForce(2)
force.setNonbondedMethod(openmm.NonbondedForce.NoCutoff)
force.updateParametersInContext(context)
# print(context.getState(getEnergy=True, groups=1<<2).getPotentialEnergy())
traj = tf.concat(
[
traj,
tf.concat(
[
context.getState(
getVelocities=True,
getForces=True).getForces(asNumpy=True)._value,
jacobian
],
axis=1)
],
axis=0)
np.save('traj', traj[1:].numpy())
| []
| []
| [
"TF_CPP_MIN_LOG_LEVEL"
]
| [] | ["TF_CPP_MIN_LOG_LEVEL"] | python | 1 | 0 | |
project/__init__.py | from flask import Flask
from flask_sqlalchemy import SQLAlchemy
from flask_bcrypt import Bcrypt
from flask_login import LoginManager
import os
import datetime
app = Flask(__name__)
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
login_manager = LoginManager()
login_manager.init_app(app)
app.config.from_object(os.environ['APP_SETTINGS'])
bcryptObj = Bcrypt(app)
localSystem = None
db = SQLAlchemy(app)
from project.users.views import users_blueprint
from project.home.views import home_blueprint
from project.schedule.views import schedule_blueprint
from project.movie.views import movie_blueprint
from project.studio.views import studio_blueprint
from project.admin.views import admin_blueprint
from project.forum.views import forum_blueprint
app.register_blueprint(users_blueprint)
app.register_blueprint(home_blueprint)
app.register_blueprint(schedule_blueprint)
app.register_blueprint(movie_blueprint)
app.register_blueprint(studio_blueprint)
app.register_blueprint(admin_blueprint)
app.register_blueprint(forum_blueprint)
from project.models import User
login_manager.login_view = "users.login"
@login_manager.user_loader
def load_user(user_id):
try:
user = User.query.filter(User.id == int(user_id)).first()
except:
user = None
return user | []
| []
| [
"APP_SETTINGS"
]
| [] | ["APP_SETTINGS"] | python | 1 | 0 | |
cogs/roles.py | import asyncio
import discord
import os
from discord.ext import commands
class Roles(commands.Cog):
def __init__(self, bot):
self.bot = bot
@commands.Cog.listener()
async def on_raw_reaction_add(self, payload):
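        # Grant the matching notification role when a member reacts to the role menu in #welcome.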
        guild = self.bot.get_guild(int(os.getenv("SERVER_ID")))
welcomeChannel = discord.utils.get(guild.channels, name='welcome')
if payload.channel_id != welcomeChannel.id:
return
zoneRole = discord.utils.get(guild.roles, name='Zone Wars')
wagerRole = discord.utils.get(guild.roles, name='Wagers')
turtleRole = discord.utils.get(guild.roles, name='Turtle Wars')
if str(payload.emoji) == '<:Wood:576383685042110475>':
user = guild.get_member(payload.user_id)
await user.add_roles(zoneRole)
elif str(payload.emoji) == '<:Stone:576383787424808961>':
user = guild.get_member(payload.user_id)
await user.add_roles(wagerRole)
elif str(payload.emoji) == '<:Metal:576383798292250651>':
user = guild.get_member(payload.user_id)
await user.add_roles(turtleRole)
@commands.Cog.listener()
async def on_raw_reaction_remove(self, payload):
        guild = self.bot.get_guild(int(os.getenv("SERVER_ID")))
welcomeChannel = discord.utils.get(guild.channels, name='welcome')
if payload.channel_id != welcomeChannel.id:
return
zoneRole = discord.utils.get(guild.roles, name='Zone Wars')
wagerRole = discord.utils.get(guild.roles, name='Wagers')
turtleRole = discord.utils.get(guild.roles, name='Turtle Wars')
if str(payload.emoji) == '<:Wood:576383685042110475>':
user = guild.get_member(payload.user_id)
await user.remove_roles(zoneRole)
elif str(payload.emoji) == '<:Stone:576383787424808961>':
user = guild.get_member(payload.user_id)
await user.remove_roles(wagerRole)
elif str(payload.emoji) == '<:Metal:576383798292250651>':
user = guild.get_member(payload.user_id)
await user.remove_roles(turtleRole)
@commands.command()
@commands.has_any_role('Server Moderator')
async def reaction(self, ctx):
zoneRole = discord.utils.get(ctx.guild.roles, name='Zone Wars')
wagerRole = discord.utils.get(ctx.guild.roles, name='Wagers')
turtleRole = discord.utils.get(ctx.guild.roles, name='Turtle Wars')
woodEmoji = self.bot.get_emoji(576383685042110475)
stoneEmoji = self.bot.get_emoji(576383787424808961)
metalEmoji = self.bot.get_emoji(576383798292250651)
embed=discord.Embed(title="Notification Roles")
embed.color = discord.Color.gold()
embed.description = (f'Click the reaction(s) to toggle the role for notifications. This adds a role to your account so you may be alerted.\n\n {woodEmoji} {zoneRole.mention} - Receive alerts for **zone wars** related announcements\n\n {stoneEmoji} {wagerRole.mention} - Receive alerts for **wagers** related announcements\n\n {metalEmoji} {turtleRole.mention} - Receive alerts for **turtle wars** related announcements')
embed.set_footer(text='React to join, unreact to leave • If stuck, double click')
msg = await ctx.send(embed=embed)
await msg.add_reaction(':Wood:576383685042110475')
await msg.add_reaction(':Stone:576383787424808961')
await msg.add_reaction(':Metal:576383798292250651')
# Adding the cog to main script
def setup(bot):
bot.add_cog(Roles(bot)) | []
| []
| [
"SERVER_ID"
]
| [] | ["SERVER_ID"] | python | 1 | 0 | |
sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/models/cassandra_table.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class CassandraTable(Resource):
"""An Azure Cosmos DB Cassandra table.
Variables are only populated by the server, and will be ignored when
sending a request.
All required parameters must be populated in order to send to Azure.
:ivar id: The unique resource identifier of the database account.
:vartype id: str
:ivar name: The name of the database account.
:vartype name: str
:ivar type: The type of Azure resource.
:vartype type: str
:param location: The location of the resource group to which the resource
belongs.
:type location: str
:param tags:
:type tags: dict[str, str]
:param cassandra_table_id: Required. Name of the Cosmos DB Cassandra table
:type cassandra_table_id: str
:param default_ttl: Time to live of the Cosmos DB Cassandra table
:type default_ttl: int
:param schema: Schema of the Cosmos DB Cassandra table
:type schema: ~azure.mgmt.cosmosdb.models.CassandraSchema
"""
_validation = {
'id': {'readonly': True},
'name': {'readonly': True},
'type': {'readonly': True},
'cassandra_table_id': {'required': True},
}
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'name': {'key': 'name', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'cassandra_table_id': {'key': 'properties.id', 'type': 'str'},
'default_ttl': {'key': 'properties.defaultTtl', 'type': 'int'},
'schema': {'key': 'properties.schema', 'type': 'CassandraSchema'},
}
def __init__(self, **kwargs):
super(CassandraTable, self).__init__(**kwargs)
self.cassandra_table_id = kwargs.get('cassandra_table_id', None)
self.default_ttl = kwargs.get('default_ttl', None)
self.schema = kwargs.get('schema', None)
| []
| []
| []
| [] | [] | python | null | null | null |
manage.py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'projectscatter.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| []
| []
| []
| [] | [] | python | 0 | 0 | |
user-service/pkg/database/database.go | package database
import (
"fmt"
"github.com/jinzhu/gorm"
_ "github.com/jinzhu/gorm/dialects/postgres"
)
// CreateConnection - ...
func CreateConnection() (*gorm.DB, error) {
host := "localhost" // os.Getenv("DB_HOST")
databaseUser := "postgres" // os.Getenv("DB_USER")
databaseName := "container_management_system" // os.Getenv("DB_NAME")
databasePassword := "postgres-simple" // os.Getenv("DB_PASSWORD")
return gorm.Open(
"postgres",
fmt.Sprintf(
"host=%s user=%s dbname=%s sslmode=disable password=%s",
host, databaseUser, databaseName, databasePassword,
),
)
}
| [
"\"DB_HOST\"",
"\"DB_USER\"",
"\"DB_NAME\"",
"\"DB_PASSWORD\""
]
| []
| [
"DB_PASSWORD",
"DB_USER",
"DB_NAME",
"DB_HOST"
]
| [] | ["DB_PASSWORD", "DB_USER", "DB_NAME", "DB_HOST"] | go | 4 | 0 | |
test/python/spl/tk17/opt/.__splpy/packages/streamsx/scripts/extract.py | from __future__ import print_function
# Licensed Materials - Property of IBM
# Copyright IBM Corp. 2016,2017
import sys
import sysconfig
import inspect
if sys.version_info.major == 2:
import funcsigs
import imp
import glob
import os
import shutil
import argparse
import subprocess
import xml.etree.ElementTree as ET
import html
from streamsx.spl.spl import _OperatorType
from streamsx.spl.spl import _valid_op_parameter
############################################
# setup for function inspection
if sys.version_info.major == 3:
_inspect = inspect
else:
raise ValueError("Python version not supported.")
############################################
# Return the root of the com.ibm.streamsx.topology toolkit
def _topology_tk_dir():
dir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
for _ in range(5):
dir = os.path.dirname(dir)
return dir
def replaceTokenInFile(file, token, value):
f = open(file,'r')
contents = f.read()
f.close()
newcontents = contents.replace(token, value)
f = open(file,'w')
f.write(newcontents)
f.close()
def _optype(opobj):
if hasattr(opobj, '__splpy_optype'):
return opobj.__splpy_optype
return None
def _opfile(opobj):
return opobj.__splpy_file
def _opstyle(opobj):
return opobj.__splpy_style
def _opcallable(opobj):
return opobj.__splpy_callable
def _opdoc(opobj):
return opobj.__splpy_docpy
_INFO_XML_TEMPLATE="""<?xml version="1.0" encoding="UTF-8"?>
<toolkitInfoModel
xmlns="http://www.ibm.com/xmlns/prod/streams/spl/toolkitInfo"
xmlns:common="http://www.ibm.com/xmlns/prod/streams/spl/common">
<identity>
<name>__SPLPY_TOOLKIT_NAME__</name>
<description>Automatic generated toolkit description file.</description>
<version>1.0.0</version>
<requiredProductVersion>4.0.1.0</requiredProductVersion>
</identity>
<dependencies/>
<resources>
<messageSet name="TopologySplpyResource">
<lang default="true">en_US/TopologySplpyResource.xlf</lang>
<lang>de_DE/TopologySplpyResource.xlf</lang>
<lang>es_ES/TopologySplpyResource.xlf</lang>
<lang>fr_FR/TopologySplpyResource.xlf</lang>
<lang>it_IT/TopologySplpyResource.xlf</lang>
<lang>ja_JP/TopologySplpyResource.xlf</lang>
<lang>ko_KR/TopologySplpyResource.xlf</lang>
<lang>pt_BR/TopologySplpyResource.xlf</lang>
<lang>ru_RU/TopologySplpyResource.xlf</lang>
<lang>zh_CN/TopologySplpyResource.xlf</lang>
<lang>zh_TW/TopologySplpyResource.xlf</lang>
</messageSet>
</resources>
</toolkitInfoModel>
"""
# Create SPL operator parameters from the Python class
# (functions cannot have parameters)
# The parameters are taken from the signature of
# the __init__ method. In the spirit of Python
# the default for non-annotated function parameters
# is to map to operator parameters that take any type
# with a cardinality of 1. If the function parameter
# has a default value, then the operator parameter is optional
_OP_PARAM_TEMPLATE ="""
<parameter>
<name>__SPLPY__PARAM_NAME__SPLPY__</name>
<description></description>
<optional>__SPLPY__PARAM_OPT__SPLPY__</optional>
<rewriteAllowed>true</rewriteAllowed>
<expressionMode>AttributeFree</expressionMode>
<type></type>
<cardinality>1</cardinality>
</parameter>"""
class _Extractor(object):
def __init__(self):
self._cmd_args = self._parse_cmd_args()
self._tk_dir = self._cmd_args.directory
def _parse_cmd_args(self):
cmd_parser = argparse.ArgumentParser(description='Extract SPL operators from decorated Python classes and functions.')
cmd_parser.add_argument('-i', '--directory', required=True,
help='Toolkit directory')
cmd_parser.add_argument('--make-toolkit', action='store_true',
help='Index toolkit using spl-make-toolkit')
cmd_parser.add_argument('-v', '--verbose', action='store_true',
help='Print more diagnostics')
return cmd_parser.parse_args()
def _make_namespace_dir(self, ns):
nsdir = os.path.join(self._tk_dir, ns)
if os.path.isdir(nsdir):
return nsdir
os.mkdir(nsdir)
return nsdir
def _make_operator_dir(self, nsdir, name):
oppath = os.path.join(nsdir, name)
if (os.path.isdir(oppath)):
shutil.rmtree(oppath)
os.mkdir(oppath)
return oppath
# Process python objects in a module looking for SPL operators
# dynm - introspection for the modeul
# module - module name
# ops - list of potential operators (functions)
def _process_operators(self, dynm, module, streams_python_file, ops):
for opname, opobj in ops:
if inspect.isbuiltin(opobj):
continue
if opname.startswith('spl'):
continue
optype = _optype(opobj)
if optype is None:
continue
if optype == _OperatorType.Ignore:
continue
if streams_python_file != _opfile(opobj):
continue
self._common_tuple_operator(dynm, module, opname, opobj)
def _copy_globalization_resources(self):
'''Copy the language resource files for python api functions
This function copies the TopologySplpy Resource files from Topology toolkit directory
into the impl/nl folder of the project.
Returns: the list with the copied locale strings'''
rootDir = os.path.join(_topology_tk_dir(), "impl", "nl")
languageList = []
for dirName in os.listdir(rootDir):
srcDir = os.path.join(_topology_tk_dir(), "impl", "nl", dirName)
if (os.path.isdir(srcDir)) and (dirName != "include"):
dstDir = os.path.join(self._tk_dir, "impl", "nl", dirName)
try:
print("Copy globalization resources " + dirName)
os.makedirs(dstDir)
except OSError as e:
if (e.errno == 17) and (os.path.isdir(dstDir)):
if self._cmd_args.verbose:
print("Directory", dstDir, "exists")
else:
raise
srcFile = os.path.join(srcDir, "TopologySplpyResource.xlf")
if os.path.isfile(srcFile):
res = shutil.copy2(srcFile, dstDir)
languageList.append(dirName)
if self._cmd_args.verbose:
print("Written: " + res)
return languageList
#
# module - module for operator
# opname - name of the SPL operator
# opobj - decorated object defining operator
#
def _common_tuple_operator(self, dynm, module, opname, opobj) :
if (not hasattr(dynm, 'spl_namespace')) and hasattr(dynm, 'splNamespace'):
ns = getattr(dynm, 'splNamespace')()
else:
ns = getattr(dynm, 'spl_namespace')()
print(ns + "::" + opname)
# Print the summary of the class/function
_doc = inspect.getdoc(opobj)
if _doc is not None:
_doc = str.splitlines(_doc)[0]
print(" ", _doc)
nsdir = self._make_namespace_dir(ns)
opdir = self._make_operator_dir(nsdir, opname)
self._copy_template_dir("common")
self._copy_template_dir("icons")
self._copy_python_dir("packages")
self._copy_python_dir("include")
self._copy_CGT(opdir, ns, opname, opobj)
self._write_config(dynm, opdir, module, opname, opobj)
def _create_op_parameters(self, opmodel_xml, name, opObj):
opparam_xml = ''
if _opcallable(opObj) == 'class':
pmds = init_sig = _inspect.signature(opObj.__init__).parameters
itpmds = iter(pmds)
# first argument to __init__ is self (instance ref)
next(itpmds)
for pn in itpmds:
pmd = pmds[pn]
_valid_op_parameter(pn)
px = _OP_PARAM_TEMPLATE
px = px.replace('__SPLPY__PARAM_NAME__SPLPY__', pn)
px = px.replace('__SPLPY__PARAM_OPT__SPLPY__', 'false' if pmd.default== _inspect.Parameter.empty else 'true' )
opparam_xml = opparam_xml + px
replaceTokenInFile(opmodel_xml, '__SPLPY__PARAMETERS__SPLPY__', opparam_xml)
def _copy_CGT(self, opdir, ns, name, funcTuple):
cgtbase = _optype(funcTuple).spl_template
optemplate = os.path.join(_topology_tk_dir(), "opt", "python", "templates","operators", cgtbase)
opcgt_cpp = os.path.join(opdir, name + '_cpp.cgt')
shutil.copy(optemplate + '_cpp.cgt', opcgt_cpp)
shutil.copy(optemplate + '_h.cgt', os.path.join(opdir, name + '_h.cgt'))
opmodel_xml = os.path.join(opdir, name + '.xml')
shutil.copy(optemplate + '.xml', opmodel_xml)
replaceTokenInFile(opmodel_xml, "__SPLPY__MAJOR_VERSION__SPLPY__", str(sys.version_info[0]));
replaceTokenInFile(opmodel_xml, "__SPLPY__MINOR_VERSION__SPLPY__", str(sys.version_info[1]));
self._create_op_parameters(opmodel_xml, name, funcTuple)
self._create_op_spldoc(opmodel_xml, name, funcTuple)
self._create_ip_spldoc(opmodel_xml, name, funcTuple)
## Create SPL doc entries in the Operator model xml file.
##
def _create_op_spldoc(self, opmodel_xml, name, opobj):
opdoc = inspect.getdoc(opobj)
if opdoc is None:
opdoc = 'Callable: ' + name + "\n"
opdoc = html.escape(opdoc)
# Optionally include the Python source code
if _opdoc(opobj):
try:
_pysrc = inspect.getsource(opobj)
opdoc += "\n"
opdoc += "# Python\n";
for _line in str.splitlines(_pysrc):
opdoc += " "
opdoc += html.escape(_line)
opdoc += "\n"
except:
pass
replaceTokenInFile(opmodel_xml, "__SPLPY__DESCRIPTION__SPLPY__", opdoc);
def _create_ip_spldoc(self, opmodel_xml, name, opobj):
if _opstyle(opobj) == 'dictionary':
_p0doc = """
Tuple attribute values are passed by name to the Python callable using `\*\*kwargs`.
"""
elif _opstyle(opobj) == 'tuple':
_p0doc = """
Tuple attribute values are passed by position to the Python callable.
"""
else:
_p0doc = ''
replaceTokenInFile(opmodel_xml, "__SPLPY__INPORT_0_DESCRIPTION__SPLPY__", _p0doc);
# Write information about the Python function parameters.
#
def _write_style_info(self, cfgfile, opobj):
is_class = inspect.isclass(opobj)
if is_class:
opfn = opobj.__call__
else:
opfn = opobj
sig = _inspect.signature(opfn)
fixedCount = 0
if _opstyle(opobj) == 'tuple':
pmds = sig.parameters
itpmds = iter(pmds)
# Skip 'self' for classes
if is_class:
next(itpmds)
for pn in itpmds:
param = pmds[pn]
if param.kind == _inspect.Parameter.POSITIONAL_OR_KEYWORD:
fixedCount += 1
if param.kind == _inspect.Parameter.VAR_POSITIONAL:
fixedCount = -1
break
if param.kind == _inspect.Parameter.VAR_KEYWORD:
break
cfgfile.write('sub splpy_FixedParam { \''+ str(fixedCount) + "\'}\n")
cfgfile.write('sub splpy_ParamStyle { \''+ str(_opstyle(opobj)) + "\'}\n")
# Write out the configuration for the operator
# as a set of Perl functions that return useful values
# for the code generator
def _write_config(self, dynm, opdir, module, opname, opobj):
cfgpath = os.path.join(opdir, 'splpy_operator.pm')
cfgfile = open(cfgpath, 'w')
cfgfile.write('sub splpy_Module { \''+ module + "\'}\n")
cfgfile.write('sub splpy_OperatorCallable {\'' + _opcallable(opobj) + "\'}\n")
cfgfile.write('sub splpy_FunctionName {\'' + opname + "\'}\n")
cfgfile.write('sub splpy_OperatorType {\'' + _optype(opobj).name + "\'}\n")
self._write_style_info(cfgfile, opobj)
if hasattr(dynm, 'spl_pip_packages'):
pp = getattr(dynm, 'spl_pip_packages')()
if not isinstance(pp, list):
pp = list(pp)
else:
pp = []
cfgfile.write('sub splpy_Packages {(' + ','.join(["'{0}'".format(_) for _ in pp]) + ')}\n')
cfgfile.write("1;\n")
cfgfile.close()
# Copy a single file from the templates directory to the newly created operator directory
def _copy_template_dir(self, dir):
self._copy_python_dir(os.path.join("templates", dir))
def _copy_python_dir(self, dir):
cmn_src = os.path.join(_topology_tk_dir(), "opt", "python", dir);
cmn_dst = os.path.join(self._tk_dir, "opt", ".__splpy", os.path.basename(dir))
if (os.path.isdir(cmn_dst)):
shutil.rmtree(cmn_dst)
shutil.copytree(cmn_src, cmn_dst)
def _setup_info_xml(self, languageList):
'''Setup the info.xml file
This function prepares or checks the info.xml file in the project directory
- if the info.xml does not exist in the project directory, it copies the template info.xml into the project directory.
The project name is obtained from the project directory name
- If there is a info.xml file, the resource section is inspected. If the resource section has no valid message set
description for the TopologySplpy Resource a warning message is printed'''
infoXmlFile = os.path.join(self._tk_dir, 'info.xml')
print('Check info.xml:', infoXmlFile)
try:
TopologySplpyResourceMessageSetFound = False
TopologySplpyResourceLanguages = []
tree = ET.parse(infoXmlFile)
root = tree.getroot()
for resources in root.findall('{http://www.ibm.com/xmlns/prod/streams/spl/toolkitInfo}resources'):
if self._cmd_args.verbose:
print('Resource: ', resources.tag)
for messageSet in resources.findall('{http://www.ibm.com/xmlns/prod/streams/spl/toolkitInfo}messageSet'):
if self._cmd_args.verbose:
print('Message set:', messageSet.tag, messageSet.attrib)
if 'name' in messageSet.attrib:
if messageSet.attrib['name'] == 'TopologySplpyResource':
TopologySplpyResourceMessageSetFound = True
for lang in messageSet.findall('{http://www.ibm.com/xmlns/prod/streams/spl/toolkitInfo}lang'):
language = os.path.dirname(lang.text)
TopologySplpyResourceLanguages.append(language)
if TopologySplpyResourceMessageSetFound:
TopologySplpyResourceLanguages.sort()
languageList.sort()
copiedLanguagesSet = set(languageList)
resourceLanguageSet = set(TopologySplpyResourceLanguages)
if self._cmd_args.verbose:
print('copied language resources:\n', languageList)
print('TopologySplpyResource from info.xml:\n', TopologySplpyResourceLanguages)
if copiedLanguagesSet == resourceLanguageSet:
print('Resource section of info.xml verified')
else:
errstr = """"ERROR: Message set for the "TopologySplpyResource" is incomplete or invalid. Correct the resource section in info.xml file.
Sample info xml:\n""" + _INFO_XML_TEMPLATE
sys.exit(errstr)
else:
errstr = """"ERROR: Message set for the "TopologySplpyResource" is missing. Correct the resource section in info.xml file.
Sample info xml:\n""" + _INFO_XML_TEMPLATE
sys.exit(errstr)
except FileNotFoundError as e:
print("WARNING: File info.xml not found. Creating info.xml from template")
#Get default project name from project directory
projectRootDir = os.path.abspath(self._tk_dir) #os.path.abspath returns the path without trailing /
projectName = os.path.basename(projectRootDir)
infoXml=_INFO_XML_TEMPLATE.replace('__SPLPY_TOOLKIT_NAME__', projectName)
f = open(infoXmlFile, 'w')
f.write(infoXml)
f.close()
except SystemExit as e:
raise e
except:
errstr = """ERROR: File info.xml is invalid or not accessible
Sample info xml:\n""" + _INFO_XML_TEMPLATE
sys.exit(errstr)
def _extract_from_toolkit():
"""
Look at all the modules in opt/python/streams (opt/python/streams/*.py)
and extract any spl decorated function as an operator.
"""
extractor = _Extractor()
tk_dir = extractor._tk_dir
tk_streams = os.path.join(tk_dir, 'opt', 'python', 'streams')
print(tk_streams)
if not os.path.isdir(tk_streams):
# Nothing to do
return
sys.path.insert(1, tk_streams)
tk_packages = os.path.join(tk_dir, 'opt', 'python', 'packages')
if os.path.isdir(tk_packages):
sys.path.insert(1, tk_packages)
tk_modules = os.path.join(tk_dir, 'opt', 'python', 'modules')
if os.path.isdir(tk_modules):
sys.path.insert(1, tk_modules)
for mf in glob.glob(os.path.join(tk_streams, '*.py')):
print('Checking ', mf, 'for operators')
(name, suffix, mode, mtype) = inspect.getmoduleinfo(mf)
dynm = imp.load_source(name, mf)
streams_python_file = inspect.getsourcefile(dynm)
extractor._process_operators(dynm, name, streams_python_file, inspect.getmembers(dynm, inspect.isfunction))
extractor._process_operators(dynm, name, streams_python_file, inspect.getmembers(dynm, inspect.isclass))
langList = extractor._copy_globalization_resources()
if extractor._cmd_args.verbose:
print("Available languages for TopologySplpy resource:", langList)
extractor._setup_info_xml(langList)
# Now make the toolkit if required
if extractor._cmd_args.make_toolkit:
si = os.environ['STREAMS_INSTALL']
mktk = os.path.join(si, 'bin', 'spl-make-toolkit')
mktk_args = [mktk, '--directory', extractor._cmd_args.directory, '--make-operator']
subprocess.check_call(mktk_args)
| []
| []
| [
"STREAMS_INSTALL"
]
| [] | ["STREAMS_INSTALL"] | python | 1 | 0 | |
services/softLayer_virtual_guest_block_device_template_group_test.go | package services_test
import (
"os"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
slclientfakes "github.com/maximilien/softlayer-go/client/fakes"
common "github.com/maximilien/softlayer-go/common"
datatypes "github.com/maximilien/softlayer-go/data_types"
softlayer "github.com/maximilien/softlayer-go/softlayer"
)
var _ = Describe("SoftLayer_Virtual_Guest_Service", func() {
var (
username, apiKey string
err error
fakeClient *slclientfakes.FakeSoftLayerClient
vgbdtgService softlayer.SoftLayer_Virtual_Guest_Block_Device_Template_Group_Service
vgbdtGroup datatypes.SoftLayer_Virtual_Guest_Block_Device_Template_Group
)
BeforeEach(func() {
username = os.Getenv("SL_USERNAME")
Expect(username).ToNot(Equal(""))
apiKey = os.Getenv("SL_API_KEY")
Expect(apiKey).ToNot(Equal(""))
fakeClient = slclientfakes.NewFakeSoftLayerClient(username, apiKey)
Expect(fakeClient).ToNot(BeNil())
vgbdtgService, err = fakeClient.GetSoftLayer_Virtual_Guest_Block_Device_Template_Group_Service()
Expect(err).ToNot(HaveOccurred())
Expect(vgbdtgService).ToNot(BeNil())
vgbdtGroup = datatypes.SoftLayer_Virtual_Guest_Block_Device_Template_Group{}
})
Context("#GetName", func() {
It("returns the name for the service", func() {
name := vgbdtgService.GetName()
Expect(name).To(Equal("SoftLayer_Virtual_Guest_Block_Device_Template_Group"))
})
})
Context("#GetObject", func() {
BeforeEach(func() {
vgbdtGroup.Id = 200150
fakeClient.DoRawHttpRequestResponse, err = common.ReadJsonTestFixtures("services", "SoftLayer_Virtual_Guest_Block_Device_Template_Group_Service_getObject.json")
Expect(err).ToNot(HaveOccurred())
})
		It("successfully retrieves SoftLayer_Virtual_Guest_Block_Device_Template_Group instance", func() {
vgbdtg, err := vgbdtgService.GetObject(vgbdtGroup.Id)
Expect(err).ToNot(HaveOccurred())
Expect(vgbdtg.AccountId).To(Equal(278444))
Expect(vgbdtg.CreateDate).ToNot(BeNil())
Expect(vgbdtg.Id).To(Equal(vgbdtGroup.Id))
Expect(vgbdtg.Name).To(Equal("BOSH-eCPI-packer-centos-2014-08-12T15:54:16Z"))
Expect(vgbdtg.Note).To(Equal("centos image created by packer at 2014-08-12T15:54:16Z"))
Expect(vgbdtg.ParentId).To(BeNil())
Expect(vgbdtg.PublicFlag).To(Equal(0))
Expect(vgbdtg.StatusId).To(Equal(1))
Expect(vgbdtg.Summary).To(Equal("centos image created by packer at 2014-08-12T15:54:16Z"))
Expect(vgbdtg.TransactionId).To(BeNil())
Expect(vgbdtg.UserRecordId).To(Equal(239954))
Expect(vgbdtg.GlobalIdentifier).To(Equal("8071601b-5ee1-483e-a9e8-6e5582dcb9f7"))
})
})
Context("#DeleteObject", func() {
BeforeEach(func() {
vgbdtGroup.Id = 1234567
})
		It("successfully deletes the SoftLayer_Virtual_Guest_Block_Device_Template_Group instance", func() {
fakeClient.DoRawHttpRequestResponse = []byte("true")
deleted, err := vgbdtgService.DeleteObject(vgbdtGroup.Id)
Expect(err).ToNot(HaveOccurred())
Expect(deleted).To(BeTrue())
})
It("fails to delete the SoftLayer_Virtual_Guest_Block_Device_Template_Group instance", func() {
fakeClient.DoRawHttpRequestResponse = []byte("false")
deleted, err := vgbdtgService.DeleteObject(vgbdtGroup.Id)
Expect(err).To(HaveOccurred())
Expect(deleted).To(BeFalse())
})
})
Context("#GetDatacenters", func() {
BeforeEach(func() {
vgbdtGroup.Id = 1234567
fakeClient.DoRawHttpRequestResponse, err = common.ReadJsonTestFixtures("services", "SoftLayer_Virtual_Guest_Block_Device_Template_Group_Service_getDatacenters.json")
Expect(err).ToNot(HaveOccurred())
})
		It("successfully retrieves an array of SoftLayer_Location for the virtual guest device template group", func() {
locations, err := vgbdtgService.GetDatacenters(vgbdtGroup.Id)
Expect(err).ToNot(HaveOccurred())
Expect(len(locations)).To(BeNumerically("==", 2))
Expect(locations[0].Id).To(Equal(265592))
Expect(locations[0].LongName).To(Equal("Amsterdam 1"))
Expect(locations[0].Name).To(Equal("ams01"))
Expect(locations[1].Id).To(Equal(154820))
Expect(locations[1].LongName).To(Equal("Dallas 6"))
Expect(locations[1].Name).To(Equal("dal06"))
})
})
Context("#GetSshKeys", func() {
BeforeEach(func() {
vgbdtGroup.Id = 1234567
fakeClient.DoRawHttpRequestResponse, err = common.ReadJsonTestFixtures("services", "SoftLayer_Virtual_Guest_Block_Device_Template_Group_Service_getSshKeys.json")
Expect(err).ToNot(HaveOccurred())
})
It("sucessfully retrieves an array of SoftLayer_Security_Ssh_Key array for virtual guest device template group", func() {
sshKeys, err := vgbdtgService.GetSshKeys(vgbdtGroup.Id)
Expect(err).ToNot(HaveOccurred())
Expect(len(sshKeys)).To(BeNumerically(">", 0))
for _, sshKey := range sshKeys {
Expect(sshKey.CreateDate).ToNot(BeNil())
Expect(sshKey.Fingerprint).To(Equal("f6:c2:9d:57:2f:74:be:a1:db:71:f2:e5:8e:0f:84:7e"))
Expect(sshKey.Id).To(Equal(84386))
Expect(sshKey.Key).ToNot(Equal(""))
Expect(sshKey.Label).To(Equal("TEST:softlayer-go"))
Expect(sshKey.ModifyDate).To(BeNil())
Expect(sshKey.Label).To(Equal("TEST:softlayer-go"))
}
})
})
Context("#GetStatus", func() {
BeforeEach(func() {
vgbdtGroup.Id = 1234567
fakeClient.DoRawHttpRequestResponse, err = common.ReadJsonTestFixtures("services", "SoftLayer_Virtual_Guest_Block_Device_Template_Group_Service_getStatus.json")
Expect(err).ToNot(HaveOccurred())
})
It("sucessfully retrieves SoftLayer_Virtual_Guest_Block_Device_Template_Group instance status", func() {
status, err := vgbdtgService.GetStatus(vgbdtGroup.Id)
Expect(err).ToNot(HaveOccurred())
Expect(status.Description).To(Equal("The Guest Block Device Template Group is available to all accounts"))
Expect(status.KeyName).To(Equal("ACTIVE"))
Expect(status.Name).To(Equal("Active"))
})
})
})
| [
"\"SL_USERNAME\"",
"\"SL_API_KEY\""
]
| []
| [
"SL_API_KEY",
"SL_USERNAME"
]
| [] | ["SL_API_KEY", "SL_USERNAME"] | go | 2 | 0 | |
test/integration/test_visual_recognition_v4.py | # coding: utf-8
import pytest
import ibm_watson
import os
import json
from unittest import TestCase
from ibm_watson.visual_recognition_v4 import AnalyzeEnums, FileWithMetadata, TrainingDataObject, Location
@pytest.mark.skipif(
os.getenv('VCAP_SERVICES') is None, reason='requires VCAP_SERVICES')
class IntegrationTestVisualRecognitionV4(TestCase):
visual_recognition = None
@classmethod
def setup_class(cls):
cls.visual_recognition = ibm_watson.VisualRecognitionV4('2019-02-11')
cls.visual_recognition.set_default_headers({
'X-Watson-Learning-Opt-Out':
'1',
'X-Watson-Test':
'1'
})
def test_01_colllections(self):
collection = self.visual_recognition.create_collection(
name='my_collection',
description='just for fun'
).get_result()
collection_id = collection.get('collection_id')
assert collection_id is not None
my_collection = self.visual_recognition.get_collection(collection_id=collection.get('collection_id')).get_result()
assert my_collection is not None
assert my_collection.get('name') == 'my_collection'
updated_collection = self.visual_recognition.update_collection(
collection_id=collection_id,
description='new description').get_result()
assert updated_collection is not None
collections = self.visual_recognition.list_collections().get_result().get('collections')
assert collections is not None
self.visual_recognition.delete_collection(collection_id=collection_id)
def test_02_images(self):
collection = self.visual_recognition.create_collection(
name='my_collection',
description='just for fun'
).get_result()
collection_id = collection.get('collection_id')
add_images = self.visual_recognition.add_images(
collection_id,
image_url=["https://upload.wikimedia.org/wikipedia/commons/3/33/KokoniPurebredDogsGreeceGreekCreamWhiteAdult.jpg", "https://upload.wikimedia.org/wikipedia/commons/0/07/K%C3%B6nigspudel_Apricot.JPG"],
).get_result()
assert add_images is not None
image_id = add_images.get('images')[0].get('image_id')
list_images = self.visual_recognition.list_images(collection_id).get_result()
assert list_images is not None
image_details = self.visual_recognition.get_image_details(collection_id, image_id).get_result()
assert image_details is not None
response = self.visual_recognition.get_jpeg_image(collection_id, image_id).get_result()
assert response.content is not None
self.visual_recognition.delete_image(collection_id, image_id)
self.visual_recognition.delete_collection(collection_id)
def test_03_analyze(self):
dog_path = os.path.join(os.path.dirname(__file__), '../../resources/dog.jpg')
giraffe_path = os.path.join(os.path.dirname(__file__),
'../../resources/my-giraffe.jpeg')
with open(dog_path, 'rb') as dog_file, open(giraffe_path, 'rb') as giraffe_files:
analyze_images = self.visual_recognition.analyze(
collection_ids=['684777e5-1f2d-40e3-987f-72d36557ef46'],
features=[AnalyzeEnums.Features.OBJECTS.value],
images_file=[
FileWithMetadata(dog_file),
FileWithMetadata(giraffe_files)
],
image_url=['https://upload.wikimedia.org/wikipedia/commons/thumb/4/47/American_Eskimo_Dog.jpg/1280px-American_Eskimo_Dog.jpg']).get_result()
assert analyze_images is not None
print(json.dumps(analyze_images, indent=2))
def test_04_training(self):
# create a classifier
my_collection = self.visual_recognition.create_collection(
name='my_test_collection',
description='testing for python'
).get_result()
collection_id = my_collection.get('collection_id')
assert collection_id is not None
# add images
with open(os.path.join(os.path.dirname(__file__), '../../resources/South_Africa_Luca_Galuzzi_2004.jpeg'), 'rb') as giraffe_info:
add_images_result = self.visual_recognition.add_images(
collection_id,
images_file=[FileWithMetadata(giraffe_info)],
).get_result()
assert add_images_result is not None
image_id = add_images_result.get('images')[0].get('image_id')
assert image_id is not None
# add image training data
training_data = self.visual_recognition.add_image_training_data(
collection_id,
image_id,
objects=[
TrainingDataObject(object='giraffe training data',
location=Location(64, 270, 755, 784))
]).get_result()
assert training_data is not None
# train collection
train_result = self.visual_recognition.train(collection_id).get_result()
assert train_result is not None
assert train_result.get('training_status') is not None
# training usage
training_usage = self.visual_recognition.get_training_usage(start_time='2019-11-01', end_time='2019-11-27').get_result()
assert training_usage is not None
# delete collection
self.visual_recognition.delete_collection(collection_id)
| []
| []
| [
"VCAP_SERVICES"
]
| [] | ["VCAP_SERVICES"] | python | 1 | 0 | |
unhelpful/crypto.py | """gather cryptocoin data from resources"""
import collections
import datetime
import enum
import logging
import random
import os
import requests
from . import exceptions
from .utilities import get_config
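# Module-level RNG seeded from PYTHONHASHSEED (when set) so random coin picks are
# reproducible across runs; when the variable is unset, Random(None) falls back to
# an OS-provided seed.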
c_random = random.Random(os.environ.get('PYTHONHASHSEED'))
class CoinSource(enum.Enum):
coinmarketcap = 'coinmarketcap'
CoinQuote = collections.namedtuple(
'CoinQuote', ['name', 'symbol', 'price_usd', 'quote_time', 'quote_source']
)
def get_coinmarketcap_list(endpoint=get_config('COINMARKETCAP', 'endpoint')):
"""fetch top coins by marketcap from `coinmarketcap.com`
Args:
endpoint (str): endpoint address
Returns:
list: list from coinmarketcap
Raises:
requests.RequestException: connection/http errors
"""
req = requests.get(endpoint)
req.raise_for_status()
return req.json()
def get_coin_quotes(source=CoinSource.coinmarketcap):
"""get collection of cryptocurrency quotes
Args:
source (:enum:`CoinSource`): name/enum of resource to query
Returns:
list: collection of CoinQuote named tuples
Raises:
requests.RequestException: connection/http errors
"""
quotes = []
if CoinSource(source) == CoinSource.coinmarketcap:
logging.info('fetching quotes from `coinmarketcap`')
data = get_coinmarketcap_list()
for record in data:
quotes.append(
CoinQuote(
name=record['name'],
symbol=record['symbol'],
price_usd=float(record['price_usd']),
quote_time=datetime.datetime.fromtimestamp(int(record['last_updated'])),
quote_source=CoinSource.coinmarketcap.value,
)
)
return quotes
def get_random_quote(quote_list, max_range=int(get_config('COINMARKETCAP', 'top_coins'))):
"""pick a random cryptocoin to use in actual result
Args:
quote_list (list): List of CoinQuote
max_range (int): maximum for random.randint()
Returns:
CoinQuote: single CoinQuote picked randomly
"""
if max_range > len(quote_list):
logging.warning(
'`max_range` exceeds actual quote length, using list length: %s', len(quote_list)
)
max_range = len(quote_list)
if not max_range:
logging.warning('`max_range` missing, using full list length: %s', len(quote_list))
max_range = len(quote_list)
    # randrange excludes the upper bound, so the index always stays within the list
    # (randint's inclusive upper bound could previously index one past the end).
    return quote_list[c_random.randrange(max_range)]
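# Illustrative usage (assumes network access to the configured coinmarketcap
# endpoint and a populated config file; not executed as part of this module):
#
#     quotes = get_coin_quotes(source=CoinSource.coinmarketcap)
#     pick = get_random_quote(quotes)
#     print(pick.symbol, pick.price_usd, pick.quote_time)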
| []
| []
| [
"PYTHONHASHSEED"
]
| [] | ["PYTHONHASHSEED"] | python | 1 | 0 | |
pkg/cli/options.go | /*
Copyright 2020 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package cli
import (
"errors"
"fmt"
"io/fs"
"os"
"path/filepath"
"runtime"
"strings"
"github.com/sirupsen/logrus"
"github.com/spf13/afero"
"github.com/spf13/cobra"
"sigs.k8s.io/kubebuilder/v3/pkg/config"
cfgv2 "sigs.k8s.io/kubebuilder/v3/pkg/config/v2"
cfgv3 "sigs.k8s.io/kubebuilder/v3/pkg/config/v3"
"sigs.k8s.io/kubebuilder/v3/pkg/plugin"
"sigs.k8s.io/kubebuilder/v3/pkg/plugins/external"
)
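// retrievePluginsRoot indirects getPluginsRoot so the plugins-root lookup can be
// swapped out, e.g. when stubbing the filesystem in tests.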
var retrievePluginsRoot = getPluginsRoot
// Option is a function used as arguments to New in order to configure the resulting CLI.
type Option func(*CLI) error
// WithCommandName is an Option that sets the CLI's root command name.
func WithCommandName(name string) Option {
return func(c *CLI) error {
c.commandName = name
return nil
}
}
// WithVersion is an Option that defines the version string of the CLI.
func WithVersion(version string) Option {
return func(c *CLI) error {
c.version = version
return nil
}
}
// WithDescription is an Option that sets the CLI's root description.
func WithDescription(description string) Option {
return func(c *CLI) error {
c.description = description
return nil
}
}
// WithPlugins is an Option that sets the CLI's plugins.
//
// Specifying any invalid plugin results in an error.
func WithPlugins(plugins ...plugin.Plugin) Option {
return func(c *CLI) error {
for _, p := range plugins {
key := plugin.KeyFor(p)
if _, isConflicting := c.plugins[key]; isConflicting {
return fmt.Errorf("two plugins have the same key: %q", key)
}
if err := plugin.Validate(p); err != nil {
return fmt.Errorf("broken pre-set plugin %q: %v", key, err)
}
c.plugins[key] = p
}
return nil
}
}
// WithDefaultPlugins is an Option that sets the CLI's default plugins.
//
// Specifying any invalid plugin results in an error.
func WithDefaultPlugins(projectVersion config.Version, plugins ...plugin.Plugin) Option {
return func(c *CLI) error {
if err := projectVersion.Validate(); err != nil {
return fmt.Errorf("broken pre-set project version %q for default plugins: %w", projectVersion, err)
}
if len(plugins) == 0 {
return fmt.Errorf("empty set of plugins provided for project version %q", projectVersion)
}
for _, p := range plugins {
if err := plugin.Validate(p); err != nil {
return fmt.Errorf("broken pre-set default plugin %q: %v", plugin.KeyFor(p), err)
}
if !plugin.SupportsVersion(p, projectVersion) {
return fmt.Errorf("default plugin %q doesn't support version %q", plugin.KeyFor(p), projectVersion)
}
c.defaultPlugins[projectVersion] = append(c.defaultPlugins[projectVersion], plugin.KeyFor(p))
}
return nil
}
}
// WithDefaultProjectVersion is an Option that sets the CLI's default project version.
//
// Setting an invalid version results in an error.
func WithDefaultProjectVersion(version config.Version) Option {
return func(c *CLI) error {
if err := version.Validate(); err != nil {
return fmt.Errorf("broken pre-set default project version %q: %v", version, err)
}
c.defaultProjectVersion = version
return nil
}
}
// WithExtraCommands is an Option that adds extra subcommands to the CLI.
//
// Adding extra commands that duplicate existing commands results in an error.
func WithExtraCommands(cmds ...*cobra.Command) Option {
return func(c *CLI) error {
// We don't know the commands defined by the CLI yet so we are not checking if the extra commands
// conflict with a pre-existing one yet. We do this after creating the base commands.
c.extraCommands = append(c.extraCommands, cmds...)
return nil
}
}
// WithExtraAlphaCommands is an Option that adds extra alpha subcommands to the CLI.
//
// Adding extra alpha commands that duplicate existing commands results in an error.
func WithExtraAlphaCommands(cmds ...*cobra.Command) Option {
return func(c *CLI) error {
// We don't know the commands defined by the CLI yet so we are not checking if the extra alpha commands
// conflict with a pre-existing one yet. We do this after creating the base commands.
c.extraAlphaCommands = append(c.extraAlphaCommands, cmds...)
return nil
}
}
// WithCompletion is an Option that adds the completion subcommand.
func WithCompletion() Option {
return func(c *CLI) error {
c.completionCommand = true
return nil
}
}
// parseExternalPluginArgs returns the program arguments.
func parseExternalPluginArgs() (args []string) {
args = make([]string, len(os.Args)-1)
copy(args, os.Args[1:])
return args
}
// getPluginsRoot detects the host system and gets the plugins root based on the host.
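// As implemented here, the root resolves to <config>/.config/kubebuilder/plugins on
// Linux and <config>/Library/ApplicationSupport/kubebuilder/plugins on macOS, where
// <config> is $XDG_CONFIG_HOME if set and the user home directory otherwise (see
// getHomeDir below).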
func getPluginsRoot(host string) (pluginsRoot string, err error) {
switch host {
case "darwin":
logrus.Debugf("Detected host is macOS.")
pluginsRoot = filepath.Join("Library", "ApplicationSupport", "kubebuilder", "plugins")
case "linux":
logrus.Debugf("Detected host is Linux.")
pluginsRoot = filepath.Join(".config", "kubebuilder", "plugins")
default:
// freebsd, openbsd, windows...
return "", fmt.Errorf("Host not supported: %v", host)
}
userHomeDir, err := getHomeDir()
if err != nil {
return "", fmt.Errorf("error retrieving home dir: %v", err)
}
pluginsRoot = filepath.Join(userHomeDir, pluginsRoot)
return pluginsRoot, nil
}
// DiscoverExternalPlugins discovers the external plugins in the plugins root directory
// and adds them to external.Plugin.
func DiscoverExternalPlugins(fs afero.Fs) (ps []plugin.Plugin, err error) {
pluginsRoot, err := retrievePluginsRoot(runtime.GOOS)
if err != nil {
logrus.Errorf("could not get plugins root: %v", err)
return nil, err
}
rootInfo, err := fs.Stat(pluginsRoot)
if err != nil {
if errors.Is(err, afero.ErrFileNotFound) {
logrus.Debugf("External plugins dir %q does not exist, skipping external plugin parsing", pluginsRoot)
return nil, nil
}
return nil, err
}
if !rootInfo.IsDir() {
logrus.Debugf("External plugins path %q is not a directory, skipping external plugin parsing", pluginsRoot)
return nil, nil
}
pluginInfos, err := afero.ReadDir(fs, pluginsRoot)
if err != nil {
return nil, err
}
for _, pluginInfo := range pluginInfos {
if !pluginInfo.IsDir() {
logrus.Debugf("%q is not a directory so skipping parsing", pluginInfo.Name())
continue
}
versions, err := afero.ReadDir(fs, filepath.Join(pluginsRoot, pluginInfo.Name()))
if err != nil {
return nil, err
}
for _, version := range versions {
if !version.IsDir() {
logrus.Debugf("%q is not a directory so skipping parsing", version.Name())
continue
}
pluginFiles, err := afero.ReadDir(fs, filepath.Join(pluginsRoot, pluginInfo.Name(), version.Name()))
if err != nil {
return nil, err
}
for _, pluginFile := range pluginFiles {
// find the executable that matches the same name as info.Name().
// if no match is found, compare the external plugin string name before dot
// and match it with info.Name() which is the external plugin root dir.
// for example: sample.sh --> sample, externalplugin.py --> externalplugin
trimmedPluginName := strings.Split(pluginFile.Name(), ".")
if trimmedPluginName[0] == "" {
return nil, fmt.Errorf("Invalid plugin name found %q", pluginFile.Name())
}
if pluginFile.Name() == pluginInfo.Name() || trimmedPluginName[0] == pluginInfo.Name() {
// check whether the external plugin is an executable.
				if !isPluginExecutable(pluginFile.Mode()) {
return nil, fmt.Errorf("External plugin %q found in path is not an executable", pluginFile.Name())
}
ep := external.Plugin{
PName: pluginInfo.Name(),
Path: filepath.Join(pluginsRoot, pluginInfo.Name(), version.Name(), pluginFile.Name()),
PSupportedProjectVersions: []config.Version{cfgv2.Version, cfgv3.Version},
Args: parseExternalPluginArgs(),
}
if err := ep.PVersion.Parse(version.Name()); err != nil {
return nil, err
}
logrus.Printf("Adding external plugin: %s", ep.Name())
ps = append(ps, ep)
}
}
}
}
return ps, nil
}
// isPluginExecutable checks if a plugin is an executable based on the bitmask and returns true or false.
func isPluginExecutable(mode fs.FileMode) bool {
return mode&0111 != 0
}
// getHomeDir returns $XDG_CONFIG_HOME if set, otherwise $HOME.
func getHomeDir() (string, error) {
var err error
xdgHome := os.Getenv("XDG_CONFIG_HOME")
if xdgHome == "" {
xdgHome, err = os.UserHomeDir()
if err != nil {
return "", err
}
}
return xdgHome, nil
}
| [
"\"XDG_CONFIG_HOME\""
]
| []
| [
"XDG_CONFIG_HOME"
]
| [] | ["XDG_CONFIG_HOME"] | go | 1 | 0 | |
internal/log/log.go | package log
import (
"os"
"time"
"github.com/rs/zerolog"
"github.com/rs/zerolog/log"
)
var Logger zerolog.Logger
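// init configures the package-level console logger from the GO_DRILL_LOG_LEVEL
// environment variable; an unparseable value is reported via the standard logger and
// whatever level zerolog.ParseLevel returned is applied unchanged.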
func init() {
loglevel := os.Getenv("GO_DRILL_LOG_LEVEL")
lvl, err := zerolog.ParseLevel(loglevel)
if err != nil {
log.Printf("invalid value '%s' given for GO_DRILL_LOG_LEVEL. ignoring", loglevel)
}
Logger = zerolog.New(zerolog.ConsoleWriter{Out: os.Stderr, TimeFormat: time.RFC822}).Level(lvl).With().Timestamp().Logger()
}
func Printf(format string, v ...interface{}) {
Logger.Printf(format, v...)
}
func Print(v ...interface{}) {
Logger.Print(v...)
}
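// Illustrative usage (assumes GO_DRILL_LOG_LEVEL is set, e.g. to "debug", before the
// process starts; the endpoint variable is hypothetical):
//
//	log.Printf("handshake complete with %s", endpoint)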
| [
"\"GO_DRILL_LOG_LEVEL\""
]
| []
| [
"GO_DRILL_LOG_LEVEL"
]
| [] | ["GO_DRILL_LOG_LEVEL"] | go | 1 | 0 | |
python/pyspark/pandas/series.py | #
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
A wrapper class for Spark Column to behave similar to pandas Series.
"""
import datetime
import re
import inspect
import sys
from collections.abc import Mapping
from functools import partial, reduce
from typing import (
Any,
Callable,
Dict,
Generic,
IO,
Iterable,
List,
Optional,
Sequence,
Tuple,
Type,
Union,
cast,
no_type_check,
overload,
TYPE_CHECKING,
)
import numpy as np
import pandas as pd
from pandas.core.accessor import CachedAccessor
from pandas.io.formats.printing import pprint_thing
from pandas.api.types import ( # type: ignore[attr-defined]
is_list_like,
is_hashable,
CategoricalDtype,
)
from pandas.tseries.frequencies import DateOffset
from pyspark.sql import functions as F, Column, DataFrame as SparkDataFrame
from pyspark.sql.types import (
ArrayType,
BooleanType,
DecimalType,
DoubleType,
FloatType,
IntegerType,
IntegralType,
LongType,
NumericType,
Row,
StructType,
TimestampType,
)
from pyspark.sql.window import Window
from pyspark import pandas as ps # For running doctests and reference resolution in PyCharm.
from pyspark.pandas._typing import Axis, Dtype, Label, Name, Scalar, T
from pyspark.pandas.accessors import PandasOnSparkSeriesMethods
from pyspark.pandas.categorical import CategoricalAccessor
from pyspark.pandas.config import get_option
from pyspark.pandas.base import IndexOpsMixin
from pyspark.pandas.exceptions import SparkPandasIndexingError
from pyspark.pandas.frame import DataFrame
from pyspark.pandas.generic import Frame
from pyspark.pandas.internal import (
InternalField,
InternalFrame,
DEFAULT_SERIES_NAME,
NATURAL_ORDER_COLUMN_NAME,
SPARK_DEFAULT_INDEX_NAME,
SPARK_DEFAULT_SERIES_NAME,
)
from pyspark.pandas.missing.series import MissingPandasLikeSeries
from pyspark.pandas.plot import PandasOnSparkPlotAccessor
from pyspark.pandas.ml import corr
from pyspark.pandas.utils import (
combine_frames,
is_name_like_tuple,
is_name_like_value,
name_like_string,
same_anchor,
scol_for,
sql_conf,
validate_arguments_and_invoke_function,
validate_axis,
validate_bool_kwarg,
verify_temp_column_name,
SPARK_CONF_ARROW_ENABLED,
log_advice,
)
from pyspark.pandas.datetimes import DatetimeMethods
from pyspark.pandas.spark import functions as SF
from pyspark.pandas.spark.accessors import SparkSeriesMethods
from pyspark.pandas.strings import StringMethods
from pyspark.pandas.typedef import (
infer_return_type,
spark_type_to_pandas_dtype,
ScalarType,
SeriesType,
create_type_for_series_type,
)
if TYPE_CHECKING:
from pyspark.sql._typing import ColumnOrName
from pyspark.pandas.groupby import SeriesGroupBy
from pyspark.pandas.indexes import Index
from pyspark.pandas.spark.accessors import SparkIndexOpsMethods
# This regular expression pattern is complied and defined here to avoid to compile the same
# pattern every time it is used in _repr_ in Series.
# This pattern basically seeks the footer string from pandas'
REPR_PATTERN = re.compile(r"Length: (?P<length>[0-9]+)")
_flex_doc_SERIES = """
Return {desc} of series and other, element-wise (binary operator `{op_name}`).
Equivalent to ``{equiv}``
Parameters
----------
other : Series or scalar value
Returns
-------
Series
The result of the operation.
See Also
--------
Series.{reverse}
{series_examples}
"""
_add_example_SERIES = """
Examples
--------
>>> df = ps.DataFrame({'a': [2, 2, 4, np.nan],
... 'b': [2, np.nan, 2, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df
a b
a 2.0 2.0
b 2.0 NaN
c 4.0 2.0
d NaN NaN
>>> df.a.add(df.b)
a 4.0
b NaN
c 6.0
d NaN
dtype: float64
>>> df.a.radd(df.b)
a 4.0
b NaN
c 6.0
d NaN
dtype: float64
"""
_sub_example_SERIES = """
Examples
--------
>>> df = ps.DataFrame({'a': [2, 2, 4, np.nan],
... 'b': [2, np.nan, 2, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df
a b
a 2.0 2.0
b 2.0 NaN
c 4.0 2.0
d NaN NaN
>>> df.a.subtract(df.b)
a 0.0
b NaN
c 2.0
d NaN
dtype: float64
>>> df.a.rsub(df.b)
a 0.0
b NaN
c -2.0
d NaN
dtype: float64
"""
_mul_example_SERIES = """
Examples
--------
>>> df = ps.DataFrame({'a': [2, 2, 4, np.nan],
... 'b': [2, np.nan, 2, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df
a b
a 2.0 2.0
b 2.0 NaN
c 4.0 2.0
d NaN NaN
>>> df.a.multiply(df.b)
a 4.0
b NaN
c 8.0
d NaN
dtype: float64
>>> df.a.rmul(df.b)
a 4.0
b NaN
c 8.0
d NaN
dtype: float64
"""
_div_example_SERIES = """
Examples
--------
>>> df = ps.DataFrame({'a': [2, 2, 4, np.nan],
... 'b': [2, np.nan, 2, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df
a b
a 2.0 2.0
b 2.0 NaN
c 4.0 2.0
d NaN NaN
>>> df.a.divide(df.b)
a 1.0
b NaN
c 2.0
d NaN
dtype: float64
>>> df.a.rdiv(df.b)
a 1.0
b NaN
c 0.5
d NaN
dtype: float64
"""
_pow_example_SERIES = """
Examples
--------
>>> df = ps.DataFrame({'a': [2, 2, 4, np.nan],
... 'b': [2, np.nan, 2, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df
a b
a 2.0 2.0
b 2.0 NaN
c 4.0 2.0
d NaN NaN
>>> df.a.pow(df.b)
a 4.0
b NaN
c 16.0
d NaN
dtype: float64
>>> df.a.rpow(df.b)
a 4.0
b NaN
c 16.0
d NaN
dtype: float64
"""
_mod_example_SERIES = """
Examples
--------
>>> df = ps.DataFrame({'a': [2, 2, 4, np.nan],
... 'b': [2, np.nan, 2, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df
a b
a 2.0 2.0
b 2.0 NaN
c 4.0 2.0
d NaN NaN
>>> df.a.mod(df.b)
a 0.0
b NaN
c 0.0
d NaN
dtype: float64
>>> df.a.rmod(df.b)
a 0.0
b NaN
c 2.0
d NaN
dtype: float64
"""
_floordiv_example_SERIES = """
Examples
--------
>>> df = ps.DataFrame({'a': [2, 2, 4, np.nan],
... 'b': [2, np.nan, 2, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df
a b
a 2.0 2.0
b 2.0 NaN
c 4.0 2.0
d NaN NaN
>>> df.a.floordiv(df.b)
a 1.0
b NaN
c 2.0
d NaN
dtype: float64
>>> df.a.rfloordiv(df.b)
a 1.0
b NaN
c 0.0
d NaN
dtype: float64
"""
# Needed to disambiguate Series.str and str type
str_type = str
class Series(Frame, IndexOpsMixin, Generic[T]):
"""
pandas-on-Spark Series that corresponds to pandas Series logically. This holds Spark Column
internally.
:ivar _internal: an internal immutable Frame to manage metadata.
:type _internal: InternalFrame
:ivar _psdf: Parent's pandas-on-Spark DataFrame
:type _psdf: ps.DataFrame
Parameters
----------
data : array-like, dict, or scalar value, pandas Series
Contains data stored in Series
Note that if `data` is a pandas Series, other arguments should not be used.
index : array-like or Index (1d)
Values must be hashable and have the same length as `data`.
Non-unique index values are allowed. Will default to
RangeIndex (0, 1, 2, ..., n) if not provided. If both a dict and index
sequence are used, the index will override the keys found in the
dict.
dtype : numpy.dtype or None
If None, dtype will be inferred
copy : boolean, default False
Copy input data
"""
def __init__( # type: ignore[no-untyped-def]
self, data=None, index=None, dtype=None, name=None, copy=False, fastpath=False
):
assert data is not None
self._anchor: DataFrame
self._col_label: Label
if isinstance(data, DataFrame):
assert dtype is None
assert name is None
assert not copy
assert not fastpath
self._anchor = data
self._col_label = index
else:
if isinstance(data, pd.Series):
assert index is None
assert dtype is None
assert name is None
assert not copy
assert not fastpath
s = data
else:
s = pd.Series(
data=data, index=index, dtype=dtype, name=name, copy=copy, fastpath=fastpath
)
internal = InternalFrame.from_pandas(pd.DataFrame(s))
if s.name is None:
internal = internal.copy(column_labels=[None])
anchor = DataFrame(internal)
self._anchor = anchor
self._col_label = anchor._internal.column_labels[0]
object.__setattr__(anchor, "_psseries", {self._column_label: self})
@property
def _psdf(self) -> DataFrame:
return self._anchor
@property
def _internal(self) -> InternalFrame:
return self._psdf._internal.select_column(self._column_label)
@property
def _column_label(self) -> Optional[Label]:
return self._col_label
def _update_anchor(self, psdf: DataFrame) -> None:
assert psdf._internal.column_labels == [self._column_label], (
psdf._internal.column_labels,
[self._column_label],
)
self._anchor = psdf
object.__setattr__(psdf, "_psseries", {self._column_label: self})
def _with_new_scol(self, scol: Column, *, field: Optional[InternalField] = None) -> "Series":
"""
Copy pandas-on-Spark Series with the new Spark Column.
:param scol: the new Spark Column
:return: the copied Series
"""
name = name_like_string(self._column_label)
internal = self._internal.copy(
data_spark_columns=[scol.alias(name)],
data_fields=[
field if field is None or field.struct_field is None else field.copy(name=name)
],
)
return first_series(DataFrame(internal))
spark: "SparkIndexOpsMethods" = CachedAccessor( # type: ignore[assignment]
"spark", SparkSeriesMethods
)
@property
def dtypes(self) -> Dtype:
"""Return the dtype object of the underlying data.
>>> s = ps.Series(list('abc'))
>>> s.dtype == s.dtypes
True
"""
return self.dtype
@property
def axes(self) -> List["Index"]:
"""
Return a list of the row axis labels.
Examples
--------
>>> psser = ps.Series([1, 2, 3])
>>> psser.axes
[Int64Index([0, 1, 2], dtype='int64')]
"""
return [self.index]
# Arithmetic Operators
def add(self, other: Any) -> "Series":
return self + other
add.__doc__ = _flex_doc_SERIES.format(
desc="Addition",
op_name="+",
equiv="series + other",
reverse="radd",
series_examples=_add_example_SERIES,
)
def radd(self, other: Any) -> "Series":
return other + self
radd.__doc__ = _flex_doc_SERIES.format(
desc="Reverse Addition",
op_name="+",
equiv="other + series",
reverse="add",
series_examples=_add_example_SERIES,
)
def div(self, other: Any) -> "Series":
return self / other
div.__doc__ = _flex_doc_SERIES.format(
desc="Floating division",
op_name="/",
equiv="series / other",
reverse="rdiv",
series_examples=_div_example_SERIES,
)
divide = div
def rdiv(self, other: Any) -> "Series":
return other / self
rdiv.__doc__ = _flex_doc_SERIES.format(
desc="Reverse Floating division",
op_name="/",
equiv="other / series",
reverse="div",
series_examples=_div_example_SERIES,
)
def truediv(self, other: Any) -> "Series":
return self / other
truediv.__doc__ = _flex_doc_SERIES.format(
desc="Floating division",
op_name="/",
equiv="series / other",
reverse="rtruediv",
series_examples=_div_example_SERIES,
)
def rtruediv(self, other: Any) -> "Series":
return other / self
rtruediv.__doc__ = _flex_doc_SERIES.format(
desc="Reverse Floating division",
op_name="/",
equiv="other / series",
reverse="truediv",
series_examples=_div_example_SERIES,
)
def mul(self, other: Any) -> "Series":
return self * other
mul.__doc__ = _flex_doc_SERIES.format(
desc="Multiplication",
op_name="*",
equiv="series * other",
reverse="rmul",
series_examples=_mul_example_SERIES,
)
multiply = mul
def rmul(self, other: Any) -> "Series":
return other * self
rmul.__doc__ = _flex_doc_SERIES.format(
desc="Reverse Multiplication",
op_name="*",
equiv="other * series",
reverse="mul",
series_examples=_mul_example_SERIES,
)
def sub(self, other: Any) -> "Series":
return self - other
sub.__doc__ = _flex_doc_SERIES.format(
desc="Subtraction",
op_name="-",
equiv="series - other",
reverse="rsub",
series_examples=_sub_example_SERIES,
)
subtract = sub
def rsub(self, other: Any) -> "Series":
return other - self
rsub.__doc__ = _flex_doc_SERIES.format(
desc="Reverse Subtraction",
op_name="-",
equiv="other - series",
reverse="sub",
series_examples=_sub_example_SERIES,
)
def mod(self, other: Any) -> "Series":
return self % other
mod.__doc__ = _flex_doc_SERIES.format(
desc="Modulo",
op_name="%",
equiv="series % other",
reverse="rmod",
series_examples=_mod_example_SERIES,
)
def rmod(self, other: Any) -> "Series":
return other % self
rmod.__doc__ = _flex_doc_SERIES.format(
desc="Reverse Modulo",
op_name="%",
equiv="other % series",
reverse="mod",
series_examples=_mod_example_SERIES,
)
def pow(self, other: Any) -> "Series":
return self ** other
pow.__doc__ = _flex_doc_SERIES.format(
desc="Exponential power of series",
op_name="**",
equiv="series ** other",
reverse="rpow",
series_examples=_pow_example_SERIES,
)
def rpow(self, other: Any) -> "Series":
return other ** self
rpow.__doc__ = _flex_doc_SERIES.format(
desc="Reverse Exponential power",
op_name="**",
equiv="other ** series",
reverse="pow",
series_examples=_pow_example_SERIES,
)
def floordiv(self, other: Any) -> "Series":
return self // other
floordiv.__doc__ = _flex_doc_SERIES.format(
desc="Integer division",
op_name="//",
equiv="series // other",
reverse="rfloordiv",
series_examples=_floordiv_example_SERIES,
)
def rfloordiv(self, other: Any) -> "Series":
return other // self
rfloordiv.__doc__ = _flex_doc_SERIES.format(
desc="Reverse Integer division",
op_name="//",
equiv="other // series",
reverse="floordiv",
series_examples=_floordiv_example_SERIES,
)
# create accessor for pandas-on-Spark specific methods.
pandas_on_spark = CachedAccessor("pandas_on_spark", PandasOnSparkSeriesMethods)
# keep the name "koalas" for backward compatibility.
koalas = CachedAccessor("koalas", PandasOnSparkSeriesMethods)
# Comparison Operators
def eq(self, other: Any) -> "Series":
"""
Compare if the current value is equal to the other.
>>> df = ps.DataFrame({'a': [1, 2, 3, 4],
... 'b': [1, np.nan, 1, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df.a == 1
a True
b False
c False
d False
Name: a, dtype: bool
>>> df.b.eq(1)
a True
b False
c True
d False
Name: b, dtype: bool
"""
return self == other
equals = eq
def gt(self, other: Any) -> "Series":
"""
Compare if the current value is greater than the other.
>>> df = ps.DataFrame({'a': [1, 2, 3, 4],
... 'b': [1, np.nan, 1, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df.a > 1
a False
b True
c True
d True
Name: a, dtype: bool
>>> df.b.gt(1)
a False
b False
c False
d False
Name: b, dtype: bool
"""
return self > other
def ge(self, other: Any) -> "Series":
"""
Compare if the current value is greater than or equal to the other.
>>> df = ps.DataFrame({'a': [1, 2, 3, 4],
... 'b': [1, np.nan, 1, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df.a >= 2
a False
b True
c True
d True
Name: a, dtype: bool
>>> df.b.ge(2)
a False
b False
c False
d False
Name: b, dtype: bool
"""
return self >= other
def lt(self, other: Any) -> "Series":
"""
Compare if the current value is less than the other.
>>> df = ps.DataFrame({'a': [1, 2, 3, 4],
... 'b': [1, np.nan, 1, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df.a < 1
a False
b False
c False
d False
Name: a, dtype: bool
>>> df.b.lt(2)
a True
b False
c True
d False
Name: b, dtype: bool
"""
return self < other
def le(self, other: Any) -> "Series":
"""
Compare if the current value is less than or equal to the other.
>>> df = ps.DataFrame({'a': [1, 2, 3, 4],
... 'b': [1, np.nan, 1, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df.a <= 2
a True
b True
c False
d False
Name: a, dtype: bool
>>> df.b.le(2)
a True
b False
c True
d False
Name: b, dtype: bool
"""
return self <= other
def ne(self, other: Any) -> "Series":
"""
Compare if the current value is not equal to the other.
>>> df = ps.DataFrame({'a': [1, 2, 3, 4],
... 'b': [1, np.nan, 1, np.nan]},
... index=['a', 'b', 'c', 'd'], columns=['a', 'b'])
>>> df.a != 1
a False
b True
c True
d True
Name: a, dtype: bool
>>> df.b.ne(1)
a False
b True
c False
d True
Name: b, dtype: bool
"""
return self != other
def divmod(self, other: Any) -> Tuple["Series", "Series"]:
"""
Return Integer division and modulo of series and other, element-wise
(binary operator `divmod`).
Parameters
----------
other : Series or scalar value
Returns
-------
2-Tuple of Series
The result of the operation.
See Also
--------
Series.rdivmod
"""
return self.floordiv(other), self.mod(other)
def rdivmod(self, other: Any) -> Tuple["Series", "Series"]:
"""
Return Integer division and modulo of series and other, element-wise
(binary operator `rdivmod`).
Parameters
----------
other : Series or scalar value
Returns
-------
2-Tuple of Series
The result of the operation.
See Also
--------
Series.divmod
"""
return self.rfloordiv(other), self.rmod(other)
def between(self, left: Any, right: Any, inclusive: bool = True) -> "Series":
"""
Return boolean Series equivalent to left <= series <= right.
This function returns a boolean vector containing `True` wherever the
corresponding Series element is between the boundary values `left` and
`right`. NA values are treated as `False`.
Parameters
----------
left : scalar or list-like
Left boundary.
right : scalar or list-like
Right boundary.
inclusive : bool, default True
Include boundaries.
Returns
-------
Series
Series representing whether each element is between left and
right (inclusive).
See Also
--------
Series.gt : Greater than of series and other.
Series.lt : Less than of series and other.
Notes
-----
This function is equivalent to ``(left <= ser) & (ser <= right)``
Examples
--------
>>> s = ps.Series([2, 0, 4, 8, np.nan])
Boundary values are included by default:
>>> s.between(1, 4)
0 True
1 False
2 True
3 False
4 False
dtype: bool
With `inclusive` set to ``False`` boundary values are excluded:
>>> s.between(1, 4, inclusive=False)
0 True
1 False
2 False
3 False
4 False
dtype: bool
`left` and `right` can be any scalar value:
>>> s = ps.Series(['Alice', 'Bob', 'Carol', 'Eve'])
>>> s.between('Anna', 'Daniel')
0 False
1 True
2 True
3 False
dtype: bool
"""
if inclusive:
lmask = self >= left
rmask = self <= right
else:
lmask = self > left
rmask = self < right
return lmask & rmask
def cov(self, other: "Series", min_periods: Optional[int] = None) -> float:
"""
Compute covariance with Series, excluding missing values.
.. versionadded:: 3.3.0
Parameters
----------
other : Series
Series with which to compute the covariance.
min_periods : int, optional
Minimum number of observations needed to have a valid result.
Returns
-------
float
Covariance between Series and other
Examples
--------
>>> from pyspark.pandas.config import set_option, reset_option
>>> set_option("compute.ops_on_diff_frames", True)
>>> s1 = ps.Series([0.90010907, 0.13484424, 0.62036035])
>>> s2 = ps.Series([0.12528585, 0.26962463, 0.51111198])
>>> s1.cov(s2)
-0.016857626527158744
>>> reset_option("compute.ops_on_diff_frames")
"""
if not isinstance(other, Series):
raise TypeError("unsupported type: %s" % type(other))
if not np.issubdtype(self.dtype, np.number): # type: ignore[arg-type]
raise TypeError("unsupported dtype: %s" % self.dtype)
if not np.issubdtype(other.dtype, np.number): # type: ignore[arg-type]
raise TypeError("unsupported dtype: %s" % other.dtype)
min_periods = 1 if min_periods is None else min_periods
if same_anchor(self, other):
sdf = self._internal.spark_frame.select(self.spark.column, other.spark.column)
else:
combined = combine_frames(self.to_frame(), other.to_frame())
sdf = combined._internal.spark_frame.select(*combined._internal.data_spark_columns)
sdf = sdf.dropna()
if len(sdf.head(min_periods)) < min_periods:
return np.nan
else:
return sdf.select(F.covar_samp(*sdf.columns)).head(1)[0][0]
# TODO: NaN and None when ``arg`` is an empty dict
# TODO: Support ps.Series ``arg``
def map(
self, arg: Union[Dict, Callable[[Any], Any], pd.Series], na_action: Optional[str] = None
) -> "Series":
"""
Map values of Series according to input correspondence.
Used for substituting each value in a Series with another value,
that may be derived from a function, a ``dict``.
.. note:: make sure the size of the dictionary is not huge because it could
downgrade the performance or throw OutOfMemoryError due to a huge
            expression within Spark. Consider passing a function as the input
            instead in this case.
Parameters
----------
arg : function, dict or pd.Series
Mapping correspondence.
na_action :
If `ignore`, propagate NA values, without passing them to the mapping correspondence.
Returns
-------
Series
Same index as caller.
See Also
--------
Series.apply : For applying more complex functions on a Series.
DataFrame.applymap : Apply a function elementwise on a whole DataFrame.
Notes
-----
When ``arg`` is a dictionary, values in Series that are not in the
dictionary (as keys) are converted to ``None``. However, if the
dictionary is a ``dict`` subclass that defines ``__missing__`` (i.e.
provides a method for default values), then this default is used
rather than ``None``.
Examples
--------
>>> s = ps.Series(['cat', 'dog', None, 'rabbit'])
>>> s
0 cat
1 dog
2 None
3 rabbit
dtype: object
``map`` accepts a ``dict``. Values that are not found
in the ``dict`` are converted to ``None``, unless the dict has a default
value (e.g. ``defaultdict``):
>>> s.map({'cat': 'kitten', 'dog': 'puppy'})
0 kitten
1 puppy
2 None
3 None
dtype: object
It also accepts a pandas Series:
>>> pser = pd.Series(['kitten', 'puppy'], index=['cat', 'dog'])
>>> s.map(pser)
0 kitten
1 puppy
2 None
3 None
dtype: object
It also accepts a function:
>>> def format(x) -> str:
... return 'I am a {}'.format(x)
>>> s.map(format)
0 I am a cat
1 I am a dog
2 I am a None
3 I am a rabbit
dtype: object
To avoid applying the function to missing values (and keep them as NaN)
na_action='ignore' can be used:
>>> s.map('I am a {}'.format, na_action='ignore')
0 I am a cat
1 I am a dog
2 None
3 I am a rabbit
dtype: object
"""
if isinstance(arg, (dict, pd.Series)):
is_start = True
# In case dictionary is empty.
current = F.when(SF.lit(False), SF.lit(None).cast(self.spark.data_type))
for to_replace, value in arg.items():
if is_start:
current = F.when(self.spark.column == SF.lit(to_replace), value)
is_start = False
else:
current = current.when(self.spark.column == SF.lit(to_replace), value)
if hasattr(arg, "__missing__"):
tmp_val = arg[np._NoValue] # type: ignore[attr-defined]
# Remove in case it's set in defaultdict.
del arg[np._NoValue] # type: ignore[attr-defined]
current = current.otherwise(SF.lit(tmp_val))
else:
current = current.otherwise(SF.lit(None).cast(self.spark.data_type))
return self._with_new_scol(current)
else:
return self.pandas_on_spark.transform_batch(lambda pser: pser.map(arg, na_action))
@property
def shape(self) -> Tuple[int]:
"""Return a tuple of the shape of the underlying data."""
return (len(self),)
@property
def name(self) -> Name:
"""Return name of the Series."""
name = self._column_label
if name is not None and len(name) == 1:
return name[0]
else:
return name
@name.setter
def name(self, name: Name) -> None:
self.rename(name, inplace=True)
# TODO: Currently, changing index labels taking dictionary/Series is not supported.
def rename(
self, index: Optional[Union[Name, Callable[[Any], Any]]] = None, **kwargs: Any
) -> "Series":
"""
Alter Series index labels or name.
Parameters
----------
index : scalar or function, optional
Functions are transformations to apply to the index.
Scalar will alter the Series.name attribute.
inplace : bool, default False
Whether to return a new Series. If True then value of copy is
ignored.
Returns
-------
Series
Series with index labels or name altered.
Examples
--------
>>> s = ps.Series([1, 2, 3])
>>> s
0 1
1 2
2 3
dtype: int64
>>> s.rename("my_name") # scalar, changes Series.name
0 1
1 2
2 3
Name: my_name, dtype: int64
>>> s.rename(lambda x: x ** 2) # function, changes labels
0 1
1 2
4 3
dtype: int64
"""
if index is None:
pass
if callable(index):
if kwargs.get("inplace", False):
raise ValueError("inplace True is not supported yet for a function 'index'")
frame = self.to_frame()
new_index_name = verify_temp_column_name(frame, "__index_name__")
frame[new_index_name] = self.index.map(index)
frame.set_index(new_index_name, inplace=True)
frame.index.name = self.index.name
return first_series(frame).rename(self.name)
elif isinstance(index, (pd.Series, dict)):
raise ValueError("'index' of %s type is not supported yet" % type(index).__name__)
elif not is_hashable(index):
raise TypeError("Series.name must be a hashable type")
elif not isinstance(index, tuple):
index = (index,)
name = name_like_string(index)
scol = self.spark.column.alias(name)
field = self._internal.data_fields[0].copy(name=name)
internal = self._internal.copy(
column_labels=[index],
data_spark_columns=[scol],
data_fields=[field],
column_label_names=None,
)
psdf: DataFrame = DataFrame(internal)
if kwargs.get("inplace", False):
self._col_label = index
self._update_anchor(psdf)
return self
else:
return first_series(psdf)
def rename_axis(
self, mapper: Optional[Any] = None, index: Optional[Any] = None, inplace: bool = False
) -> Optional["Series"]:
"""
Set the name of the axis for the index or columns.
Parameters
----------
mapper, index : scalar, list-like, dict-like or function, optional
A scalar, list-like, dict-like or functions transformations to
apply to the index values.
inplace : bool, default False
Modifies the object directly, instead of creating a new Series.
Returns
-------
Series, or None if `inplace` is True.
See Also
--------
Series.rename : Alter Series index labels or name.
DataFrame.rename : Alter DataFrame index labels or name.
Index.rename : Set new names on index.
Examples
--------
>>> s = ps.Series(["dog", "cat", "monkey"], name="animal")
>>> s # doctest: +NORMALIZE_WHITESPACE
0 dog
1 cat
2 monkey
Name: animal, dtype: object
>>> s.rename_axis("index").sort_index() # doctest: +NORMALIZE_WHITESPACE
index
0 dog
1 cat
2 monkey
Name: animal, dtype: object
**MultiIndex**
>>> index = pd.MultiIndex.from_product([['mammal'],
... ['dog', 'cat', 'monkey']],
... names=['type', 'name'])
>>> s = ps.Series([4, 4, 2], index=index, name='num_legs')
>>> s # doctest: +NORMALIZE_WHITESPACE
type name
mammal dog 4
cat 4
monkey 2
Name: num_legs, dtype: int64
>>> s.rename_axis(index={'type': 'class'}).sort_index() # doctest: +NORMALIZE_WHITESPACE
class name
mammal cat 4
dog 4
monkey 2
Name: num_legs, dtype: int64
>>> s.rename_axis(index=str.upper).sort_index() # doctest: +NORMALIZE_WHITESPACE
TYPE NAME
mammal cat 4
dog 4
monkey 2
Name: num_legs, dtype: int64
"""
psdf = self.to_frame().rename_axis(mapper=mapper, index=index, inplace=False)
if inplace:
self._update_anchor(psdf)
return None
else:
return first_series(psdf)
@property
def index(self) -> "ps.Index":
"""The index (axis labels) Column of the Series.
See Also
--------
Index
"""
return self._psdf.index
@property
def is_unique(self) -> bool:
"""
Return boolean if values in the object are unique
Returns
-------
is_unique : boolean
>>> ps.Series([1, 2, 3]).is_unique
True
>>> ps.Series([1, 2, 2]).is_unique
False
>>> ps.Series([1, 2, 3, None]).is_unique
True
"""
scol = self.spark.column
# Here we check:
# 1. the distinct count without nulls and count without nulls for non-null values
# 2. count null values and see if null is a distinct value.
#
# This workaround is in order to calculate the distinct count including nulls in
        # a single pass. Note that COUNT(DISTINCT expr) in Spark is designed to ignore nulls.
return self._internal.spark_frame.select(
(F.count(scol) == F.countDistinct(scol))
& (F.count(F.when(scol.isNull(), 1).otherwise(None)) <= 1)
).collect()[0][0]
def reset_index(
self,
level: Optional[Union[int, Name, Sequence[Union[int, Name]]]] = None,
drop: bool = False,
name: Optional[Name] = None,
inplace: bool = False,
) -> Optional[Union["Series", DataFrame]]:
"""
Generate a new DataFrame or Series with the index reset.
This is useful when the index needs to be treated as a column,
or when the index is meaningless and needs to be reset
to the default before another operation.
Parameters
----------
level : int, str, tuple, or list, default optional
For a Series with a MultiIndex, only remove the specified levels from the index.
Removes all levels by default.
drop : bool, default False
Just reset the index, without inserting it as a column in the new DataFrame.
name : object, optional
The name to use for the column containing the original Series values.
Uses self.name by default. This argument is ignored when drop is True.
inplace : bool, default False
Modify the Series in place (do not create a new object).
Returns
-------
Series or DataFrame
When `drop` is False (the default), a DataFrame is returned.
The newly created columns will come first in the DataFrame,
followed by the original Series values.
When `drop` is True, a `Series` is returned.
In either case, if ``inplace=True``, no value is returned.
Examples
--------
>>> s = ps.Series([1, 2, 3, 4], index=pd.Index(['a', 'b', 'c', 'd'], name='idx'))
Generate a DataFrame with default index.
>>> s.reset_index()
idx 0
0 a 1
1 b 2
2 c 3
3 d 4
To specify the name of the new column use `name`.
>>> s.reset_index(name='values')
idx values
0 a 1
1 b 2
2 c 3
3 d 4
To generate a new Series with the default set `drop` to True.
>>> s.reset_index(drop=True)
0 1
1 2
2 3
3 4
dtype: int64
To update the Series in place, without generating a new one
set `inplace` to True. Note that it also requires ``drop=True``.
>>> s.reset_index(inplace=True, drop=True)
>>> s
0 1
1 2
2 3
3 4
dtype: int64
"""
inplace = validate_bool_kwarg(inplace, "inplace")
if inplace and not drop:
raise TypeError("Cannot reset_index inplace on a Series to create a DataFrame")
if drop:
psdf = self._psdf[[self.name]]
else:
psser = self
if name is not None:
psser = psser.rename(name)
psdf = psser.to_frame()
psdf = psdf.reset_index(level=level, drop=drop)
if drop:
if inplace:
self._update_anchor(psdf)
return None
else:
return first_series(psdf)
else:
return psdf
def to_frame(self, name: Optional[Name] = None) -> DataFrame:
"""
Convert Series to DataFrame.
Parameters
----------
name : object, default None
The passed name should substitute for the series name (if it has
one).
Returns
-------
DataFrame
DataFrame representation of Series.
Examples
--------
>>> s = ps.Series(["a", "b", "c"])
>>> s.to_frame()
0
0 a
1 b
2 c
>>> s = ps.Series(["a", "b", "c"], name="vals")
>>> s.to_frame()
vals
0 a
1 b
2 c
"""
if name is not None:
renamed = self.rename(name)
elif self._column_label is None:
renamed = self.rename(DEFAULT_SERIES_NAME)
else:
renamed = self
return DataFrame(renamed._internal)
to_dataframe = to_frame
def to_string(
self,
buf: Optional[IO[str]] = None,
na_rep: str = "NaN",
float_format: Optional[Callable[[float], str]] = None,
header: bool = True,
index: bool = True,
length: bool = False,
dtype: bool = False,
name: bool = False,
max_rows: Optional[int] = None,
) -> Optional[str]:
"""
Render a string representation of the Series.
.. note:: This method should only be used if the resulting pandas object is expected
to be small, as all the data is loaded into the driver's memory. If the input
            is large, set the max_rows parameter.
Parameters
----------
buf : StringIO-like, optional
buffer to write to
na_rep : string, optional
string representation of NAN to use, default 'NaN'
float_format : one-parameter function, optional
formatter function to apply to columns' elements if they are floats
default None
header : boolean, default True
Add the Series header (index name)
index : bool, optional
Add index (row) labels, default True
length : boolean, default False
Add the Series length
dtype : boolean, default False
Add the Series dtype
name : boolean, default False
Add the Series name if not None
max_rows : int, optional
Maximum number of rows to show before truncating. If None, show
all.
Returns
-------
formatted : string (if not buffer passed)
Examples
--------
>>> df = ps.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], columns=['dogs', 'cats'])
>>> print(df['dogs'].to_string())
0 0.2
1 0.0
2 0.6
3 0.2
>>> print(df['dogs'].to_string(max_rows=2))
0 0.2
1 0.0
"""
        # Make sure the locals() call is at the top of the function so we don't capture local variables.
args = locals()
if max_rows is not None:
psseries = self.head(max_rows)
else:
psseries = self
return validate_arguments_and_invoke_function(
psseries._to_internal_pandas(), self.to_string, pd.Series.to_string, args
)
def to_clipboard(self, excel: bool = True, sep: Optional[str] = None, **kwargs: Any) -> None:
# Docstring defined below by reusing DataFrame.to_clipboard's.
args = locals()
psseries = self
return validate_arguments_and_invoke_function(
psseries._to_internal_pandas(), self.to_clipboard, pd.Series.to_clipboard, args
)
to_clipboard.__doc__ = DataFrame.to_clipboard.__doc__
def to_dict(self, into: Type = dict) -> Mapping:
"""
Convert Series to {label -> value} dict or dict-like object.
.. note:: This method should only be used if the resulting pandas DataFrame is expected
to be small, as all the data is loaded into the driver's memory.
Parameters
----------
into : class, default dict
The collections.abc.Mapping subclass to use as the return
object. Can be the actual class or an empty
instance of the mapping type you want. If you want a
collections.defaultdict, you must pass it initialized.
Returns
-------
collections.abc.Mapping
Key-value representation of Series.
Examples
--------
>>> s = ps.Series([1, 2, 3, 4])
>>> s_dict = s.to_dict()
>>> sorted(s_dict.items())
[(0, 1), (1, 2), (2, 3), (3, 4)]
>>> from collections import OrderedDict, defaultdict
>>> s.to_dict(OrderedDict)
OrderedDict([(0, 1), (1, 2), (2, 3), (3, 4)])
>>> dd = defaultdict(list)
>>> s.to_dict(dd) # doctest: +ELLIPSIS
defaultdict(<class 'list'>, {...})
"""
        # Make sure the locals() call is at the top of the function so we don't capture local variables.
args = locals()
psseries = self
return validate_arguments_and_invoke_function(
psseries._to_internal_pandas(), self.to_dict, pd.Series.to_dict, args
)
def to_latex(
self,
buf: Optional[IO[str]] = None,
columns: Optional[List[Name]] = None,
col_space: Optional[int] = None,
header: bool = True,
index: bool = True,
na_rep: str = "NaN",
formatters: Optional[
Union[List[Callable[[Any], str]], Dict[Name, Callable[[Any], str]]]
] = None,
float_format: Optional[Callable[[float], str]] = None,
sparsify: Optional[bool] = None,
index_names: bool = True,
bold_rows: bool = False,
column_format: Optional[str] = None,
longtable: Optional[bool] = None,
escape: Optional[bool] = None,
encoding: Optional[str] = None,
decimal: str = ".",
multicolumn: Optional[bool] = None,
multicolumn_format: Optional[str] = None,
multirow: Optional[bool] = None,
) -> Optional[str]:
args = locals()
psseries = self
return validate_arguments_and_invoke_function(
psseries._to_internal_pandas(), self.to_latex, pd.Series.to_latex, args
)
to_latex.__doc__ = DataFrame.to_latex.__doc__
def to_pandas(self) -> pd.Series:
"""
Return a pandas Series.
.. note:: This method should only be used if the resulting pandas object is expected
to be small, as all the data is loaded into the driver's memory.
Examples
--------
>>> df = ps.DataFrame([(.2, .3), (.0, .6), (.6, .0), (.2, .1)], columns=['dogs', 'cats'])
>>> df['dogs'].to_pandas()
0 0.2
1 0.0
2 0.6
3 0.2
Name: dogs, dtype: float64
"""
log_advice(
"`to_pandas` loads all data into the driver's memory. "
"It should only be used if the resulting pandas Series is expected to be small."
)
return self._to_pandas()
def _to_pandas(self) -> pd.Series:
"""
        Same as `to_pandas()`, without issuing the advice log for internal usage.
"""
return self._to_internal_pandas().copy()
def to_list(self) -> List:
"""
Return a list of the values.
These are each a scalar type, which is a Python scalar
(for str, int, float) or a pandas scalar
(for Timestamp/Timedelta/Interval/Period)
.. note:: This method should only be used if the resulting list is expected
to be small, as all the data is loaded into the driver's memory.
"""
log_advice(
"`to_list` loads all data into the driver's memory. "
"It should only be used if the resulting list is expected to be small."
)
return self._to_internal_pandas().tolist()
tolist = to_list
def drop_duplicates(self, keep: str = "first", inplace: bool = False) -> Optional["Series"]:
"""
Return Series with duplicate values removed.
Parameters
----------
keep : {'first', 'last', ``False``}, default 'first'
Method to handle dropping duplicates:
- 'first' : Drop duplicates except for the first occurrence.
- 'last' : Drop duplicates except for the last occurrence.
- ``False`` : Drop all duplicates.
inplace : bool, default ``False``
If ``True``, performs operation inplace and returns None.
Returns
-------
Series
Series with duplicates dropped.
Examples
--------
Generate a Series with duplicated entries.
>>> s = ps.Series(['lama', 'cow', 'lama', 'beetle', 'lama', 'hippo'],
... name='animal')
>>> s.sort_index()
0 lama
1 cow
2 lama
3 beetle
4 lama
5 hippo
Name: animal, dtype: object
With the 'keep' parameter, the selection behaviour of duplicated values
can be changed. The value 'first' keeps the first occurrence for each
set of duplicated entries. The default value of keep is 'first'.
>>> s.drop_duplicates().sort_index()
0 lama
1 cow
3 beetle
5 hippo
Name: animal, dtype: object
The value 'last' for parameter 'keep' keeps the last occurrence for
each set of duplicated entries.
>>> s.drop_duplicates(keep='last').sort_index()
1 cow
3 beetle
4 lama
5 hippo
Name: animal, dtype: object
The value ``False`` for parameter 'keep' discards all sets of
duplicated entries. Setting the value of 'inplace' to ``True`` performs
the operation inplace and returns ``None``.
>>> s.drop_duplicates(keep=False, inplace=True)
>>> s.sort_index()
1 cow
3 beetle
5 hippo
Name: animal, dtype: object
"""
inplace = validate_bool_kwarg(inplace, "inplace")
psdf = self._psdf[[self.name]].drop_duplicates(keep=keep)
if inplace:
self._update_anchor(psdf)
return None
else:
return first_series(psdf)
def reindex(self, index: Optional[Any] = None, fill_value: Optional[Any] = None) -> "Series":
"""
Conform Series to new index with optional filling logic, placing
NA/NaN in locations having no value in the previous index. A new object
is produced.
Parameters
----------
index: array-like, optional
New labels / index to conform to, should be specified using keywords.
Preferably an Index object to avoid duplicating data
fill_value : scalar, default np.NaN
Value to use for missing values. Defaults to NaN, but can be any
"compatible" value.
Returns
-------
Series with changed index.
See Also
--------
Series.reset_index : Remove row labels or move them to new columns.
Examples
--------
Create a series with some fictional data.
>>> index = ['Firefox', 'Chrome', 'Safari', 'IE10', 'Konqueror']
>>> ser = ps.Series([200, 200, 404, 404, 301],
... index=index, name='http_status')
>>> ser
Firefox 200
Chrome 200
Safari 404
IE10 404
Konqueror 301
Name: http_status, dtype: int64
Create a new index and reindex the Series. By default
values in the new index that do not have corresponding
records in the Series are assigned ``NaN``.
>>> new_index = ['Safari', 'Iceweasel', 'Comodo Dragon', 'IE10',
... 'Chrome']
>>> ser.reindex(new_index).sort_index()
Chrome 200.0
Comodo Dragon NaN
IE10 404.0
Iceweasel NaN
Safari 404.0
Name: http_status, dtype: float64
We can fill in the missing values by passing a value to
the keyword ``fill_value``.
>>> ser.reindex(new_index, fill_value=0).sort_index()
Chrome 200
Comodo Dragon 0
IE10 404
Iceweasel 0
Safari 404
Name: http_status, dtype: int64
To further illustrate the filling functionality in
``reindex``, we will create a Series with a
monotonically increasing index (for example, a sequence
of dates).
>>> date_index = pd.date_range('1/1/2010', periods=6, freq='D')
>>> ser2 = ps.Series([100, 101, np.nan, 100, 89, 88],
... name='prices', index=date_index)
>>> ser2.sort_index()
2010-01-01 100.0
2010-01-02 101.0
2010-01-03 NaN
2010-01-04 100.0
2010-01-05 89.0
2010-01-06 88.0
Name: prices, dtype: float64
Suppose we decide to expand the series to cover a wider
date range.
>>> date_index2 = pd.date_range('12/29/2009', periods=10, freq='D')
>>> ser2.reindex(date_index2).sort_index()
2009-12-29 NaN
2009-12-30 NaN
2009-12-31 NaN
2010-01-01 100.0
2010-01-02 101.0
2010-01-03 NaN
2010-01-04 100.0
2010-01-05 89.0
2010-01-06 88.0
2010-01-07 NaN
Name: prices, dtype: float64
"""
return first_series(self.to_frame().reindex(index=index, fill_value=fill_value)).rename(
self.name
)
def reindex_like(self, other: Union["Series", "DataFrame"]) -> "Series":
"""
Return a Series with matching indices as other object.
Conform the object to the same index on all axes. Places NA/NaN in locations
having no value in the previous index.
Parameters
----------
other : Series or DataFrame
Its row and column indices are used to define the new indices
of this object.
Returns
-------
Series
Series with changed indices on each axis.
See Also
--------
DataFrame.set_index : Set row labels.
DataFrame.reset_index : Remove row labels or move them to new columns.
DataFrame.reindex : Change to new indices or expand indices.
Notes
-----
Same as calling
``.reindex(index=other.index, ...)``.
Examples
--------
>>> s1 = ps.Series([24.3, 31.0, 22.0, 35.0],
... index=pd.date_range(start='2014-02-12',
... end='2014-02-15', freq='D'),
... name="temp_celsius")
>>> s1
2014-02-12 24.3
2014-02-13 31.0
2014-02-14 22.0
2014-02-15 35.0
Name: temp_celsius, dtype: float64
>>> s2 = ps.Series(["low", "low", "medium"],
... index=pd.DatetimeIndex(['2014-02-12', '2014-02-13',
... '2014-02-15']),
... name="winspeed")
>>> s2
2014-02-12 low
2014-02-13 low
2014-02-15 medium
Name: winspeed, dtype: object
>>> s2.reindex_like(s1).sort_index()
2014-02-12 low
2014-02-13 low
2014-02-14 None
2014-02-15 medium
Name: winspeed, dtype: object
"""
if isinstance(other, (Series, DataFrame)):
return self.reindex(index=other.index)
else:
raise TypeError("other must be a pandas-on-Spark Series or DataFrame")
def fillna(
self,
value: Optional[Any] = None,
method: Optional[str] = None,
axis: Optional[Axis] = None,
inplace: bool = False,
limit: Optional[int] = None,
) -> Optional["Series"]:
"""Fill NA/NaN values.
.. note:: the current implementation of 'method' parameter in fillna uses Spark's Window
without specifying partition specification. This leads to move all data into
single partition in single machine and could cause serious
performance degradation. Avoid this method against very large dataset.
Parameters
----------
value : scalar, dict, Series
Value to use to fill holes. Alternatively a dict/Series of values
specifying which value to use for each column.
DataFrame is not supported.
method : {'backfill', 'bfill', 'pad', 'ffill', None}, default None
Method to use for filling holes in the reindexed Series.
pad / ffill: propagate the last valid observation forward to the next valid one.
backfill / bfill: use the NEXT valid observation to fill the gap.
axis : {0 or `index`}
1 and `columns` are not supported.
inplace : boolean, default False
Fill in place (do not create a new object)
limit : int, default None
If method is specified, this is the maximum number of consecutive NaN values to
forward/backward fill. In other words, if there is a gap with more than this number of
consecutive NaNs, it will only be partially filled. If method is not specified,
this is the maximum number of entries along the entire axis where NaNs will be filled.
Must be greater than 0 if not None
Returns
-------
Series
Series with NA entries filled.
Examples
--------
>>> s = ps.Series([np.nan, 2, 3, 4, np.nan, 6], name='x')
>>> s
0 NaN
1 2.0
2 3.0
3 4.0
4 NaN
5 6.0
Name: x, dtype: float64
Replace all NaN elements with 0s.
>>> s.fillna(0)
0 0.0
1 2.0
2 3.0
3 4.0
4 0.0
5 6.0
Name: x, dtype: float64
We can also propagate non-null values forward or backward.
>>> s.fillna(method='ffill')
0 NaN
1 2.0
2 3.0
3 4.0
4 4.0
5 6.0
Name: x, dtype: float64
>>> s = ps.Series([np.nan, 'a', 'b', 'c', np.nan], name='x')
>>> s.fillna(method='ffill')
0 None
1 a
2 b
3 c
4 c
Name: x, dtype: object
"""
psser = self._fillna(value=value, method=method, axis=axis, limit=limit)
if method is not None:
psser = DataFrame(psser._psdf._internal.resolved_copy)._psser_for(self._column_label)
inplace = validate_bool_kwarg(inplace, "inplace")
if inplace:
self._psdf._update_internal_frame(psser._psdf._internal, requires_same_anchor=False)
return None
else:
return psser.copy()
def _fillna(
self,
value: Optional[Any] = None,
method: Optional[str] = None,
axis: Optional[Axis] = None,
limit: Optional[int] = None,
part_cols: Sequence["ColumnOrName"] = (),
) -> "Series":
axis = validate_axis(axis)
if axis != 0:
raise NotImplementedError("fillna currently only works for axis=0 or axis='index'")
if (value is None) and (method is None):
raise ValueError("Must specify a fillna 'value' or 'method' parameter.")
if (method is not None) and (method not in ["ffill", "pad", "backfill", "bfill"]):
raise ValueError("Expecting 'pad', 'ffill', 'backfill' or 'bfill'.")
scol = self.spark.column
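# Non-nullable, non-floating columns cannot contain NA values, so there is
# nothing to fill and a plain copy of the Series is returned.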
if not self.spark.nullable and not isinstance(
self.spark.data_type, (FloatType, DoubleType)
):
return self._psdf.copy()._psser_for(self._column_label)
cond = self.isnull().spark.column
if value is not None:
if not isinstance(value, (float, int, str, bool)):
raise TypeError("Unsupported type %s" % type(value).__name__)
if limit is not None:
raise NotImplementedError("limit parameter for value is not support now")
scol = F.when(cond, value).otherwise(scol)
else:
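# Method-based fill: emulate ffill/bfill by taking the last/first non-null
# value over a window in the natural row order, optionally bounded by `limit`.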
if method in ["ffill", "pad"]:
func = F.last
end = Window.currentRow - 1
if limit is not None:
begin = Window.currentRow - limit
else:
begin = Window.unboundedPreceding
elif method in ["bfill", "backfill"]:
func = F.first
begin = Window.currentRow + 1
if limit is not None:
end = Window.currentRow + limit
else:
end = Window.unboundedFollowing
window = (
Window.partitionBy(*part_cols)
.orderBy(NATURAL_ORDER_COLUMN_NAME)
.rowsBetween(begin, end)
)
scol = F.when(cond, func(scol, True).over(window)).otherwise(scol)
return DataFrame(
self._psdf._internal.with_new_spark_column(
self._column_label, scol.alias(name_like_string(self.name)) # TODO: dtype?
)
)._psser_for(self._column_label)
def dropna(self, axis: Axis = 0, inplace: bool = False, **kwargs: Any) -> Optional["Series"]:
"""
Return a new Series with missing values removed.
Parameters
----------
axis : {0 or 'index'}, default 0
There is only one axis to drop values from.
inplace : bool, default False
If True, do operation inplace and return None.
**kwargs
Not in use.
Returns
-------
Series
Series with NA entries dropped from it.
Examples
--------
>>> ser = ps.Series([1., 2., np.nan])
>>> ser
0 1.0
1 2.0
2 NaN
dtype: float64
Drop NA values from a Series.
>>> ser.dropna()
0 1.0
1 2.0
dtype: float64
Keep the Series with valid entries in the same variable.
>>> ser.dropna(inplace=True)
>>> ser
0 1.0
1 2.0
dtype: float64
"""
inplace = validate_bool_kwarg(inplace, "inplace")
# TODO: last two examples from pandas produce different results.
psdf = self._psdf[[self.name]].dropna(axis=axis, inplace=False)
if inplace:
self._update_anchor(psdf)
return None
else:
return first_series(psdf)
def clip(self, lower: Union[float, int] = None, upper: Union[float, int] = None) -> "Series":
"""
Trim values at input threshold(s).
Assigns values outside boundary to boundary values.
Parameters
----------
lower : float or int, default None
Minimum threshold value. All values below this threshold will be set to it.
upper : float or int, default None
Maximum threshold value. All values above this threshold will be set to it.
Returns
-------
Series
Series with the values outside the clip boundaries replaced
Examples
--------
>>> ps.Series([0, 2, 4]).clip(1, 3)
0 1
1 2
2 3
dtype: int64
Notes
-----
One difference between this implementation and pandas is that running
`pd.Series(['a', 'b']).clip(0, 1)` will crash with "TypeError: '<=' not supported between
instances of 'str' and 'int'" while `ps.Series(['a', 'b']).clip(0, 1)` will output the
original Series, simply ignoring the incompatible types.
"""
if is_list_like(lower) or is_list_like(upper):
raise TypeError(
"List-like value are not supported for 'lower' and 'upper' at the " + "moment"
)
if lower is None and upper is None:
return self
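# Clipping only applies to numeric columns; for any other data type the
# Series is returned unchanged (see Notes in the docstring).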
if isinstance(self.spark.data_type, NumericType):
scol = self.spark.column
if lower is not None:
scol = F.when(scol < lower, lower).otherwise(scol)
if upper is not None:
scol = F.when(scol > upper, upper).otherwise(scol)
return self._with_new_scol(
scol.alias(self._internal.data_spark_column_names[0]),
field=self._internal.data_fields[0],
)
else:
return self
def drop(
self,
labels: Optional[Union[Name, List[Name]]] = None,
index: Optional[Union[Name, List[Name]]] = None,
level: Optional[int] = None,
) -> "Series":
"""
Return Series with specified index labels removed.
Remove elements of a Series based on specifying the index labels.
When using a multi-index, labels on different levels can be removed by specifying the level.
Parameters
----------
labels : single label or list-like
Index labels to drop.
index : None
Redundant for application on Series, but index can be used instead of labels.
level : int or level name, optional
For MultiIndex, level for which the labels will be removed.
Returns
-------
Series
Series with specified index labels removed.
See Also
--------
Series.dropna
Examples
--------
>>> s = ps.Series(data=np.arange(3), index=['A', 'B', 'C'])
>>> s
A 0
B 1
C 2
dtype: int64
Drop single label A
>>> s.drop('A')
B 1
C 2
dtype: int64
Drop labels B and C
>>> s.drop(labels=['B', 'C'])
A 0
dtype: int64
With 'index' rather than 'labels' returns exactly same result.
>>> s.drop(index='A')
B 1
C 2
dtype: int64
>>> s.drop(index=['B', 'C'])
A 0
dtype: int64
Also support for MultiIndex
>>> midx = pd.MultiIndex([['lama', 'cow', 'falcon'],
... ['speed', 'weight', 'length']],
... [[0, 0, 0, 1, 1, 1, 2, 2, 2],
... [0, 1, 2, 0, 1, 2, 0, 1, 2]])
>>> s = ps.Series([45, 200, 1.2, 30, 250, 1.5, 320, 1, 0.3],
... index=midx)
>>> s
lama speed 45.0
weight 200.0
length 1.2
cow speed 30.0
weight 250.0
length 1.5
falcon speed 320.0
weight 1.0
length 0.3
dtype: float64
>>> s.drop(labels='weight', level=1)
lama speed 45.0
length 1.2
cow speed 30.0
length 1.5
falcon speed 320.0
length 0.3
dtype: float64
>>> s.drop(('lama', 'weight'))
lama speed 45.0
length 1.2
cow speed 30.0
weight 250.0
length 1.5
falcon speed 320.0
weight 1.0
length 0.3
dtype: float64
>>> s.drop([('lama', 'speed'), ('falcon', 'weight')])
lama weight 200.0
length 1.2
cow speed 30.0
weight 250.0
length 1.5
falcon speed 320.0
length 0.3
dtype: float64
"""
return first_series(self._drop(labels=labels, index=index, level=level))
def _drop(
self,
labels: Optional[Union[Name, List[Name]]] = None,
index: Optional[Union[Name, List[Name]]] = None,
level: Optional[int] = None,
) -> DataFrame:
if labels is not None:
if index is not None:
raise ValueError("Cannot specify both 'labels' and 'index'")
return self._drop(index=labels, level=level)
if index is not None:
internal = self._internal
if level is None:
level = 0
if level >= internal.index_level:
raise ValueError("'level' should be less than the number of indexes")
if is_name_like_tuple(index):
index_list = [cast(Label, index)]
elif is_name_like_value(index):
index_list = [(index,)]
elif all(is_name_like_value(idxes, allow_tuple=False) for idxes in index):
index_list = [(idx,) for idx in index]
elif not all(is_name_like_tuple(idxes) for idxes in index):
raise ValueError(
"If the given index is a list, it "
"should only contains names as all tuples or all non tuples "
"that contain index names"
)
else:
index_list = cast(List[Label], index)
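# Build one condition per key: a row matches when every compared index level
# (starting at `level`) equals the key; rows matching any key are filtered out.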
drop_index_scols = []
for idxes in index_list:
try:
index_scols = [
internal.index_spark_columns[lvl] == idx
for lvl, idx in enumerate(idxes, level)
]
except IndexError:
raise KeyError(
"Key length ({}) exceeds index depth ({})".format(
len(idxes), internal.index_level
)
)
drop_index_scols.append(reduce(lambda x, y: x & y, index_scols))
cond = ~reduce(lambda x, y: x | y, drop_index_scols)
return DataFrame(internal.with_filter(cond))
else:
raise ValueError("Need to specify at least one of 'labels' or 'index'")
def head(self, n: int = 5) -> "Series":
"""
Return the first n rows.
This function returns the first n rows for the object based on position.
It is useful for quickly testing if your object has the right type of data in it.
Parameters
----------
n : Integer, default = 5
Returns
-------
The first n rows of the caller object.
Examples
--------
>>> df = ps.DataFrame({'animal':['alligator', 'bee', 'falcon', 'lion']})
>>> df.animal.head(2) # doctest: +NORMALIZE_WHITESPACE
0 alligator
1 bee
Name: animal, dtype: object
"""
return first_series(self.to_frame().head(n)).rename(self.name)
def last(self, offset: Union[str, DateOffset]) -> "Series":
"""
Select final periods of time series data based on a date offset.
When having a Series with dates as index, this function can
select the last few elements based on a date offset.
Parameters
----------
offset : str or DateOffset
The offset length of the data that will be selected. For instance,
'3D' will display all the rows having their index within the last 3 days.
Returns
-------
Series
A subset of the caller.
Raises
------
TypeError
If the index is not a :class:`DatetimeIndex`
Examples
--------
>>> index = pd.date_range('2018-04-09', periods=4, freq='2D')
>>> psser = ps.Series([1, 2, 3, 4], index=index)
>>> psser
2018-04-09 1
2018-04-11 2
2018-04-13 3
2018-04-15 4
dtype: int64
Get the rows for the last 3 days:
>>> psser.last('3D')
2018-04-13 3
2018-04-15 4
dtype: int64
Notice that the data for the last 3 calendar days were returned, not the last
3 observed days in the dataset; therefore, data for 2018-04-11 was
not returned.
"""
return first_series(self.to_frame().last(offset)).rename(self.name)
def first(self, offset: Union[str, DateOffset]) -> "Series":
"""
Select first periods of time series data based on a date offset.
When having a Series with dates as index, this function can
select the first few elements based on a date offset.
Parameters
----------
offset : str or DateOffset
The offset length of the data that will be selected. For instance,
'3D' will display all the rows having their index within the first 3 days.
Returns
-------
Series
A subset of the caller.
Raises
------
TypeError
If the index is not a :class:`DatetimeIndex`
Examples
--------
>>> index = pd.date_range('2018-04-09', periods=4, freq='2D')
>>> psser = ps.Series([1, 2, 3, 4], index=index)
>>> psser
2018-04-09 1
2018-04-11 2
2018-04-13 3
2018-04-15 4
dtype: int64
Get the rows for the first 3 days:
>>> psser.first('3D')
2018-04-09 1
2018-04-11 2
dtype: int64
Notice that the data for the first 3 calendar days were returned, not the first
3 observed days in the dataset; therefore, data for 2018-04-13 was
not returned.
"""
return first_series(self.to_frame().first(offset)).rename(self.name)
# TODO: Categorical type isn't supported (due to PySpark's limitation) and
# some doctests related with timestamps were not added.
def unique(self) -> "Series":
"""
Return unique values of Series object.
Uniques are returned in order of appearance. Hash table-based unique,
therefore does NOT sort.
.. note:: This method returns newly created Series whereas pandas returns
the unique values as a NumPy array.
Returns
-------
Returns the unique values as a Series.
See Also
--------
Index.unique
groupby.SeriesGroupBy.unique
Examples
--------
>>> psser = ps.Series([2, 1, 3, 3], name='A')
>>> psser.unique().sort_values() # doctest: +NORMALIZE_WHITESPACE, +ELLIPSIS
<BLANKLINE>
... 1
... 2
... 3
Name: A, dtype: int64
>>> ps.Series([pd.Timestamp('2016-01-01') for _ in range(3)]).unique()
0 2016-01-01
dtype: datetime64[ns]
>>> psser.name = ('x', 'a')
>>> psser.unique().sort_values() # doctest: +NORMALIZE_WHITESPACE, +ELLIPSIS
<BLANKLINE>
... 1
... 2
... 3
Name: (x, a), dtype: int64
"""
sdf = self._internal.spark_frame.select(self.spark.column).distinct()
internal = InternalFrame(
spark_frame=sdf,
index_spark_columns=None,
column_labels=[self._column_label],
data_spark_columns=[scol_for(sdf, self._internal.data_spark_column_names[0])],
data_fields=[self._internal.data_fields[0]],
column_label_names=self._internal.column_label_names,
)
return first_series(DataFrame(internal))
def sort_values(
self, ascending: bool = True, inplace: bool = False, na_position: str = "last"
) -> Optional["Series"]:
"""
Sort by the values.
Sort a Series in ascending or descending order by some criterion.
Parameters
----------
ascending : bool or list of bool, default True
Sort ascending vs. descending. Specify list for multiple sort
orders. If this is a list of bools, must match the length of
the by.
inplace : bool, default False
if True, perform operation in-place
na_position : {'first', 'last'}, default 'last'
`first` puts NaNs at the beginning, `last` puts NaNs at the end
Returns
-------
sorted_obj : Series ordered by values.
Examples
--------
>>> s = ps.Series([np.nan, 1, 3, 10, 5])
>>> s
0 NaN
1 1.0
2 3.0
3 10.0
4 5.0
dtype: float64
Sort values ascending order (default behaviour)
>>> s.sort_values(ascending=True)
1 1.0
2 3.0
4 5.0
3 10.0
0 NaN
dtype: float64
Sort values descending order
>>> s.sort_values(ascending=False)
3 10.0
4 5.0
2 3.0
1 1.0
0 NaN
dtype: float64
Sort values inplace
>>> s.sort_values(ascending=False, inplace=True)
>>> s
3 10.0
4 5.0
2 3.0
1 1.0
0 NaN
dtype: float64
Sort values putting NAs first
>>> s.sort_values(na_position='first')
0 NaN
1 1.0
2 3.0
4 5.0
3 10.0
dtype: float64
Sort a series of strings
>>> s = ps.Series(['z', 'b', 'd', 'a', 'c'])
>>> s
0 z
1 b
2 d
3 a
4 c
dtype: object
>>> s.sort_values()
3 a
1 b
4 c
2 d
0 z
dtype: object
"""
inplace = validate_bool_kwarg(inplace, "inplace")
psdf = self._psdf[[self.name]]._sort(
by=[self.spark.column], ascending=ascending, na_position=na_position
)
if inplace:
self._update_anchor(psdf)
return None
else:
return first_series(psdf)
def sort_index(
self,
axis: Axis = 0,
level: Optional[Union[int, List[int]]] = None,
ascending: bool = True,
inplace: bool = False,
kind: str = None,
na_position: str = "last",
) -> Optional["Series"]:
"""
Sort object by labels (along an axis)
Parameters
----------
axis : index, columns to direct sorting. Currently, only axis = 0 is supported.
level : int or level name or list of ints or list of level names
if not None, sort on values in specified index level(s)
ascending : boolean, default True
Sort ascending vs. descending
inplace : bool, default False
if True, perform operation in-place
kind : str, default None
pandas-on-Spark does not allow specifying the sorting algorithm at the moment,
default None
na_position : {'first', 'last'}, default 'last'
first puts NaNs at the beginning, last puts NaNs at the end. Not implemented for
MultiIndex.
Returns
-------
sorted_obj : Series
Examples
--------
>>> df = ps.Series([2, 1, np.nan], index=['b', 'a', np.nan])
>>> df.sort_index()
a 1.0
b 2.0
NaN NaN
dtype: float64
>>> df.sort_index(ascending=False)
b 2.0
a 1.0
NaN NaN
dtype: float64
>>> df.sort_index(na_position='first')
NaN NaN
a 1.0
b 2.0
dtype: float64
>>> df.sort_index(inplace=True)
>>> df
a 1.0
b 2.0
NaN NaN
dtype: float64
>>> df = ps.Series(range(4), index=[['b', 'b', 'a', 'a'], [1, 0, 1, 0]], name='0')
>>> df.sort_index()
a 0 3
1 2
b 0 1
1 0
Name: 0, dtype: int64
>>> df.sort_index(level=1) # doctest: +SKIP
a 0 3
b 0 1
a 1 2
b 1 0
Name: 0, dtype: int64
>>> df.sort_index(level=[1, 0])
a 0 3
b 0 1
a 1 2
b 1 0
Name: 0, dtype: int64
"""
inplace = validate_bool_kwarg(inplace, "inplace")
psdf = self._psdf[[self.name]].sort_index(
axis=axis, level=level, ascending=ascending, kind=kind, na_position=na_position
)
if inplace:
self._update_anchor(psdf)
return None
else:
return first_series(psdf)
def swaplevel(
self, i: Union[int, Name] = -2, j: Union[int, Name] = -1, copy: bool = True
) -> "Series":
"""
Swap levels i and j in a MultiIndex.
Default is to swap the two innermost levels of the index.
Parameters
----------
i, j : int, str
Level of the indices to be swapped. Can pass level name as string.
copy : bool, default True
Whether to copy underlying data. Must be True.
Returns
-------
Series
Series with levels swapped in MultiIndex.
Examples
--------
>>> midx = pd.MultiIndex.from_arrays([['a', 'b'], [1, 2]], names = ['word', 'number'])
>>> midx # doctest: +SKIP
MultiIndex([('a', 1),
('b', 2)],
names=['word', 'number'])
>>> psser = ps.Series(['x', 'y'], index=midx)
>>> psser
word number
a 1 x
b 2 y
dtype: object
>>> psser.swaplevel()
number word
1 a x
2 b y
dtype: object
>>> psser.swaplevel(0, 1)
number word
1 a x
2 b y
dtype: object
>>> psser.swaplevel('number', 'word')
number word
1 a x
2 b y
dtype: object
"""
assert copy is True
return first_series(self.to_frame().swaplevel(i, j, axis=0)).rename(self.name)
def swapaxes(self, i: Axis, j: Axis, copy: bool = True) -> "Series":
"""
Interchange axes and swap values axes appropriately.
Parameters
----------
i: {0 or 'index', 1 or 'columns'}. The axis to swap.
j: {0 or 'index', 1 or 'columns'}. The axis to swap.
copy : bool, default True.
Returns
-------
Series
Examples
--------
>>> psser = ps.Series([1, 2, 3], index=["x", "y", "z"])
>>> psser
x 1
y 2
z 3
dtype: int64
>>>
>>> psser.swapaxes(0, 0)
x 1
y 2
z 3
dtype: int64
"""
assert copy is True
i = validate_axis(i)
j = validate_axis(j)
if not i == j == 0:
raise ValueError("Axis must be 0 for Series")
return self.copy()
def add_prefix(self, prefix: str) -> "Series":
"""
Prefix labels with string `prefix`.
For Series, the row labels are prefixed.
For DataFrame, the column labels are prefixed.
Parameters
----------
prefix : str
The string to add before each label.
Returns
-------
Series
New Series with updated labels.
See Also
--------
Series.add_suffix: Suffix column labels with string `suffix`.
DataFrame.add_suffix: Suffix column labels with string `suffix`.
DataFrame.add_prefix: Prefix column labels with string `prefix`.
Examples
--------
>>> s = ps.Series([1, 2, 3, 4])
>>> s
0 1
1 2
2 3
3 4
dtype: int64
>>> s.add_prefix('item_')
item_0 1
item_1 2
item_2 3
item_3 4
dtype: int64
"""
assert isinstance(prefix, str)
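# The prefix is concatenated onto each index column on the Spark side; the
# data columns are passed through unchanged.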
internal = self._internal.resolved_copy
sdf = internal.spark_frame.select(
[
F.concat(SF.lit(prefix), index_spark_column).alias(index_spark_column_name)
for index_spark_column, index_spark_column_name in zip(
internal.index_spark_columns, internal.index_spark_column_names
)
]
+ internal.data_spark_columns
)
return first_series(
DataFrame(internal.with_new_sdf(sdf, index_fields=([None] * internal.index_level)))
)
def add_suffix(self, suffix: str) -> "Series":
"""
Suffix labels with string suffix.
For Series, the row labels are suffixed.
For DataFrame, the column labels are suffixed.
Parameters
----------
suffix : str
The string to add after each label.
Returns
-------
Series
New Series with updated labels.
See Also
--------
Series.add_prefix: Prefix row labels with string `prefix`.
DataFrame.add_prefix: Prefix column labels with string `prefix`.
DataFrame.add_suffix: Suffix column labels with string `suffix`.
Examples
--------
>>> s = ps.Series([1, 2, 3, 4])
>>> s
0 1
1 2
2 3
3 4
dtype: int64
>>> s.add_suffix('_item')
0_item 1
1_item 2
2_item 3
3_item 4
dtype: int64
"""
assert isinstance(suffix, str)
internal = self._internal.resolved_copy
sdf = internal.spark_frame.select(
[
F.concat(index_spark_column, SF.lit(suffix)).alias(index_spark_column_name)
for index_spark_column, index_spark_column_name in zip(
internal.index_spark_columns, internal.index_spark_column_names
)
]
+ internal.data_spark_columns
)
return first_series(
DataFrame(internal.with_new_sdf(sdf, index_fields=([None] * internal.index_level)))
)
def corr(self, other: "Series", method: str = "pearson") -> float:
"""
Compute correlation with `other` Series, excluding missing values.
Parameters
----------
other : Series
method : {'pearson', 'spearman'}
* pearson : standard correlation coefficient
* spearman : Spearman rank correlation
Returns
-------
correlation : float
Examples
--------
>>> df = ps.DataFrame({'s1': [.2, .0, .6, .2],
... 's2': [.3, .6, .0, .1]})
>>> s1 = df.s1
>>> s2 = df.s2
>>> s1.corr(s2, method='pearson') # doctest: +ELLIPSIS
-0.851064...
>>> s1.corr(s2, method='spearman') # doctest: +ELLIPSIS
-0.948683...
Notes
-----
There are behavior differences between pandas-on-Spark and pandas.
* the `method` argument only accepts 'pearson', 'spearman'
* the data should not contain NaNs. pandas-on-Spark will return an error.
* pandas-on-Spark doesn't support the following argument(s).
* `min_periods` argument is not supported
"""
# This implementation is suboptimal because it computes more than necessary,
# but it should be a start
columns = ["__corr_arg1__", "__corr_arg2__"]
psdf = self._psdf.assign(__corr_arg1__=self, __corr_arg2__=other)[columns]
psdf.columns = columns
c = corr(psdf, method=method)
return c.loc[tuple(columns)]
def nsmallest(self, n: int = 5) -> "Series":
"""
Return the smallest `n` elements.
Parameters
----------
n : int, default 5
Return this many ascending sorted values.
Returns
-------
Series
The `n` smallest values in the Series, sorted in increasing order.
See Also
--------
Series.nlargest: Get the `n` largest elements.
Series.sort_values: Sort Series by values.
Series.head: Return the first `n` rows.
Notes
-----
Faster than ``.sort_values().head(n)`` for small `n` relative to
the size of the ``Series`` object.
In pandas-on-Spark, thanks to Spark's lazy execution and query optimizer,
the two would have the same performance.
Examples
--------
>>> data = [1, 2, 3, 4, np.nan ,6, 7, 8]
>>> s = ps.Series(data)
>>> s
0 1.0
1 2.0
2 3.0
3 4.0
4 NaN
5 6.0
6 7.0
7 8.0
dtype: float64
The `n` smallest elements where ``n=5`` by default.
>>> s.nsmallest()
0 1.0
1 2.0
2 3.0
3 4.0
5 6.0
dtype: float64
>>> s.nsmallest(3)
0 1.0
1 2.0
2 3.0
dtype: float64
"""
return self.sort_values(ascending=True).head(n)
def nlargest(self, n: int = 5) -> "Series":
"""
Return the largest `n` elements.
Parameters
----------
n : int, default 5
Returns
-------
Series
The `n` largest values in the Series, sorted in decreasing order.
See Also
--------
Series.nsmallest: Get the `n` smallest elements.
Series.sort_values: Sort Series by values.
Series.head: Return the first `n` rows.
Notes
-----
Faster than ``.sort_values(ascending=False).head(n)`` for small `n`
relative to the size of the ``Series`` object.
In pandas-on-Spark, thanks to Spark's lazy execution and query optimizer,
the two would have the same performance.
Examples
--------
>>> data = [1, 2, 3, 4, np.nan ,6, 7, 8]
>>> s = ps.Series(data)
>>> s
0 1.0
1 2.0
2 3.0
3 4.0
4 NaN
5 6.0
6 7.0
7 8.0
dtype: float64
The `n` largest elements where ``n=5`` by default.
>>> s.nlargest()
7 8.0
6 7.0
5 6.0
3 4.0
2 3.0
dtype: float64
>>> s.nlargest(n=3)
7 8.0
6 7.0
5 6.0
dtype: float64
"""
return self.sort_values(ascending=False).head(n)
def append(
self, to_append: "Series", ignore_index: bool = False, verify_integrity: bool = False
) -> "Series":
"""
Concatenate two or more Series.
Parameters
----------
to_append : Series or list/tuple of Series
ignore_index : boolean, default False
If True, do not use the index labels.
verify_integrity : boolean, default False
If True, raise Exception on creating index with duplicates
Returns
-------
appended : Series
Examples
--------
>>> s1 = ps.Series([1, 2, 3])
>>> s2 = ps.Series([4, 5, 6])
>>> s3 = ps.Series([4, 5, 6], index=[3,4,5])
>>> s1.append(s2)
0 1
1 2
2 3
0 4
1 5
2 6
dtype: int64
>>> s1.append(s3)
0 1
1 2
2 3
3 4
4 5
5 6
dtype: int64
With ignore_index set to True:
>>> s1.append(s2, ignore_index=True)
0 1
1 2
2 3
3 4
4 5
5 6
dtype: int64
"""
return first_series(
self.to_frame().append(to_append.to_frame(), ignore_index, verify_integrity)
).rename(self.name)
def sample(
self,
n: Optional[int] = None,
frac: Optional[float] = None,
replace: bool = False,
random_state: Optional[int] = None,
) -> "Series":
return first_series(
self.to_frame().sample(n=n, frac=frac, replace=replace, random_state=random_state)
).rename(self.name)
sample.__doc__ = DataFrame.sample.__doc__
@no_type_check
def hist(self, bins=10, **kwds):
return self.plot.hist(bins, **kwds)
hist.__doc__ = PandasOnSparkPlotAccessor.hist.__doc__
def apply(self, func: Callable, args: Sequence[Any] = (), **kwds: Any) -> "Series":
"""
Invoke function on values of Series.
Can be a Python function that only works on the Series.
.. note:: this API executes the function once to infer the type which is
potentially expensive, for instance, when the dataset is created after
aggregations or sorting.
To avoid this, specify return type in ``func``, for instance, as below:
>>> def square(x) -> np.int32:
... return x ** 2
pandas-on-Spark uses return type hint and does not try to infer the type.
Parameters
----------
func : function
Python function to apply. Note that type hint for return type is required.
args : tuple
Positional arguments passed to func after the series value.
**kwds
Additional keyword arguments passed to func.
Returns
-------
Series
See Also
--------
Series.aggregate : Only perform aggregating type operations.
Series.transform : Only perform transforming type operations.
DataFrame.apply : The equivalent function for DataFrame.
Examples
--------
Create a Series with typical summer temperatures for each city.
>>> s = ps.Series([20, 21, 12],
... index=['London', 'New York', 'Helsinki'])
>>> s
London 20
New York 21
Helsinki 12
dtype: int64
Square the values by defining a function and passing it as an
argument to ``apply()``.
>>> def square(x) -> np.int64:
... return x ** 2
>>> s.apply(square)
London 400
New York 441
Helsinki 144
dtype: int64
Define a custom function that needs additional positional
arguments and pass these additional arguments using the
``args`` keyword
>>> def subtract_custom_value(x, custom_value) -> np.int64:
... return x - custom_value
>>> s.apply(subtract_custom_value, args=(5,))
London 15
New York 16
Helsinki 7
dtype: int64
Define a custom function that takes keyword arguments
and pass these arguments to ``apply``
>>> def add_custom_values(x, **kwargs) -> np.int64:
... for month in kwargs:
... x += kwargs[month]
... return x
>>> s.apply(add_custom_values, june=30, july=20, august=25)
London 95
New York 96
Helsinki 87
dtype: int64
Use a function from the Numpy library
>>> def numpy_log(col) -> np.float64:
... return np.log(col)
>>> s.apply(numpy_log)
London 2.995732
New York 3.044522
Helsinki 2.484907
dtype: float64
You can omit the type hint and let pandas-on-Spark infer its type.
>>> s.apply(np.log)
London 2.995732
New York 3.044522
Helsinki 2.484907
dtype: float64
"""
assert callable(func), "the first argument should be a callable function."
try:
spec = inspect.getfullargspec(func)
return_sig = spec.annotations.get("return", None)
should_infer_schema = return_sig is None
except TypeError:
# Falls back to schema inference if it fails to get signature.
should_infer_schema = True
def apply_each(s: Any) -> pd.Series:
return s.apply(func, args=args, **kwds)
if should_infer_schema:
return self.pandas_on_spark._transform_batch(apply_each, None)
else:
sig_return = infer_return_type(func)
if not isinstance(sig_return, ScalarType):
raise ValueError(
"Expected the return type of this function to be of scalar type, "
"but found type {}".format(sig_return)
)
return_type = sig_return
return self.pandas_on_spark._transform_batch(apply_each, return_type)
# TODO: not all arguments are implemented comparing to pandas' for now.
def aggregate(self, func: Union[str, List[str]]) -> Union[Scalar, "Series"]:
"""Aggregate using one or more operations over the specified axis.
Parameters
----------
func : str or a list of str
function name(s) as string apply to series.
Returns
-------
scalar, Series
The return can be:
- scalar : when Series.agg is called with single function
- Series : when Series.agg is called with several functions
Notes
-----
`agg` is an alias for `aggregate`. Use the alias.
See Also
--------
Series.apply : Invoke function on a Series.
Series.transform : Only perform transforming type operations.
Series.groupby : Perform operations over groups.
DataFrame.aggregate : The equivalent function for DataFrame.
Examples
--------
>>> s = ps.Series([1, 2, 3, 4])
>>> s.agg('min')
1
>>> s.agg(['min', 'max']).sort_index()
max 4
min 1
dtype: int64
"""
if isinstance(func, list):
return first_series(self.to_frame().aggregate(func)).rename(self.name)
elif isinstance(func, str):
return getattr(self, func)()
else:
raise TypeError("func must be a string or list of strings")
agg = aggregate
def transpose(self, *args: Any, **kwargs: Any) -> "Series":
"""
Return the transpose, which is by definition self.
Examples
--------
It returns the same object as the transpose of the given series object, which is by
definition self.
>>> s = ps.Series([1, 2, 3])
>>> s
0 1
1 2
2 3
dtype: int64
>>> s.transpose()
0 1
1 2
2 3
dtype: int64
"""
return self.copy()
T = property(transpose)
def transform(
self, func: Union[Callable, List[Callable]], axis: Axis = 0, *args: Any, **kwargs: Any
) -> Union["Series", DataFrame]:
"""
Call ``func`` producing the same type as `self` with transformed values
and that has the same axis length as input.
.. note:: this API executes the function once to infer the type which is
potentially expensive, for instance, when the dataset is created after
aggregations or sorting.
To avoid this, specify return type in ``func``, for instance, as below:
>>> def square(x) -> np.int32:
... return x ** 2
pandas-on-Spark uses return type hint and does not try to infer the type.
Parameters
----------
func : function or list
A function or a list of functions to use for transforming the data.
axis : int, default 0 or 'index'
Can only be set to 0 at the moment.
*args
Positional arguments to pass to `func`.
**kwargs
Keyword arguments to pass to `func`.
Returns
-------
An instance of the same type with `self` that must have the same length as input.
See Also
--------
Series.aggregate : Only perform aggregating type operations.
Series.apply : Invoke function on Series.
DataFrame.transform : The equivalent function for DataFrame.
Examples
--------
>>> s = ps.Series(range(3))
>>> s
0 0
1 1
2 2
dtype: int64
>>> def sqrt(x) -> float:
... return np.sqrt(x)
>>> s.transform(sqrt)
0 0.000000
1 1.000000
2 1.414214
dtype: float64
Even though the resulting instance must have the same length as the
input, it is possible to provide several input functions:
>>> def exp(x) -> float:
... return np.exp(x)
>>> s.transform([sqrt, exp])
sqrt exp
0 0.000000 1.000000
1 1.000000 2.718282
2 1.414214 7.389056
You can omit the type hint and let pandas-on-Spark infer its type.
>>> s.transform([np.sqrt, np.exp])
sqrt exp
0 0.000000 1.000000
1 1.000000 2.718282
2 1.414214 7.389056
"""
axis = validate_axis(axis)
if axis != 0:
raise NotImplementedError('axis should be either 0 or "index" currently.')
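# A list of functions produces a DataFrame with one column per function
# (named after each function); a single function preserves the Series shape.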
if isinstance(func, list):
applied = []
for f in func:
applied.append(self.apply(f, args=args, **kwargs).rename(f.__name__))
internal = self._internal.with_new_columns(applied)
return DataFrame(internal)
else:
return self.apply(func, args=args, **kwargs)
def round(self, decimals: int = 0) -> "Series":
"""
Round each value in a Series to the given number of decimals.
Parameters
----------
decimals : int
Number of decimal places to round to (default: 0).
If decimals is negative, it specifies the number of
positions to the left of the decimal point.
Returns
-------
Series object
See Also
--------
DataFrame.round
Examples
--------
>>> df = ps.Series([0.028208, 0.038683, 0.877076], name='x')
>>> df
0 0.028208
1 0.038683
2 0.877076
Name: x, dtype: float64
>>> df.round(2)
0 0.03
1 0.04
2 0.88
Name: x, dtype: float64
"""
if not isinstance(decimals, int):
raise TypeError("decimals must be an integer")
scol = F.round(self.spark.column, decimals)
return self._with_new_scol(
scol,
field=(
self._internal.data_fields[0].copy(nullable=True)
if not isinstance(self.spark.data_type, DecimalType)
else None
),
)
# TODO: add 'interpolation' parameter.
def quantile(
self, q: Union[float, Iterable[float]] = 0.5, accuracy: int = 10000
) -> Union[Scalar, "Series"]:
"""
Return value at the given quantile.
.. note:: Unlike pandas', the quantile in pandas-on-Spark is an approximated quantile
based upon approximate percentile computation because computing quantile across
a large dataset is extremely expensive.
Parameters
----------
q : float or array-like, default 0.5 (50% quantile)
0 <= q <= 1, the quantile(s) to compute.
accuracy : int, optional
Default accuracy of approximation. Larger value means better accuracy.
The relative error can be deduced by 1.0 / accuracy.
Returns
-------
float or Series
If the current object is a Series and ``q`` is an array, a Series will be
returned where the index is ``q`` and the values are the quantiles, otherwise
a float will be returned.
Examples
--------
>>> s = ps.Series([1, 2, 3, 4, 5])
>>> s.quantile(.5)
3.0
>>> (s + 1).quantile(.5)
4.0
>>> s.quantile([.25, .5, .75])
0.25 2.0
0.50 3.0
0.75 4.0
dtype: float64
>>> (s + 1).quantile([.25, .5, .75])
0.25 3.0
0.50 4.0
0.75 5.0
dtype: float64
"""
if isinstance(q, Iterable):
return first_series(
cast(
"ps.DataFrame",
self.to_frame().quantile(q=q, axis=0, numeric_only=False, accuracy=accuracy),
)
).rename(self.name)
else:
if not isinstance(accuracy, int):
raise TypeError(
"accuracy must be an integer; however, got [%s]" % type(accuracy).__name__
)
if not isinstance(q, float):
raise TypeError(
"q must be a float or an array of floats; however, [%s] found." % type(q)
)
q_float = q
if q_float < 0.0 or q_float > 1.0:
raise ValueError("percentiles should all be in the interval [0, 1].")
def quantile(psser: Series) -> Column:
spark_type = psser.spark.data_type
spark_column = psser.spark.column
if isinstance(spark_type, (BooleanType, NumericType)):
return F.percentile_approx(spark_column.cast(DoubleType()), q_float, accuracy)
else:
raise TypeError(
"Could not convert {} ({}) to numeric".format(
spark_type_to_pandas_dtype(spark_type), spark_type.simpleString()
)
)
return self._reduce_for_stat_function(quantile, name="quantile")
# TODO: add axis, numeric_only, pct, na_option parameter
def rank(self, method: str = "average", ascending: bool = True) -> "Series":
"""
Compute numerical data ranks (1 through n) along axis. Equal values are
assigned a rank that is the average of the ranks of those values.
.. note:: the current implementation of rank uses Spark's Window without
specifying partition specification. This leads to move all data into
single partition in single machine and could cause serious
performance degradation. Avoid this method against very large dataset.
Parameters
----------
method : {'average', 'min', 'max', 'first', 'dense'}
* average: average rank of group
* min: lowest rank in group
* max: highest rank in group
* first: ranks assigned in order they appear in the array
* dense: like 'min', but rank always increases by 1 between groups
ascending : boolean, default True
False for ranks by high (1) to low (N)
Returns
-------
ranks : same type as caller
Examples
--------
>>> s = ps.Series([1, 2, 2, 3], name='A')
>>> s
0 1
1 2
2 2
3 3
Name: A, dtype: int64
>>> s.rank()
0 1.0
1 2.5
2 2.5
3 4.0
Name: A, dtype: float64
If method is set to 'min', it uses the lowest rank in the group.
>>> s.rank(method='min')
0 1.0
1 2.0
2 2.0
3 4.0
Name: A, dtype: float64
If method is set to 'max', it uses the highest rank in the group.
>>> s.rank(method='max')
0 1.0
1 3.0
2 3.0
3 4.0
Name: A, dtype: float64
If method is set to 'first', ranks are assigned in the order the values appear, without grouping.
>>> s.rank(method='first')
0 1.0
1 2.0
2 3.0
3 4.0
Name: A, dtype: float64
If method is set to 'dense', it leaves no gaps between groups.
>>> s.rank(method='dense')
0 1.0
1 2.0
2 2.0
3 3.0
Name: A, dtype: float64
"""
return self._rank(method, ascending).spark.analyzed
def _rank(
self,
method: str = "average",
ascending: bool = True,
*,
part_cols: Sequence["ColumnOrName"] = (),
) -> "Series":
if method not in ["average", "min", "max", "first", "dense"]:
msg = "method must be one of 'average', 'min', 'max', 'first', 'dense'"
raise ValueError(msg)
if self._internal.index_level > 1:
raise NotImplementedError("rank do not support MultiIndex now")
if ascending:
asc_func = Column.asc
else:
asc_func = Column.desc
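# 'first' and 'dense' map directly to Spark's row_number/dense_rank; 'average',
# 'min' and 'max' aggregate the row numbers of tied values (mean/min/max) over
# a window partitioned by the value itself.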
if method == "first":
window = (
Window.orderBy(
asc_func(self.spark.column),
asc_func(F.col(NATURAL_ORDER_COLUMN_NAME)),
)
.partitionBy(*part_cols)
.rowsBetween(Window.unboundedPreceding, Window.currentRow)
)
scol = F.row_number().over(window)
elif method == "dense":
window = (
Window.orderBy(asc_func(self.spark.column))
.partitionBy(*part_cols)
.rowsBetween(Window.unboundedPreceding, Window.currentRow)
)
scol = F.dense_rank().over(window)
else:
if method == "average":
stat_func = F.mean
elif method == "min":
stat_func = F.min
elif method == "max":
stat_func = F.max
window1 = (
Window.orderBy(asc_func(self.spark.column))
.partitionBy(*part_cols)
.rowsBetween(Window.unboundedPreceding, Window.currentRow)
)
window2 = Window.partitionBy(
cast("List[ColumnOrName]", [self.spark.column]) + list(part_cols)
).rowsBetween(Window.unboundedPreceding, Window.unboundedFollowing)
scol = stat_func(F.row_number().over(window1)).over(window2)
return self._with_new_scol(scol.cast(DoubleType()))
def filter(
self,
items: Optional[Sequence[Any]] = None,
like: Optional[str] = None,
regex: Optional[str] = None,
axis: Optional[Axis] = None,
) -> "Series":
axis = validate_axis(axis)
if axis == 1:
raise ValueError("Series does not support columns axis.")
return first_series(
self.to_frame().filter(items=items, like=like, regex=regex, axis=axis),
).rename(self.name)
filter.__doc__ = DataFrame.filter.__doc__
def describe(self, percentiles: Optional[List[float]] = None) -> "Series":
return first_series(self.to_frame().describe(percentiles)).rename(self.name)
describe.__doc__ = DataFrame.describe.__doc__
def diff(self, periods: int = 1) -> "Series":
"""
First discrete difference of element.
Calculates the difference of a Series element compared with another element in the
DataFrame (default is the element in the same column of the previous row).
.. note:: the current implementation of diff uses Spark's Window without
specifying partition specification. This leads to move all data into
single partition in single machine and could cause serious
performance degradation. Avoid this method against very large dataset.
Parameters
----------
periods : int, default 1
Periods to shift for calculating difference, accepts negative values.
Returns
-------
diffed : Series
Examples
--------
>>> df = ps.DataFrame({'a': [1, 2, 3, 4, 5, 6],
... 'b': [1, 1, 2, 3, 5, 8],
... 'c': [1, 4, 9, 16, 25, 36]}, columns=['a', 'b', 'c'])
>>> df
a b c
0 1 1 1
1 2 1 4
2 3 2 9
3 4 3 16
4 5 5 25
5 6 8 36
>>> df.b.diff()
0 NaN
1 0.0
2 1.0
3 1.0
4 2.0
5 3.0
Name: b, dtype: float64
Difference with previous value
>>> df.c.diff(periods=3)
0 NaN
1 NaN
2 NaN
3 15.0
4 21.0
5 27.0
Name: c, dtype: float64
Difference with following value
>>> df.c.diff(periods=-1)
0 -3.0
1 -5.0
2 -7.0
3 -9.0
4 -11.0
5 NaN
Name: c, dtype: float64
"""
return self._diff(periods).spark.analyzed
def _diff(self, periods: int, *, part_cols: Sequence["ColumnOrName"] = ()) -> "Series":
if not isinstance(periods, int):
raise TypeError("periods should be an int; however, got [%s]" % type(periods).__name__)
window = (
Window.partitionBy(*part_cols)
.orderBy(NATURAL_ORDER_COLUMN_NAME)
.rowsBetween(-periods, -periods)
)
scol = self.spark.column - F.lag(self.spark.column, periods).over(window)
return self._with_new_scol(scol, field=self._internal.data_fields[0].copy(nullable=True))
def idxmax(self, skipna: bool = True) -> Union[Tuple, Any]:
"""
Return the row label of the maximum value.
If multiple values equal the maximum, the first row label with that
value is returned.
Parameters
----------
skipna : bool, default True
Exclude NA/null values. If the entire Series is NA, the result
will be NA.
Returns
-------
Index
Label of the maximum value.
Raises
------
ValueError
If the Series is empty.
See Also
--------
Series.idxmin : Return index *label* of the first occurrence
of minimum of values.
Examples
--------
>>> s = ps.Series(data=[1, None, 4, 3, 5],
... index=['A', 'B', 'C', 'D', 'E'])
>>> s
A 1.0
B NaN
C 4.0
D 3.0
E 5.0
dtype: float64
>>> s.idxmax()
'E'
If `skipna` is False and there is an NA value in the data,
the function returns ``nan``.
>>> s.idxmax(skipna=False)
nan
In case of multi-index, you get a tuple:
>>> index = pd.MultiIndex.from_arrays([
... ['a', 'a', 'b', 'b'], ['c', 'd', 'e', 'f']], names=('first', 'second'))
>>> s = ps.Series(data=[1, None, 4, 5], index=index)
>>> s
first second
a c 1.0
d NaN
b e 4.0
f 5.0
dtype: float64
>>> s.idxmax()
('b', 'f')
If multiple values equal the maximum, the first row label with that
value is returned.
>>> s = ps.Series([1, 100, 1, 100, 1, 100], index=[10, 3, 5, 2, 1, 8])
>>> s
10 1
3 100
5 1
2 100
1 1
8 100
dtype: int64
>>> s.idxmax()
3
"""
sdf = self._internal.spark_frame
scol = self.spark.column
index_scols = self._internal.index_spark_columns
if skipna:
sdf = sdf.orderBy(scol.desc_nulls_last(), NATURAL_ORDER_COLUMN_NAME)
else:
sdf = sdf.orderBy(scol.desc_nulls_first(), NATURAL_ORDER_COLUMN_NAME)
results = sdf.select([scol] + index_scols).take(1)
if len(results) == 0:
raise ValueError("attempt to get idxmin of an empty sequence")
if results[0][0] is None:
# This will only happen when skipna is False because we will
# place nulls first.
return np.nan
values = list(results[0][1:])
if len(values) == 1:
return values[0]
else:
return tuple(values)
def idxmin(self, skipna: bool = True) -> Union[Tuple, Any]:
"""
Return the row label of the minimum value.
If multiple values equal the minimum, the first row label with that
value is returned.
Parameters
----------
skipna : bool, default True
Exclude NA/null values. If the entire Series is NA, the result
will be NA.
Returns
-------
Index
Label of the minimum value.
Raises
------
ValueError
If the Series is empty.
See Also
--------
Series.idxmax : Return index *label* of the first occurrence
of maximum of values.
Notes
-----
This method is the Series version of ``ndarray.argmin``. This method
returns the label of the minimum, while ``ndarray.argmin`` returns
the position. To get the position, use ``series.values.argmin()``.
Examples
--------
>>> s = ps.Series(data=[1, None, 4, 0],
... index=['A', 'B', 'C', 'D'])
>>> s
A 1.0
B NaN
C 4.0
D 0.0
dtype: float64
>>> s.idxmin()
'D'
If `skipna` is False and there is an NA value in the data,
the function returns ``nan``.
>>> s.idxmin(skipna=False)
nan
In case of multi-index, you get a tuple:
>>> index = pd.MultiIndex.from_arrays([
... ['a', 'a', 'b', 'b'], ['c', 'd', 'e', 'f']], names=('first', 'second'))
>>> s = ps.Series(data=[1, None, 4, 0], index=index)
>>> s
first second
a c 1.0
d NaN
b e 4.0
f 0.0
dtype: float64
>>> s.idxmin()
('b', 'f')
If multiple values equal the minimum, the first row label with that
value is returned.
>>> s = ps.Series([1, 100, 1, 100, 1, 100], index=[10, 3, 5, 2, 1, 8])
>>> s
10 1
3 100
5 1
2 100
1 1
8 100
dtype: int64
>>> s.idxmin()
10
"""
sdf = self._internal.spark_frame
scol = self.spark.column
index_scols = self._internal.index_spark_columns
if skipna:
sdf = sdf.orderBy(scol.asc_nulls_last(), NATURAL_ORDER_COLUMN_NAME)
else:
sdf = sdf.orderBy(scol.asc_nulls_first(), NATURAL_ORDER_COLUMN_NAME)
results = sdf.select([scol] + index_scols).take(1)
if len(results) == 0:
raise ValueError("attempt to get idxmin of an empty sequence")
if results[0][0] is None:
# This will only happen when skipna is False because we will
# place nulls first.
return np.nan
values = list(results[0][1:])
if len(values) == 1:
return values[0]
else:
return tuple(values)
def pop(self, item: Name) -> Union["Series", Scalar]:
"""
Return item and drop from series.
Parameters
----------
item : label
Label of index to be popped.
Returns
-------
Value that is popped from series.
Examples
--------
>>> s = ps.Series(data=np.arange(3), index=['A', 'B', 'C'])
>>> s
A 0
B 1
C 2
dtype: int64
>>> s.pop('A')
0
>>> s
B 1
C 2
dtype: int64
>>> s = ps.Series(data=np.arange(3), index=['A', 'A', 'C'])
>>> s
A 0
A 1
C 2
dtype: int64
>>> s.pop('A')
A 0
A 1
dtype: int64
>>> s
C 2
dtype: int64
Also support for MultiIndex
>>> midx = pd.MultiIndex([['lama', 'cow', 'falcon'],
... ['speed', 'weight', 'length']],
... [[0, 0, 0, 1, 1, 1, 2, 2, 2],
... [0, 1, 2, 0, 1, 2, 0, 1, 2]])
>>> s = ps.Series([45, 200, 1.2, 30, 250, 1.5, 320, 1, 0.3],
... index=midx)
>>> s
lama speed 45.0
weight 200.0
length 1.2
cow speed 30.0
weight 250.0
length 1.5
falcon speed 320.0
weight 1.0
length 0.3
dtype: float64
>>> s.pop('lama')
speed 45.0
weight 200.0
length 1.2
dtype: float64
>>> s
cow speed 30.0
weight 250.0
length 1.5
falcon speed 320.0
weight 1.0
length 0.3
dtype: float64
Also support for MultiIndex with several index levels.
>>> midx = pd.MultiIndex([['a', 'b', 'c'],
... ['lama', 'cow', 'falcon'],
... ['speed', 'weight', 'length']],
... [[0, 0, 0, 0, 0, 0, 1, 1, 1],
... [0, 0, 0, 1, 1, 1, 2, 2, 2],
... [0, 1, 2, 0, 1, 2, 0, 0, 2]]
... )
>>> s = ps.Series([45, 200, 1.2, 30, 250, 1.5, 320, 1, 0.3],
... index=midx)
>>> s
a lama speed 45.0
weight 200.0
length 1.2
cow speed 30.0
weight 250.0
length 1.5
b falcon speed 320.0
speed 1.0
length 0.3
dtype: float64
>>> s.pop(('a', 'lama'))
speed 45.0
weight 200.0
length 1.2
dtype: float64
>>> s
a cow speed 30.0
weight 250.0
length 1.5
b falcon speed 320.0
speed 1.0
length 0.3
dtype: float64
>>> s.pop(('b', 'falcon', 'speed'))
(b, falcon, speed) 320.0
(b, falcon, speed) 1.0
dtype: float64
"""
if not is_name_like_value(item):
raise TypeError("'key' should be string or tuple that contains strings")
if not is_name_like_tuple(item):
item = (item,)
if self._internal.index_level < len(item):
raise KeyError(
"Key length ({}) exceeds index depth ({})".format(
len(item), self._internal.index_level
)
)
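# Select the rows matching `item` (these form the returned value) and drop
# them from the anchored frame.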
internal = self._internal
scols = internal.index_spark_columns[len(item) :] + [self.spark.column]
rows = [internal.spark_columns[level] == index for level, index in enumerate(item)]
sdf = internal.spark_frame.filter(reduce(lambda x, y: x & y, rows)).select(scols)
psdf = self._drop(item)
self._update_anchor(psdf)
if self._internal.index_level == len(item):
# if spark_frame has one column and one data, return data only without frame
pdf = sdf.limit(2).toPandas()
length = len(pdf)
if length == 1:
val = pdf[internal.data_spark_column_names[0]].iloc[0]
if isinstance(self.dtype, CategoricalDtype):
return self.dtype.categories[val]
else:
return val
item_string = name_like_string(item)
sdf = sdf.withColumn(SPARK_DEFAULT_INDEX_NAME, SF.lit(str(item_string)))
internal = InternalFrame(
spark_frame=sdf,
index_spark_columns=[scol_for(sdf, SPARK_DEFAULT_INDEX_NAME)],
column_labels=[self._column_label],
data_fields=[self._internal.data_fields[0]],
)
return first_series(DataFrame(internal))
else:
internal = internal.copy(
spark_frame=sdf,
index_spark_columns=[
scol_for(sdf, col) for col in internal.index_spark_column_names[len(item) :]
],
index_fields=internal.index_fields[len(item) :],
index_names=self._internal.index_names[len(item) :],
data_spark_columns=[scol_for(sdf, internal.data_spark_column_names[0])],
)
return first_series(DataFrame(internal))
def copy(self, deep: bool = True) -> "Series":
"""
Make a copy of this object's indices and data.
Parameters
----------
deep : bool, default True
this parameter is not supported; it is just a dummy parameter to match pandas.
Returns
-------
copy : Series
Examples
--------
>>> s = ps.Series([1, 2], index=["a", "b"])
>>> s
a 1
b 2
dtype: int64
>>> s_copy = s.copy()
>>> s_copy
a 1
b 2
dtype: int64
"""
return first_series(DataFrame(self._internal))
def mode(self, dropna: bool = True) -> "Series":
"""
Return the mode(s) of the dataset.
Always returns Series even if only one value is returned.
Parameters
----------
dropna : bool, default True
Don't consider counts of NaN/NaT.
Returns
-------
Series
Modes of the Series.
Examples
--------
>>> s = ps.Series([0, 0, 1, 1, 1, np.nan, np.nan, np.nan])
>>> s
0 0.0
1 0.0
2 1.0
3 1.0
4 1.0
5 NaN
6 NaN
7 NaN
dtype: float64
>>> s.mode()
0 1.0
dtype: float64
If there are multiple modes with the same count, all of them are shown
>>> s = ps.Series([0, 0, 1, 1, 1, 2, 2, 2, 3, 3, 3,
... np.nan, np.nan, np.nan])
>>> s
0 0.0
1 0.0
2 1.0
3 1.0
4 1.0
5 2.0
6 2.0
7 2.0
8 3.0
9 3.0
10 3.0
11 NaN
12 NaN
13 NaN
dtype: float64
>>> s.mode().sort_values() # doctest: +NORMALIZE_WHITESPACE, +ELLIPSIS
<BLANKLINE>
... 1.0
... 2.0
... 3.0
dtype: float64
With 'dropna' set to 'False', we can also see NaN in the result
>>> s.mode(False).sort_values() # doctest: +NORMALIZE_WHITESPACE, +ELLIPSIS
<BLANKLINE>
... 1.0
... 2.0
... 3.0
... NaN
dtype: float64
"""
ser_count = self.value_counts(dropna=dropna, sort=False)
sdf_count = ser_count._internal.spark_frame
most_value = ser_count.max()
sdf_most_value = sdf_count.filter("count == {}".format(most_value))
sdf = sdf_most_value.select(
F.col(SPARK_DEFAULT_INDEX_NAME).alias(SPARK_DEFAULT_SERIES_NAME)
)
internal = InternalFrame(spark_frame=sdf, index_spark_columns=None, column_labels=[None])
return first_series(DataFrame(internal))
def keys(self) -> "ps.Index":
"""
Return alias for index.
Returns
-------
Index
Index of the Series.
Examples
--------
>>> midx = pd.MultiIndex([['lama', 'cow', 'falcon'],
... ['speed', 'weight', 'length']],
... [[0, 0, 0, 1, 1, 1, 2, 2, 2],
... [0, 1, 2, 0, 1, 2, 0, 1, 2]])
>>> psser = ps.Series([45, 200, 1.2, 30, 250, 1.5, 320, 1, 0.3], index=midx)
>>> psser.keys() # doctest: +SKIP
MultiIndex([( 'lama', 'speed'),
( 'lama', 'weight'),
( 'lama', 'length'),
( 'cow', 'speed'),
( 'cow', 'weight'),
( 'cow', 'length'),
('falcon', 'speed'),
('falcon', 'weight'),
('falcon', 'length')],
)
"""
return self.index
# TODO: introduce 'method', 'limit', 'in_place'; fully support 'regex'
def replace(
self,
to_replace: Optional[Union[Any, List, Tuple, Dict]] = None,
value: Optional[Union[List, Tuple]] = None,
regex: Union[str, bool] = False,
) -> "Series":
"""
Replace values given in to_replace with value.
Values of the Series are replaced with other values dynamically.
.. note:: For partial pattern matching, the replacement is against the whole string,
which is different from pandas'. That is due to the nature of the underlying Spark API.
Parameters
----------
to_replace : str, list, tuple, dict, Series, int, float, or None
How to find the values that will be replaced.
* numeric, str:
- numeric: numeric values equal to to_replace will be replaced with value
- str: string exactly matching to_replace will be replaced with value
* list of str or numeric:
- if to_replace and value are both lists or tuples, they must be the same length.
- str and numeric rules apply as above.
* dict:
- Dicts can be used to specify different replacement values for different
existing values.
For example, {'a': 'b', 'y': 'z'} replaces the value 'a' with 'b' and 'y'
with 'z'. To use a dict in this way the value parameter should be None.
- For a DataFrame a dict can specify that different values should be replaced
in different columns. For example, {'a': 1, 'b': 'z'} looks for the value 1
in column 'a' and the value 'z' in column 'b' and replaces these values with
whatever is specified in value.
The value parameter should not be None in this case.
You can treat this as a special case of passing two lists except that you are
specifying the column to search in.
See the examples section for examples of each of these.
value : scalar, dict, list, tuple, str, default None
Value to replace any values matching to_replace with.
For a DataFrame a dict of values can be used to specify which value to use
for each column (columns not in the dict will not be filled).
Regular expressions, strings and lists or dicts of such objects are also allowed.
regex: bool or str, default False
Whether to interpret to_replace and/or value as regular expressions.
If this is True then to_replace must be a string.
Alternatively, this could be a regular expression in which case to_replace must be None.
Returns
-------
Series
Object after replacement.
Examples
--------
Scalar `to_replace` and `value`
>>> s = ps.Series([0, 1, 2, 3, 4])
>>> s
0 0
1 1
2 2
3 3
4 4
dtype: int64
>>> s.replace(0, 5)
0 5
1 1
2 2
3 3
4 4
dtype: int64
List-like `to_replace`
>>> s.replace([0, 4], 5000)
0 5000
1 1
2 2
3 3
4 5000
dtype: int64
>>> s.replace([1, 2, 3], [10, 20, 30])
0 0
1 10
2 20
3 30
4 4
dtype: int64
Dict-like `to_replace`
>>> s.replace({1: 1000, 2: 2000, 3: 3000, 4: 4000})
0 0
1 1000
2 2000
3 3000
4 4000
dtype: int64
Also support for MultiIndex
>>> midx = pd.MultiIndex([['lama', 'cow', 'falcon'],
... ['speed', 'weight', 'length']],
... [[0, 0, 0, 1, 1, 1, 2, 2, 2],
... [0, 1, 2, 0, 1, 2, 0, 1, 2]])
>>> s = ps.Series([45, 200, 1.2, 30, 250, 1.5, 320, 1, 0.3],
... index=midx)
>>> s
lama speed 45.0
weight 200.0
length 1.2
cow speed 30.0
weight 250.0
length 1.5
falcon speed 320.0
weight 1.0
length 0.3
dtype: float64
>>> s.replace(45, 450)
lama speed 450.0
weight 200.0
length 1.2
cow speed 30.0
weight 250.0
length 1.5
falcon speed 320.0
weight 1.0
length 0.3
dtype: float64
>>> s.replace([45, 30, 320], 500)
lama speed 500.0
weight 200.0
length 1.2
cow speed 500.0
weight 250.0
length 1.5
falcon speed 500.0
weight 1.0
length 0.3
dtype: float64
>>> s.replace({45: 450, 30: 300})
lama speed 450.0
weight 200.0
length 1.2
cow speed 300.0
weight 250.0
length 1.5
falcon speed 320.0
weight 1.0
length 0.3
dtype: float64
Regular expression `to_replace`
>>> psser = ps.Series(['bat', 'foo', 'bait', 'abc', 'bar', 'zoo'])
>>> psser.replace(to_replace=r'^ba.$', value='new', regex=True)
0 new
1 foo
2 bait
3 abc
4 new
5 zoo
dtype: object
>>> psser.replace(value='new', regex=r'^.oo$')
0 bat
1 new
2 bait
3 abc
4 bar
5 new
dtype: object
For partial pattern matching, the replacement is against the whole string
>>> psser.replace('ba', 'xx', regex=True)
0 xx
1 foo
2 xx
3 abc
4 xx
5 zoo
dtype: object
"""
if isinstance(regex, str):
if to_replace is not None:
raise ValueError("'to_replace' must be 'None' if 'regex' is not a bool")
to_replace = regex
regex = True
elif not isinstance(regex, bool):
raise NotImplementedError("'regex' of %s type is not supported" % type(regex).__name__)
elif regex is True:
assert isinstance(
to_replace, str
), "If 'regex' is True then 'to_replace' must be a string"
if to_replace is None:
return self.fillna(method="ffill")
if not isinstance(to_replace, (str, list, tuple, dict, int, float)):
raise TypeError("'to_replace' should be one of str, list, tuple, dict, int, float")
to_replace = list(to_replace) if isinstance(to_replace, tuple) else to_replace
value = list(value) if isinstance(value, tuple) else value
if isinstance(to_replace, list) and isinstance(value, list):
if not len(to_replace) == len(value):
raise ValueError(
"Replacement lists must match in length. Expecting {} got {}".format(
len(to_replace), len(value)
)
)
to_replace = {k: v for k, v in zip(to_replace, value)}
if isinstance(to_replace, dict):
is_start = True
if len(to_replace) == 0:
current = self.spark.column
else:
for to_replace_, value in to_replace.items():
cond = (
(F.isnan(self.spark.column) | self.spark.column.isNull())
if pd.isna(to_replace_)
else (self.spark.column == SF.lit(to_replace_))
)
if is_start:
current = F.when(cond, value)
is_start = False
else:
current = current.when(cond, value)
current = current.otherwise(self.spark.column)
else:
if regex:
# to_replace must be a string
cond = self.spark.column.rlike(to_replace)
else:
cond = self.spark.column.isin(to_replace)
# to_replace may be a scalar
if np.array(pd.isna(to_replace)).any():
cond = cond | F.isnan(self.spark.column) | self.spark.column.isNull()
current = F.when(cond, value).otherwise(self.spark.column)
return self._with_new_scol(current) # TODO: dtype?
def update(self, other: "Series") -> None:
"""
Modify Series in place using non-NA values from passed Series. Aligns on index.
Parameters
----------
other : Series
Examples
--------
>>> from pyspark.pandas.config import set_option, reset_option
>>> set_option("compute.ops_on_diff_frames", True)
>>> s = ps.Series([1, 2, 3])
>>> s.update(ps.Series([4, 5, 6]))
>>> s.sort_index()
0 4
1 5
2 6
dtype: int64
>>> s = ps.Series(['a', 'b', 'c'])
>>> s.update(ps.Series(['d', 'e'], index=[0, 2]))
>>> s.sort_index()
0 d
1 b
2 e
dtype: object
>>> s = ps.Series([1, 2, 3])
>>> s.update(ps.Series([4, 5, 6, 7, 8]))
>>> s.sort_index()
0 4
1 5
2 6
dtype: int64
>>> s = ps.Series([1, 2, 3], index=[10, 11, 12])
>>> s
10 1
11 2
12 3
dtype: int64
>>> s.update(ps.Series([4, 5, 6]))
>>> s.sort_index()
10 1
11 2
12 3
dtype: int64
>>> s.update(ps.Series([4, 5, 6], index=[11, 12, 13]))
>>> s.sort_index()
10 1
11 4
12 5
dtype: int64
If ``other`` contains NaNs the corresponding values are not updated
in the original Series.
>>> s = ps.Series([1, 2, 3])
>>> s.update(ps.Series([4, np.nan, 6]))
>>> s.sort_index()
0 4.0
1 2.0
2 6.0
dtype: float64
>>> reset_option("compute.ops_on_diff_frames")
"""
if not isinstance(other, Series):
raise TypeError("'other' must be a Series")
if same_anchor(self, other):
scol = (
F.when(other.spark.column.isNotNull(), other.spark.column)
.otherwise(self.spark.column)
.alias(self._psdf._internal.spark_column_name_for(self._column_label))
)
internal = self._psdf._internal.with_new_spark_column(
self._column_label, scol # TODO: dtype?
)
self._psdf._update_internal_frame(internal)
else:
combined = combine_frames(self._psdf, other._psdf, how="leftouter")
this_scol = combined["this"]._internal.spark_column_for(self._column_label)
that_scol = combined["that"]._internal.spark_column_for(other._column_label)
scol = (
F.when(that_scol.isNotNull(), that_scol)
.otherwise(this_scol)
.alias(self._psdf._internal.spark_column_name_for(self._column_label))
)
internal = combined["this"]._internal.with_new_spark_column(
self._column_label, scol # TODO: dtype?
)
self._psdf._update_internal_frame(internal.resolved_copy, requires_same_anchor=False)
def where(self, cond: "Series", other: Any = np.nan) -> "Series":
"""
Replace values where the condition is False.
Parameters
----------
cond : boolean Series
Where cond is True, keep the original value. Where False,
replace with corresponding value from other.
other : scalar, Series
Entries where cond is False are replaced with corresponding value from other.
Returns
-------
Series
Examples
--------
>>> from pyspark.pandas.config import set_option, reset_option
>>> set_option("compute.ops_on_diff_frames", True)
>>> s1 = ps.Series([0, 1, 2, 3, 4])
>>> s2 = ps.Series([100, 200, 300, 400, 500])
>>> s1.where(s1 > 0).sort_index()
0 NaN
1 1.0
2 2.0
3 3.0
4 4.0
dtype: float64
>>> s1.where(s1 > 1, 10).sort_index()
0 10
1 10
2 2
3 3
4 4
dtype: int64
>>> s1.where(s1 > 1, s1 + 100).sort_index()
0 100
1 101
2 2
3 3
4 4
dtype: int64
>>> s1.where(s1 > 1, s2).sort_index()
0 100
1 200
2 2
3 3
4 4
dtype: int64
>>> reset_option("compute.ops_on_diff_frames")
"""
assert isinstance(cond, Series)
# We should check the DataFrame from both `cond` and `other`.
should_try_ops_on_diff_frame = not same_anchor(cond, self) or (
isinstance(other, Series) and not same_anchor(other, self)
)
if should_try_ops_on_diff_frame:
# Try to perform it with 'compute.ops_on_diff_frame' option.
psdf = self.to_frame()
tmp_cond_col = verify_temp_column_name(psdf, "__tmp_cond_col__")
tmp_other_col = verify_temp_column_name(psdf, "__tmp_other_col__")
psdf[tmp_cond_col] = cond
psdf[tmp_other_col] = other
# above logic makes a Spark DataFrame looks like below:
# +-----------------+---+----------------+-----------------+
# |__index_level_0__| 0|__tmp_cond_col__|__tmp_other_col__|
# +-----------------+---+----------------+-----------------+
# | 0| 0| false| 100|
# | 1| 1| false| 200|
# | 3| 3| true| 400|
# | 2| 2| true| 300|
# | 4| 4| true| 500|
# +-----------------+---+----------------+-----------------+
condition = (
F.when(
psdf[tmp_cond_col].spark.column,
psdf._psser_for(psdf._internal.column_labels[0]).spark.column,
)
.otherwise(psdf[tmp_other_col].spark.column)
.alias(psdf._internal.data_spark_column_names[0])
)
internal = psdf._internal.with_new_columns(
[condition], column_labels=self._internal.column_labels
)
return first_series(DataFrame(internal))
else:
if isinstance(other, Series):
other = other.spark.column
condition = (
F.when(cond.spark.column, self.spark.column)
.otherwise(other)
.alias(self._internal.data_spark_column_names[0])
)
return self._with_new_scol(condition)
def mask(self, cond: "Series", other: Any = np.nan) -> "Series":
"""
Replace values where the condition is True.
Parameters
----------
cond : boolean Series
Where cond is False, keep the original value. Where True,
replace with corresponding value from other.
other : scalar, Series
Entries where cond is True are replaced with corresponding value from other.
Returns
-------
Series
Examples
--------
>>> from pyspark.pandas.config import set_option, reset_option
>>> set_option("compute.ops_on_diff_frames", True)
>>> s1 = ps.Series([0, 1, 2, 3, 4])
>>> s2 = ps.Series([100, 200, 300, 400, 500])
>>> s1.mask(s1 > 0).sort_index()
0 0.0
1 NaN
2 NaN
3 NaN
4 NaN
dtype: float64
>>> s1.mask(s1 > 1, 10).sort_index()
0 0
1 1
2 10
3 10
4 10
dtype: int64
>>> s1.mask(s1 > 1, s1 + 100).sort_index()
0 0
1 1
2 102
3 103
4 104
dtype: int64
>>> s1.mask(s1 > 1, s2).sort_index()
0 0
1 1
2 300
3 400
4 500
dtype: int64
>>> reset_option("compute.ops_on_diff_frames")
"""
return self.where(~cond, other)
def xs(self, key: Name, level: Optional[int] = None) -> "Series":
"""
Return cross-section from the Series.
This method takes a `key` argument to select data at a particular
level of a MultiIndex.
Parameters
----------
key : label or tuple of label
Label contained in the index, or partially in a MultiIndex.
level : object, defaults to first n levels (n=1 or len(key))
In case of a key partially contained in a MultiIndex, indicate
which levels are used. Levels can be referred by label or position.
Returns
-------
Series
Cross-section from the original Series
corresponding to the selected index levels.
Examples
--------
>>> midx = pd.MultiIndex([['a', 'b', 'c'],
... ['lama', 'cow', 'falcon'],
... ['speed', 'weight', 'length']],
... [[0, 0, 0, 1, 1, 1, 2, 2, 2],
... [0, 0, 0, 1, 1, 1, 2, 2, 2],
... [0, 1, 2, 0, 1, 2, 0, 1, 2]])
>>> s = ps.Series([45, 200, 1.2, 30, 250, 1.5, 320, 1, 0.3],
... index=midx)
>>> s
a lama speed 45.0
weight 200.0
length 1.2
b cow speed 30.0
weight 250.0
length 1.5
c falcon speed 320.0
weight 1.0
length 0.3
dtype: float64
Get values at specified index
>>> s.xs('a')
lama speed 45.0
weight 200.0
length 1.2
dtype: float64
Get values at several indexes
>>> s.xs(('a', 'lama'))
speed 45.0
weight 200.0
length 1.2
dtype: float64
Get values at specified index and level
>>> s.xs('lama', level=1)
a speed 45.0
weight 200.0
length 1.2
dtype: float64
"""
if not isinstance(key, tuple):
key = (key,)
if level is None:
level = 0
internal = self._internal
scols = (
internal.index_spark_columns[:level]
+ internal.index_spark_columns[level + len(key) :]
+ [self.spark.column]
)
rows = [internal.spark_columns[lvl] == index for lvl, index in enumerate(key, level)]
sdf = internal.spark_frame.filter(reduce(lambda x, y: x & y, rows)).select(scols)
if internal.index_level == len(key):
# if the key selects a single row, return the scalar value instead of a Series
pdf = sdf.limit(2).toPandas()
length = len(pdf)
if length == 1:
return pdf[self._internal.data_spark_column_names[0]].iloc[0]
index_spark_column_names = (
internal.index_spark_column_names[:level]
+ internal.index_spark_column_names[level + len(key) :]
)
index_names = internal.index_names[:level] + internal.index_names[level + len(key) :]
index_fields = internal.index_fields[:level] + internal.index_fields[level + len(key) :]
internal = internal.copy(
spark_frame=sdf,
index_spark_columns=[scol_for(sdf, col) for col in index_spark_column_names],
index_names=index_names,
index_fields=index_fields,
data_spark_columns=[scol_for(sdf, internal.data_spark_column_names[0])],
)
return first_series(DataFrame(internal))
def pct_change(self, periods: int = 1) -> "Series":
"""
Percentage change between the current and a prior element.
.. note:: the current implementation of this API uses Spark's Window without
specifying a partition specification. This moves all data into a
single partition on a single machine and could cause serious
performance degradation. Avoid this method on very large datasets.
Parameters
----------
periods : int, default 1
Periods to shift for forming percent change.
Returns
-------
Series
Examples
--------
>>> psser = ps.Series([90, 91, 85], index=[2, 4, 1])
>>> psser
2 90
4 91
1 85
dtype: int64
>>> psser.pct_change()
2 NaN
4 0.011111
1 -0.065934
dtype: float64
>>> psser.sort_index().pct_change()
1 NaN
2 0.058824
4 0.011111
dtype: float64
>>> psser.pct_change(periods=2)
2 NaN
4 NaN
1 -0.055556
dtype: float64
"""
scol = self.spark.column
window = Window.orderBy(NATURAL_ORDER_COLUMN_NAME).rowsBetween(-periods, -periods)
prev_row = F.lag(scol, periods).over(window)
return self._with_new_scol((scol - prev_row) / prev_row).spark.analyzed
def combine_first(self, other: "Series") -> "Series":
"""
Combine Series values, choosing the calling Series's values first.
Parameters
----------
other : Series
The value(s) to be combined with the `Series`.
Returns
-------
Series
The result of combining the Series with the other object.
See Also
--------
Series.combine : Perform elementwise operation on two Series
using a given function.
Notes
-----
Result index will be the union of the two indexes.
Examples
--------
>>> s1 = ps.Series([1, np.nan])
>>> s2 = ps.Series([3, 4])
>>> with ps.option_context("compute.ops_on_diff_frames", True):
... s1.combine_first(s2)
0 1.0
1 4.0
dtype: float64
"""
if not isinstance(other, ps.Series):
raise TypeError("`combine_first` only allows `Series` for parameter `other`")
if same_anchor(self, other):
this = self.spark.column
that = other.spark.column
combined = self._psdf
else:
combined = combine_frames(self._psdf, other._psdf)
this = combined["this"]._internal.spark_column_for(self._column_label)
that = combined["that"]._internal.spark_column_for(other._column_label)
# If `self` has missing value, use value of `other`
cond = F.when(this.isNull(), that).otherwise(this)
# If `self` and `other` come from same frame, the anchor should be kept
if same_anchor(self, other):
return self._with_new_scol(cond) # TODO: dtype?
index_scols = combined._internal.index_spark_columns
sdf = combined._internal.spark_frame.select(
*index_scols, cond.alias(self._internal.data_spark_column_names[0])
).distinct()
internal = self._internal.with_new_sdf(
sdf, index_fields=combined._internal.index_fields, data_fields=[None] # TODO: dtype?
)
return first_series(DataFrame(internal))
def dot(self, other: Union["Series", DataFrame]) -> Union[Scalar, "Series"]:
"""
Compute the dot product between the Series and the columns of other.
This method computes the dot product between the Series and another
one, or the Series and each columns of a DataFrame.
It can also be called using `self @ other` in Python >= 3.5.
.. note:: This API behaves slightly differently from pandas when the indexes of the two Series
are not aligned and config 'compute.eager_check' is False. pandas raises an exception;
pandas-on-Spark, however, proceeds with the computation, permissively treating
mismatched index entries as NaN.
>>> pdf1 = pd.Series([1, 2, 3], index=[0, 1, 2])
>>> pdf2 = pd.Series([1, 2, 3], index=[0, 1, 3])
>>> pdf1.dot(pdf2) # doctest: +SKIP
...
ValueError: matrices are not aligned
>>> psdf1 = ps.Series([1, 2, 3], index=[0, 1, 2])
>>> psdf2 = ps.Series([1, 2, 3], index=[0, 1, 3])
>>> with ps.option_context("compute.eager_check", False):
... psdf1.dot(psdf2) # doctest: +SKIP
...
5
Parameters
----------
other : Series, DataFrame.
The other object to compute the dot product with its columns.
Returns
-------
scalar, Series
Return the dot product of the Series and other if other is a
Series, the Series of the dot product of Series and each rows of
other if other is a DataFrame.
Notes
-----
The Series and other has to share the same index if other is a Series
or a DataFrame.
Examples
--------
>>> s = ps.Series([0, 1, 2, 3])
>>> s.dot(s)
14
>>> s @ s
14
>>> psdf = ps.DataFrame({'x': [0, 1, 2, 3], 'y': [0, -1, -2, -3]})
>>> psdf
x y
0 0 0
1 1 -1
2 2 -2
3 3 -3
>>> with ps.option_context("compute.ops_on_diff_frames", True):
... s.dot(psdf)
...
x 14
y -14
dtype: int64
"""
if not same_anchor(self, other):
if get_option("compute.eager_check") and not self.index.sort_values().equals(
other.index.sort_values()
):
raise ValueError("matrices are not aligned")
elif len(self.index) != len(other.index):
raise ValueError("matrices are not aligned")
if isinstance(other, DataFrame):
other_copy: DataFrame = other.copy()
column_labels = other_copy._internal.column_labels
self_column_label = verify_temp_column_name(other_copy, "__self_column__")
other_copy[self_column_label] = self
self_psser = other_copy._psser_for(self_column_label)
product_pssers = [
cast(Series, other_copy._psser_for(label) * self_psser) for label in column_labels
]
dot_product_psser = DataFrame(
other_copy._internal.with_new_columns(product_pssers, column_labels=column_labels)
).sum()
return cast(Series, dot_product_psser).rename(self.name)
else:
assert isinstance(other, Series)
return (self * other).sum()
def __matmul__(self, other: Union["Series", DataFrame]) -> Union[Scalar, "Series"]:
"""
Matrix multiplication using binary `@` operator in Python>=3.5.
"""
return self.dot(other)
def repeat(self, repeats: Union[int, "Series"]) -> "Series":
"""
Repeat elements of a Series.
Returns a new Series where each element of the current Series
is repeated consecutively a given number of times.
Parameters
----------
repeats : int or Series
The number of repetitions for each element. This should be a
non-negative integer. Repeating 0 times will return an empty
Series.
Returns
-------
Series
Newly created Series with repeated elements.
See Also
--------
Index.repeat : Equivalent function for Index.
Examples
--------
>>> s = ps.Series(['a', 'b', 'c'])
>>> s
0 a
1 b
2 c
dtype: object
>>> s.repeat(2)
0 a
1 b
2 c
0 a
1 b
2 c
dtype: object
>>> ps.Series([1, 2, 3]).repeat(0)
Series([], dtype: int64)
"""
if not isinstance(repeats, (int, Series)):
raise TypeError(
"`repeats` argument must be integer or Series, but got {}".format(type(repeats))
)
if isinstance(repeats, Series):
if not same_anchor(self, repeats):
psdf = self.to_frame()
temp_repeats = verify_temp_column_name(psdf, "__temp_repeats__")
psdf[temp_repeats] = repeats
return (
psdf._psser_for(psdf._internal.column_labels[0])
.repeat(psdf[temp_repeats])
.rename(self.name)
)
else:
scol = F.explode(
F.array_repeat(self.spark.column, repeats.astype("int32").spark.column)
).alias(name_like_string(self.name))
sdf = self._internal.spark_frame.select(self._internal.index_spark_columns + [scol])
internal = self._internal.copy(
spark_frame=sdf,
index_spark_columns=[
scol_for(sdf, col) for col in self._internal.index_spark_column_names
],
data_spark_columns=[scol_for(sdf, name_like_string(self.name))],
)
return first_series(DataFrame(internal))
else:
if repeats < 0:
raise ValueError("negative dimensions are not allowed")
psdf = self._psdf[[self.name]]
if repeats == 0:
return first_series(DataFrame(psdf._internal.with_filter(SF.lit(False))))
else:
return first_series(cast("ps.DataFrame", ps.concat([psdf] * repeats)))
def asof(self, where: Union[Any, List]) -> Union[Scalar, "Series"]:
"""
Return the last row(s) without any NaNs before `where`.
The last row (for each element in `where`, if list) without any
NaN is taken.
If there is no good value, NaN is returned.
.. note:: This API is dependent on :meth:`Index.is_monotonic_increasing`
which is expensive.
Parameters
----------
where : index or array-like of indices
Returns
-------
scalar or Series
The return can be:
* scalar : when `self` is a Series and `where` is a scalar
* Series: when `self` is a Series and `where` is an array-like
Return scalar or Series
Notes
-----
Indices are assumed to be sorted. Raises if this is not the case and config
'compute.eager_check' is True. If 'compute.eager_check' is False, pandas-on-Spark just
proceeds with the lookup, ignoring the order of the index.
Examples
--------
>>> s = ps.Series([1, 2, np.nan, 4], index=[10, 20, 30, 40])
>>> s
10 1.0
20 2.0
30 NaN
40 4.0
dtype: float64
A scalar `where`.
>>> s.asof(20)
2.0
For a sequence `where`, a Series is returned. The first value is
NaN, because the first element of `where` is before the first
index value.
>>> s.asof([5, 20]).sort_index()
5 NaN
20 2.0
dtype: float64
Missing values are not considered. The following is ``2.0``, not
NaN, even though NaN is at the index location for ``30``.
>>> s.asof(30)
2.0
>>> s = ps.Series([1, 2, np.nan, 4], index=[10, 30, 20, 40])
>>> with ps.option_context("compute.eager_check", False):
... s.asof(20)
...
1.0
"""
should_return_series = True
if isinstance(self.index, ps.MultiIndex):
raise ValueError("asof is not supported for a MultiIndex")
if isinstance(where, (ps.Index, ps.Series, DataFrame)):
raise ValueError("where cannot be an Index, Series or a DataFrame")
if get_option("compute.eager_check") and not self.index.is_monotonic_increasing:
raise ValueError("asof requires a sorted index")
if not is_list_like(where):
should_return_series = False
where = [where]
internal = self._internal.resolved_copy
index_scol = internal.index_spark_columns[0]
index_type = internal.spark_type_for(index_scol)
spark_column = internal.data_spark_columns[0]
monotonically_increasing_id_column = verify_temp_column_name(
internal.spark_frame, "__monotonically_increasing_id__"
)
cond = [
F.max_by(
spark_column,
F.when(
(index_scol <= SF.lit(index).cast(index_type)) & spark_column.isNotNull()
if pd.notna(index)
# If the index is NaN and the value of the column is not null,
# return monotonically_increasing_id. This lets max_by
# return the last index value, which is the behaviour of pandas.
else spark_column.isNotNull(),
monotonically_increasing_id_column,
),
)
for index in where
]
sdf = internal.spark_frame.withColumn(
monotonically_increasing_id_column, F.monotonically_increasing_id()
).select(cond)
if not should_return_series:
with sql_conf({SPARK_CONF_ARROW_ENABLED: False}):
# Disable Arrow to keep row ordering.
result = sdf.limit(1).toPandas().iloc[0, 0]
return result if result is not None else np.nan
# The data is expected to be small so it's fine to transpose/use default index.
with ps.option_context("compute.default_index_type", "distributed", "compute.max_rows", 1):
if len(where) == len(set(where)) and not isinstance(index_type, TimestampType):
psdf: DataFrame = DataFrame(sdf)
psdf.columns = pd.Index(where)
return first_series(psdf.transpose()).rename(self.name)
else:
# If `where` has duplicate items, fall back to pandas directly,
# since the pandas API on Spark doesn't support duplicate column names.
pdf: pd.DataFrame = sdf.limit(1).toPandas()
pdf.columns = pd.Index(where)
return first_series(DataFrame(pdf.transpose())).rename(self.name)
def mad(self) -> float:
"""
Return the mean absolute deviation of values.
Examples
--------
>>> s = ps.Series([1, 2, 3, 4])
>>> s
0 1
1 2
2 3
3 4
dtype: int64
>>> s.mad()
1.0
"""
sdf = self._internal.spark_frame
spark_column = self.spark.column
avg = unpack_scalar(sdf.select(F.avg(spark_column)))
mad = unpack_scalar(sdf.select(F.avg(F.abs(spark_column - avg))))
return mad
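# The two aggregations above implement the pandas definition
#   mad = mean(|x - mean(x)|)
# Worked example: for [1, 2, 3, 4] the mean is 2.5, the absolute deviations are
# [1.5, 0.5, 0.5, 1.5], so mad = 1.0 (matching the doctest above).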
def unstack(self, level: int = -1) -> DataFrame:
"""
Unstack, a.k.a. pivot, Series with MultiIndex to produce DataFrame.
The level involved will automatically get sorted.
Notes
-----
Unlike pandas, pandas-on-Spark doesn't check whether the index is duplicated
because checking for duplicated indexes requires scanning the whole data, which
can be quite expensive.
Parameters
----------
level : int, str, or list of these, default last level
Level(s) to unstack, can pass level name.
Returns
-------
DataFrame
Unstacked Series.
Examples
--------
>>> s = ps.Series([1, 2, 3, 4],
... index=pd.MultiIndex.from_product([['one', 'two'],
... ['a', 'b']]))
>>> s
one a 1
b 2
two a 3
b 4
dtype: int64
>>> s.unstack(level=-1).sort_index()
a b
one 1 2
two 3 4
>>> s.unstack(level=0).sort_index()
one two
a 1 3
b 2 4
"""
if not isinstance(self.index, ps.MultiIndex):
raise ValueError("Series.unstack only support for a MultiIndex")
index_nlevels = self.index.nlevels
if level > 0 and (level > index_nlevels - 1):
raise IndexError(
"Too many levels: Index has only {} levels, not {}".format(index_nlevels, level + 1)
)
elif level < 0 and (level < -index_nlevels):
raise IndexError(
"Too many levels: Index has only {} levels, {} is not a valid level number".format(
index_nlevels, level
)
)
internal = self._internal.resolved_copy
index_map = list(
zip(internal.index_spark_column_names, internal.index_names, internal.index_fields)
)
pivot_col, column_label_names, _ = index_map.pop(level)
index_scol_names, index_names, index_fields = zip(*index_map)
col = internal.data_spark_column_names[0]
sdf = internal.spark_frame
sdf = sdf.groupby(list(index_scol_names)).pivot(pivot_col).agg(F.first(scol_for(sdf, col)))
internal = InternalFrame(
spark_frame=sdf,
index_spark_columns=[scol_for(sdf, col) for col in index_scol_names],
index_names=list(index_names),
index_fields=list(index_fields),
column_label_names=[column_label_names],
)
internal = internal.copy(
data_fields=[
field.copy(dtype=self._internal.data_fields[0].dtype)
for field in internal.data_fields
]
)
return DataFrame(internal)
def item(self) -> Scalar:
"""
Return the first element of the underlying data as a Python scalar.
Returns
-------
scalar
The first element of Series.
Raises
------
ValueError
If the data is not length-1.
Examples
--------
>>> psser = ps.Series([10])
>>> psser.item()
10
"""
return self.head(2)._to_internal_pandas().item()
def iteritems(self) -> Iterable[Tuple[Name, Any]]:
"""
Lazily iterate over (index, value) tuples.
This method returns an iterable tuple (index, value). This is
convenient if you want to create a lazy iterator.
.. note:: Unlike pandas, iteritems in pandas-on-Spark returns a generator rather
than a zip object.
Returns
-------
iterable
Iterable of tuples containing the (index, value) pairs from a
Series.
See Also
--------
DataFrame.items : Iterate over (column name, Series) pairs.
DataFrame.iterrows : Iterate over DataFrame rows as (index, Series) pairs.
Examples
--------
>>> s = ps.Series(['A', 'B', 'C'])
>>> for index, value in s.items():
... print("Index : {}, Value : {}".format(index, value))
Index : 0, Value : A
Index : 1, Value : B
Index : 2, Value : C
"""
internal_index_columns = self._internal.index_spark_column_names
internal_data_column = self._internal.data_spark_column_names[0]
def extract_kv_from_spark_row(row: Row) -> Tuple[Name, Any]:
k = (
row[internal_index_columns[0]]
if len(internal_index_columns) == 1
else tuple(row[c] for c in internal_index_columns)
)
v = row[internal_data_column]
return k, v
for k, v in map(
extract_kv_from_spark_row, self._internal.resolved_copy.spark_frame.toLocalIterator()
):
yield k, v
def items(self) -> Iterable[Tuple[Name, Any]]:
"""This is an alias of ``iteritems``."""
return self.iteritems()
def droplevel(self, level: Union[int, Name, List[Union[int, Name]]]) -> "Series":
"""
Return Series with requested index level(s) removed.
Parameters
----------
level : int, str, or list-like
If a string is given, must be the name of a level
If list-like, elements must be names or positional indexes
of levels.
Returns
-------
Series
Series with requested index level(s) removed.
Examples
--------
>>> psser = ps.Series(
... [1, 2, 3],
... index=pd.MultiIndex.from_tuples(
... [("x", "a"), ("x", "b"), ("y", "c")], names=["level_1", "level_2"]
... ),
... )
>>> psser
level_1 level_2
x a 1
b 2
y c 3
dtype: int64
Removing specific index level by level
>>> psser.droplevel(0)
level_2
a 1
b 2
c 3
dtype: int64
Removing specific index level by name
>>> psser.droplevel("level_2")
level_1
x 1
x 2
y 3
dtype: int64
"""
return first_series(self.to_frame().droplevel(level=level, axis=0)).rename(self.name)
def tail(self, n: int = 5) -> "Series":
"""
Return the last `n` rows.
This function returns last `n` rows from the object based on
position. It is useful for quickly verifying data, for example,
after sorting or appending rows.
For negative values of `n`, this function returns all rows except
the first `n` rows, equivalent to ``df[n:]``.
Parameters
----------
n : int, default 5
Number of rows to select.
Returns
-------
type of caller
The last `n` rows of the caller object.
See Also
--------
DataFrame.head : The first `n` rows of the caller object.
Examples
--------
>>> psser = ps.Series([1, 2, 3, 4, 5])
>>> psser
0 1
1 2
2 3
3 4
4 5
dtype: int64
>>> psser.tail(3) # doctest: +SKIP
2 3
3 4
4 5
dtype: int64
"""
return first_series(self.to_frame().tail(n=n)).rename(self.name)
def explode(self) -> "Series":
"""
Transform each element of a list-like to a row.
Returns
-------
Series
Exploded lists to rows; index will be duplicated for these rows.
See Also
--------
Series.str.split : Split string values on specified separator.
Series.unstack : Unstack, a.k.a. pivot, Series with MultiIndex
to produce DataFrame.
DataFrame.melt : Unpivot a DataFrame from wide format to long format.
DataFrame.explode : Explode a DataFrame from list-like
columns to long format.
Examples
--------
>>> psser = ps.Series([[1, 2, 3], [], [3, 4]])
>>> psser
0 [1, 2, 3]
1 []
2 [3, 4]
dtype: object
>>> psser.explode() # doctest: +SKIP
0 1.0
0 2.0
0 3.0
1 NaN
2 3.0
2 4.0
dtype: float64
"""
if not isinstance(self.spark.data_type, ArrayType):
return self.copy()
scol = F.explode_outer(self.spark.column).alias(name_like_string(self._column_label))
internal = self._internal.with_new_columns([scol], keep_order=False)
return first_series(DataFrame(internal))
def argsort(self) -> "Series":
"""
Return the integer indices that would sort the Series values.
Unlike pandas, the index order is not preserved in the result.
Returns
-------
Series
Positions of values within the sort order with -1 indicating
nan values.
Examples
--------
>>> psser = ps.Series([3, 3, 4, 1, 6, 2, 3, 7, 8, 7, 10])
>>> psser
0 3
1 3
2 4
3 1
4 6
5 2
6 3
7 7
8 8
9 7
10 10
dtype: int64
>>> psser.argsort().sort_index()
0 3
1 5
2 0
3 1
4 6
5 2
6 4
7 7
8 9
9 8
10 10
dtype: int64
"""
notnull = self.loc[self.notnull()]
sdf_for_index = notnull._internal.spark_frame.select(notnull._internal.index_spark_columns)
tmp_join_key = verify_temp_column_name(sdf_for_index, "__tmp_join_key__")
sdf_for_index = InternalFrame.attach_distributed_sequence_column(
sdf_for_index, tmp_join_key
)
# sdf_for_index:
# +----------------+-----------------+
# |__tmp_join_key__|__index_level_0__|
# +----------------+-----------------+
# | 0| 0|
# | 1| 1|
# | 2| 2|
# | 3| 3|
# | 4| 4|
# +----------------+-----------------+
sdf_for_data = notnull._internal.spark_frame.select(
notnull.spark.column.alias("values"), NATURAL_ORDER_COLUMN_NAME
)
sdf_for_data = InternalFrame.attach_distributed_sequence_column(
sdf_for_data, SPARK_DEFAULT_SERIES_NAME
)
# sdf_for_data:
# +---+------+-----------------+
# | 0|values|__natural_order__|
# +---+------+-----------------+
# | 0| 3| 25769803776|
# | 1| 3| 51539607552|
# | 2| 4| 77309411328|
# | 3| 1| 103079215104|
# | 4| 2| 128849018880|
# +---+------+-----------------+
sdf_for_data = sdf_for_data.sort(
scol_for(sdf_for_data, "values"), NATURAL_ORDER_COLUMN_NAME
).drop("values", NATURAL_ORDER_COLUMN_NAME)
tmp_join_key = verify_temp_column_name(sdf_for_data, "__tmp_join_key__")
sdf_for_data = InternalFrame.attach_distributed_sequence_column(sdf_for_data, tmp_join_key)
# sdf_for_index: sdf_for_data:
# +----------------+-----------------+ +----------------+---+
# |__tmp_join_key__|__index_level_0__| |__tmp_join_key__| 0|
# +----------------+-----------------+ +----------------+---+
# | 0| 0| | 0| 3|
# | 1| 1| | 1| 4|
# | 2| 2| | 2| 0|
# | 3| 3| | 3| 1|
# | 4| 4| | 4| 2|
# +----------------+-----------------+ +----------------+---+
sdf = sdf_for_index.join(sdf_for_data, on=tmp_join_key).drop(tmp_join_key)
internal = self._internal.with_new_sdf(
spark_frame=sdf,
data_columns=[SPARK_DEFAULT_SERIES_NAME],
index_fields=[
InternalField(dtype=field.dtype) for field in self._internal.index_fields
],
data_fields=[None],
)
psser = first_series(DataFrame(internal))
return cast(
Series,
ps.concat([psser, self.loc[self.isnull()].spark.transform(lambda _: SF.lit(-1))]),
)
def argmax(self) -> int:
"""
Return int position of the largest value in the Series.
If the maximum is achieved in multiple locations,
the first row position is returned.
Returns
-------
int
Row position of the maximum value.
Examples
--------
Consider dataset containing cereal calories
>>> s = ps.Series({'Corn Flakes': 100.0, 'Almond Delight': 110.0,
... 'Cinnamon Toast Crunch': 120.0, 'Cocoa Puff': 110.0})
>>> s # doctest: +SKIP
Corn Flakes 100.0
Almond Delight 110.0
Cinnamon Toast Crunch 120.0
Cocoa Puff 110.0
dtype: float64
>>> s.argmax() # doctest: +SKIP
2
"""
sdf = self._internal.spark_frame.select(self.spark.column, NATURAL_ORDER_COLUMN_NAME)
max_value = sdf.select(
F.max(scol_for(sdf, self._internal.data_spark_column_names[0])),
F.first(NATURAL_ORDER_COLUMN_NAME),
).head()
if max_value[1] is None:
raise ValueError("attempt to get argmax of an empty sequence")
elif max_value[0] is None:
return -1
# We should remember the natural sequence started from 0
seq_col_name = verify_temp_column_name(sdf, "__distributed_sequence_column__")
sdf = InternalFrame.attach_distributed_sequence_column(
sdf.drop(NATURAL_ORDER_COLUMN_NAME), seq_col_name
)
# If the maximum is achieved in multiple locations, the first row position is returned.
return sdf.filter(
scol_for(sdf, self._internal.data_spark_column_names[0]) == max_value[0]
).head()[0]
def argmin(self) -> int:
"""
Return int position of the smallest value in the Series.
If the minimum is achieved in multiple locations,
the first row position is returned.
Returns
-------
int
Row position of the minimum value.
Examples
--------
Consider dataset containing cereal calories
>>> s = ps.Series({'Corn Flakes': 100.0, 'Almond Delight': 110.0,
... 'Cinnamon Toast Crunch': 120.0, 'Cocoa Puff': 110.0})
>>> s # doctest: +SKIP
Corn Flakes 100.0
Almond Delight 110.0
Cinnamon Toast Crunch 120.0
Cocoa Puff 110.0
dtype: float64
>>> s.argmin() # doctest: +SKIP
0
"""
sdf = self._internal.spark_frame.select(self.spark.column, NATURAL_ORDER_COLUMN_NAME)
min_value = sdf.select(
F.min(scol_for(sdf, self._internal.data_spark_column_names[0])),
F.first(NATURAL_ORDER_COLUMN_NAME),
).head()
if min_value[1] is None:
raise ValueError("attempt to get argmin of an empty sequence")
elif min_value[0] is None:
return -1
# We should remember the natural sequence started from 0
seq_col_name = verify_temp_column_name(sdf, "__distributed_sequence_column__")
sdf = InternalFrame.attach_distributed_sequence_column(
sdf.drop(NATURAL_ORDER_COLUMN_NAME), seq_col_name
)
# If the minimum is achieved in multiple locations, the first row position is returned.
return sdf.filter(
scol_for(sdf, self._internal.data_spark_column_names[0]) == min_value[0]
).head()[0]
def compare(
self, other: "Series", keep_shape: bool = False, keep_equal: bool = False
) -> DataFrame:
"""
Compare to another Series and show the differences.
.. note:: This API behaves slightly differently from pandas when the indexes of the two Series
are not identical and config 'compute.eager_check' is False. pandas raises an exception;
pandas-on-Spark, however, proceeds with the comparison, ignoring the mismatches.
>>> psser1 = ps.Series([1, 2, 3, 4, 5], index=pd.Index([1, 2, 3, 4, 5]))
>>> psser2 = ps.Series([1, 2, 3, 4, 5], index=pd.Index([1, 2, 4, 3, 6]))
>>> psser1.compare(psser2) # doctest: +SKIP
...
ValueError: Can only compare identically-labeled Series objects
>>> with ps.option_context("compute.eager_check", False):
... psser1.compare(psser2) # doctest: +SKIP
...
self other
3 3.0 4.0
4 4.0 3.0
5 5.0 NaN
6 NaN 5.0
Parameters
----------
other : Series
Object to compare with.
keep_shape : bool, default False
If true, all rows and columns are kept.
Otherwise, only the ones with different values are kept.
keep_equal : bool, default False
If true, the result keeps values that are equal.
Otherwise, equal values are shown as NaNs.
Returns
-------
DataFrame
Notes
-----
Matching NaNs will not appear as a difference.
Examples
--------
>>> from pyspark.pandas.config import set_option, reset_option
>>> set_option("compute.ops_on_diff_frames", True)
>>> s1 = ps.Series(["a", "b", "c", "d", "e"])
>>> s2 = ps.Series(["a", "a", "c", "b", "e"])
Align the differences on columns
>>> s1.compare(s2).sort_index()
self other
1 b a
3 d b
Keep all original rows
>>> s1.compare(s2, keep_shape=True).sort_index()
self other
0 None None
1 b a
2 None None
3 d b
4 None None
Keep all original rows and also all original values
>>> s1.compare(s2, keep_shape=True, keep_equal=True).sort_index()
self other
0 a a
1 b a
2 c c
3 d b
4 e e
>>> reset_option("compute.ops_on_diff_frames")
"""
combined: DataFrame
if same_anchor(self, other):
self_column_label = verify_temp_column_name(other.to_frame(), "__self_column__")
other_column_label = verify_temp_column_name(self.to_frame(), "__other_column__")
combined = DataFrame(
self._internal.with_new_columns(
[self.rename(self_column_label), other.rename(other_column_label)]
)
)
else:
if get_option("compute.eager_check") and not self.index.equals(other.index):
raise ValueError("Can only compare identically-labeled Series objects")
combined = combine_frames(self.to_frame(), other.to_frame())
this_column_label = "self"
that_column_label = "other"
if keep_equal and keep_shape:
combined.columns = pd.Index([this_column_label, that_column_label])
return combined
this_data_scol = combined._internal.data_spark_columns[0]
that_data_scol = combined._internal.data_spark_columns[1]
index_scols = combined._internal.index_spark_columns
sdf = combined._internal.spark_frame
if keep_shape:
this_scol = (
F.when(this_data_scol == that_data_scol, None)
.otherwise(this_data_scol)
.alias(this_column_label)
)
this_field = combined._internal.data_fields[0].copy(
name=this_column_label, nullable=True
)
that_scol = (
F.when(this_data_scol == that_data_scol, None)
.otherwise(that_data_scol)
.alias(that_column_label)
)
that_field = combined._internal.data_fields[1].copy(
name=that_column_label, nullable=True
)
else:
sdf = sdf.filter(~this_data_scol.eqNullSafe(that_data_scol))
this_scol = this_data_scol.alias(this_column_label)
this_field = combined._internal.data_fields[0].copy(name=this_column_label)
that_scol = that_data_scol.alias(that_column_label)
that_field = combined._internal.data_fields[1].copy(name=that_column_label)
sdf = sdf.select(*index_scols, this_scol, that_scol, NATURAL_ORDER_COLUMN_NAME)
internal = InternalFrame(
spark_frame=sdf,
index_spark_columns=[
scol_for(sdf, col) for col in self._internal.index_spark_column_names
],
index_names=self._internal.index_names,
index_fields=combined._internal.index_fields,
column_labels=[(this_column_label,), (that_column_label,)],
data_spark_columns=[scol_for(sdf, this_column_label), scol_for(sdf, that_column_label)],
data_fields=[this_field, that_field],
column_label_names=[None],
)
return DataFrame(internal)
def align(
self,
other: Union[DataFrame, "Series"],
join: str = "outer",
axis: Optional[Axis] = None,
copy: bool = True,
) -> Tuple["Series", Union[DataFrame, "Series"]]:
"""
Align two objects on their axes with the specified join method.
Join method is specified for each axis Index.
Parameters
----------
other : DataFrame or Series
join : {{'outer', 'inner', 'left', 'right'}}, default 'outer'
axis : allowed axis of the other object, default None
Align on index (0), columns (1), or both (None).
copy : bool, default True
Always returns new objects. If copy=False and no reindexing is
required then original objects are returned.
Returns
-------
(left, right) : (Series, type of other)
Aligned objects.
Examples
--------
>>> ps.set_option("compute.ops_on_diff_frames", True)
>>> s1 = ps.Series([7, 8, 9], index=[10, 11, 12])
>>> s2 = ps.Series(["g", "h", "i"], index=[10, 20, 30])
>>> aligned_l, aligned_r = s1.align(s2)
>>> aligned_l.sort_index()
10 7.0
11 8.0
12 9.0
20 NaN
30 NaN
dtype: float64
>>> aligned_r.sort_index()
10 g
11 None
12 None
20 h
30 i
dtype: object
Align with the join type "inner":
>>> aligned_l, aligned_r = s1.align(s2, join="inner")
>>> aligned_l.sort_index()
10 7
dtype: int64
>>> aligned_r.sort_index()
10 g
dtype: object
Align with a DataFrame:
>>> df = ps.DataFrame({"a": [1, 2, 3], "b": ["a", "b", "c"]}, index=[10, 20, 30])
>>> aligned_l, aligned_r = s1.align(df)
>>> aligned_l.sort_index()
10 7.0
11 8.0
12 9.0
20 NaN
30 NaN
dtype: float64
>>> aligned_r.sort_index()
a b
10 1.0 a
11 NaN None
12 NaN None
20 2.0 b
30 3.0 c
>>> ps.reset_option("compute.ops_on_diff_frames")
"""
axis = validate_axis(axis)
if axis == 1:
raise ValueError("Series does not support columns axis.")
self_df = self.to_frame()
left, right = self_df.align(other, join=join, axis=axis, copy=False)
if left is self_df:
left_ser = self
else:
left_ser = first_series(left).rename(self.name)
return (left_ser.copy(), right.copy()) if copy else (left_ser, right)
def between_time(
self,
start_time: Union[datetime.time, str],
end_time: Union[datetime.time, str],
include_start: bool = True,
include_end: bool = True,
axis: Axis = 0,
) -> "Series":
"""
Select values between particular times of the day (example: 9:00-9:30 AM).
By setting ``start_time`` to be later than ``end_time``,
you can get the times that are *not* between the two times.
Parameters
----------
start_time : datetime.time or str
Initial time as a time filter limit.
end_time : datetime.time or str
End time as a time filter limit.
include_start : bool, default True
Whether the start time needs to be included in the result.
include_end : bool, default True
Whether the end time needs to be included in the result.
axis : {0 or 'index', 1 or 'columns'}, default 0
Determine range time on index or columns value.
Returns
-------
Series
Data from the original object filtered to the specified dates range.
Raises
------
TypeError
If the index is not a :class:`DatetimeIndex`
See Also
--------
at_time : Select values at a particular time of the day.
last : Select final periods of time series based on a date offset.
DatetimeIndex.indexer_between_time : Get just the index locations for
values between particular times of the day.
Examples
--------
>>> idx = pd.date_range('2018-04-09', periods=4, freq='1D20min')
>>> psser = ps.Series([1, 2, 3, 4], index=idx)
>>> psser
2018-04-09 00:00:00 1
2018-04-10 00:20:00 2
2018-04-11 00:40:00 3
2018-04-12 01:00:00 4
dtype: int64
>>> psser.between_time('0:15', '0:45')
2018-04-10 00:20:00 2
2018-04-11 00:40:00 3
dtype: int64
"""
return first_series(
self.to_frame().between_time(start_time, end_time, include_start, include_end, axis)
).rename(self.name)
def at_time(
self, time: Union[datetime.time, str], asof: bool = False, axis: Axis = 0
) -> "Series":
"""
Select values at particular time of day (example: 9:30AM).
Parameters
----------
time : datetime.time or str
axis : {0 or 'index', 1 or 'columns'}, default 0
Returns
-------
Series
Raises
------
TypeError
If the index is not a :class:`DatetimeIndex`
See Also
--------
between_time : Select values between particular times of the day.
DatetimeIndex.indexer_at_time : Get just the index locations for
values at particular time of the day.
Examples
--------
>>> idx = pd.date_range('2018-04-09', periods=4, freq='12H')
>>> psser = ps.Series([1, 2, 3, 4], index=idx)
>>> psser
2018-04-09 00:00:00 1
2018-04-09 12:00:00 2
2018-04-10 00:00:00 3
2018-04-10 12:00:00 4
dtype: int64
>>> psser.at_time('12:00')
2018-04-09 12:00:00 2
2018-04-10 12:00:00 4
dtype: int64
"""
return first_series(self.to_frame().at_time(time, asof, axis)).rename(self.name)
def _cum(
self,
func: Callable[[Column], Column],
skipna: bool,
part_cols: Sequence["ColumnOrName"] = (),
ascending: bool = True,
) -> "Series":
# This is used for cummin, cummax, cumsum, etc.
if ascending:
window = (
Window.orderBy(F.asc(NATURAL_ORDER_COLUMN_NAME))
.partitionBy(*part_cols)
.rowsBetween(Window.unboundedPreceding, Window.currentRow)
)
else:
window = (
Window.orderBy(F.desc(NATURAL_ORDER_COLUMN_NAME))
.partitionBy(*part_cols)
.rowsBetween(Window.unboundedPreceding, Window.currentRow)
)
if skipna:
# There is a behavior difference between pandas and PySpark. In case of cummax,
#
# Input:
# A B
# 0 2.0 1.0
# 1 5.0 NaN
# 2 1.0 0.0
# 3 2.0 4.0
# 4 4.0 9.0
#
# pandas:
# A B
# 0 2.0 1.0
# 1 5.0 NaN
# 2 5.0 1.0
# 3 5.0 4.0
# 4 5.0 9.0
#
# PySpark:
# A B
# 0 2.0 1.0
# 1 5.0 1.0
# 2 5.0 1.0
# 3 5.0 4.0
# 4 5.0 9.0
scol = F.when(
# Manually sets nulls given the column defined above.
self.spark.column.isNull(),
SF.lit(None),
).otherwise(func(self.spark.column).over(window))
else:
# Here, we use two Windows.
# One for real data.
# The other one for setting nulls after the first null it meets.
#
# There is a behavior difference between pandas and PySpark. In case of cummax,
#
# Input:
# A B
# 0 2.0 1.0
# 1 5.0 NaN
# 2 1.0 0.0
# 3 2.0 4.0
# 4 4.0 9.0
#
# pandas:
# A B
# 0 2.0 1.0
# 1 5.0 NaN
# 2 5.0 NaN
# 3 5.0 NaN
# 4 5.0 NaN
#
# PySpark:
# A B
# 0 2.0 1.0
# 1 5.0 1.0
# 2 5.0 1.0
# 3 5.0 4.0
# 4 5.0 9.0
scol = F.when(
# A running max over the null flags becomes True from the first null onward.
F.max(self.spark.column.isNull()).over(window),
# Manually sets nulls given the column defined above.
SF.lit(None),
).otherwise(func(self.spark.column).over(window))
return self._with_new_scol(scol)
def _cumsum(self, skipna: bool, part_cols: Sequence["ColumnOrName"] = ()) -> "Series":
psser = self
if isinstance(psser.spark.data_type, BooleanType):
psser = psser.spark.transform(lambda scol: scol.cast(LongType()))
elif not isinstance(psser.spark.data_type, NumericType):
raise TypeError(
"Could not convert {} ({}) to numeric".format(
spark_type_to_pandas_dtype(psser.spark.data_type),
psser.spark.data_type.simpleString(),
)
)
return psser._cum(F.sum, skipna, part_cols)
def _cumprod(self, skipna: bool, part_cols: Sequence["ColumnOrName"] = ()) -> "Series":
if isinstance(self.spark.data_type, BooleanType):
scol = self._cum(
lambda scol: F.min(F.coalesce(scol, SF.lit(True))), skipna, part_cols
).spark.column.cast(LongType())
elif isinstance(self.spark.data_type, NumericType):
num_zeros = self._cum(
lambda scol: F.sum(F.when(scol == 0, 1).otherwise(0)), skipna, part_cols
).spark.column
num_negatives = self._cum(
lambda scol: F.sum(F.when(scol < 0, 1).otherwise(0)), skipna, part_cols
).spark.column
sign = F.when(num_negatives % 2 == 0, 1).otherwise(-1)
abs_prod = F.exp(
self._cum(lambda scol: F.sum(F.log(F.abs(scol))), skipna, part_cols).spark.column
)
scol = F.when(num_zeros > 0, 0).otherwise(sign * abs_prod)
if isinstance(self.spark.data_type, IntegralType):
scol = F.round(scol).cast(LongType())
else:
raise TypeError(
"Could not convert {} ({}) to numeric".format(
spark_type_to_pandas_dtype(self.spark.data_type),
self.spark.data_type.simpleString(),
)
)
return self._with_new_scol(scol)
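# Sketch of the decomposition used above for numeric columns: the cumulative
# product is rebuilt as sign * exp(cumsum(log(|x|))), with zeros handled
# separately. For example, for [2, -3, 4]:
#   cumsum(log|x|) -> [log 2, log 6, log 24], exp -> [2, 6, 24]
#   running count of negatives -> [0, 1, 1], so sign -> [1, -1, -1]
#   result -> [2, -6, -24]; any prefix containing a 0 yields 0, and integral
#   columns are rounded back to longs.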
# ----------------------------------------------------------------------
# Accessor Methods
# ----------------------------------------------------------------------
dt = CachedAccessor("dt", DatetimeMethods)
str = CachedAccessor("str", StringMethods)
cat = CachedAccessor("cat", CategoricalAccessor)
plot = CachedAccessor("plot", PandasOnSparkPlotAccessor)
# ----------------------------------------------------------------------
def _apply_series_op(
self, op: Callable[["Series"], Union["Series", Column]], should_resolve: bool = False
) -> "Series":
psser_or_scol = op(self)
if isinstance(psser_or_scol, Series):
psser = psser_or_scol
else:
psser = self._with_new_scol(psser_or_scol)
if should_resolve:
internal = psser._internal.resolved_copy
return first_series(DataFrame(internal))
else:
return psser.copy()
def _reduce_for_stat_function(
self,
sfun: Callable[["Series"], Column],
name: str_type,
axis: Optional[Axis] = None,
numeric_only: bool = True,
**kwargs: Any,
) -> Scalar:
"""
Applies sfun to the column and returns a scalar
Parameters
----------
sfun : the stats function to be used for aggregation
name : original pandas API name.
axis : used only for sanity check because Series only supports the index axis.
numeric_only : not used by this implementation, but passed down by stats functions
"""
axis = validate_axis(axis)
if axis == 1:
raise NotImplementedError("Series does not support columns axis.")
scol = sfun(self)
min_count = kwargs.get("min_count", 0)
if min_count > 0:
scol = F.when(Frame._count_expr(self) >= min_count, scol)
result = unpack_scalar(self._internal.spark_frame.select(scol))
return result if result is not None else np.nan
# Override the `groupby` to specify the actual return type annotation.
def groupby(
self,
by: Union[Name, "Series", List[Union[Name, "Series"]]],
axis: Axis = 0,
as_index: bool = True,
dropna: bool = True,
) -> "SeriesGroupBy":
return cast(
"SeriesGroupBy", super().groupby(by=by, axis=axis, as_index=as_index, dropna=dropna)
)
groupby.__doc__ = Frame.groupby.__doc__
def _build_groupby(
self, by: List[Union["Series", Label]], as_index: bool, dropna: bool
) -> "SeriesGroupBy":
from pyspark.pandas.groupby import SeriesGroupBy
return SeriesGroupBy._build(self, by, as_index=as_index, dropna=dropna)
def __getitem__(self, key: Any) -> Any:
try:
if (isinstance(key, slice) and any(type(n) == int for n in [key.start, key.stop])) or (
type(key) == int
and not isinstance(self.index.spark.data_type, (IntegerType, LongType))
):
# pandas Series uses positional indexing when slicing with ints, and label-based
# lookup for a single int key only when the index itself is integer-typed.
return self.iloc[key]
return self.loc[key]
except SparkPandasIndexingError:
raise KeyError(
"Key length ({}) exceeds index depth ({})".format(
len(key), self._internal.index_level
)
)
def __getattr__(self, item: str_type) -> Any:
if item.startswith("__"):
raise AttributeError(item)
if hasattr(MissingPandasLikeSeries, item):
property_or_func = getattr(MissingPandasLikeSeries, item)
if isinstance(property_or_func, property):
return property_or_func.fget(self)
else:
return partial(property_or_func, self)
raise AttributeError("'Series' object has no attribute '{}'".format(item))
def _to_internal_pandas(self) -> pd.Series:
"""
Return a pandas Series directly from _internal to avoid overhead of copy.
This method is for internal use only.
"""
return self._psdf._internal.to_pandas_frame[self.name]
def __repr__(self) -> str_type:
max_display_count = get_option("display.max_rows")
if max_display_count is None:
return self._to_internal_pandas().to_string(
name=bool(self.name), dtype=bool(self.dtype)
)
pser = self._psdf._get_or_create_repr_pandas_cache(max_display_count)[self.name]
pser_length = len(pser)
pser = pser.iloc[:max_display_count]
if pser_length > max_display_count:
repr_string = pser.to_string(length=True)
rest, prev_footer = repr_string.rsplit("\n", 1)
match = REPR_PATTERN.search(prev_footer)
if match is not None:
length = match.group("length")
dtype_name = str(self.dtype.name)
if self.name is None:
footer = "\ndtype: {dtype}\nShowing only the first {length}".format(
length=length, dtype=pprint_thing(dtype_name)
)
else:
footer = (
"\nName: {name}, dtype: {dtype}"
"\nShowing only the first {length}".format(
length=length, name=self.name, dtype=pprint_thing(dtype_name)
)
)
return rest + footer
return pser.to_string(name=self.name, dtype=self.dtype)
def __dir__(self) -> Iterable[str_type]:
if not isinstance(self.spark.data_type, StructType):
fields = []
else:
fields = [f for f in self.spark.data_type.fieldNames() if " " not in f]
return list(super().__dir__()) + fields
def __iter__(self) -> None:
return MissingPandasLikeSeries.__iter__(self)
if sys.version_info >= (3, 7):
# In order to support the type hints such as Series[...]. See DataFrame.__class_getitem__.
def __class_getitem__(cls, params: Any) -> Type[SeriesType]:
return create_type_for_series_type(params)
elif (3, 5) <= sys.version_info < (3, 7):
# The implementation is in its metaclass so this flag is needed to distinguish
# pandas-on-Spark Series.
is_series = None
def unpack_scalar(sdf: SparkDataFrame) -> Any:
"""
Takes a dataframe that is supposed to contain a single row with a single scalar value,
and returns this value.
"""
lst = sdf.limit(2).toPandas()
assert len(lst) == 1, (sdf, lst)
row = lst.iloc[0]
lst2 = list(row)
assert len(lst2) == 1, (row, lst2)
return lst2[0]
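# Illustrative usage (assumes an active SparkSession named `spark`):
#   unpack_scalar(spark.range(1).selectExpr("42 AS v"))  # -> 42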
@overload
def first_series(df: DataFrame) -> Series:
...
@overload
def first_series(df: pd.DataFrame) -> pd.Series:
...
def first_series(df: Union[DataFrame, pd.DataFrame]) -> Union[Series, pd.Series]:
"""
Takes a DataFrame and returns the first column of the DataFrame as a Series
"""
assert isinstance(df, (DataFrame, pd.DataFrame)), type(df)
if isinstance(df, DataFrame):
return df._psser_for(df._internal.column_labels[0])
else:
return df[df.columns[0]]
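# Illustrative usage (hypothetical frames):
#   first_series(ps.DataFrame({"a": [1, 2], "b": [3, 4]}))  # pandas-on-Spark Series "a"
#   first_series(pd.DataFrame({"a": [1, 2]}))               # plain pandas Series "a"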
def _test() -> None:
import os
import doctest
import sys
from pyspark.sql import SparkSession
import pyspark.pandas.series
os.chdir(os.environ["SPARK_HOME"])
globs = pyspark.pandas.series.__dict__.copy()
globs["ps"] = pyspark.pandas
spark = (
SparkSession.builder.master("local[4]").appName("pyspark.pandas.series tests").getOrCreate()
)
(failure_count, test_count) = doctest.testmod(
pyspark.pandas.series,
globs=globs,
optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE,
)
spark.stop()
if failure_count:
sys.exit(-1)
if __name__ == "__main__":
_test()
| []
| []
| [
"SPARK_HOME"
]
| [] | ["SPARK_HOME"] | python | 1 | 0 | |
.github/scripts/deploy.py | #!/usr/bin/env python3
# This script does the following.
# 1. Takes in a space separated list of changed files
# 2. For each changed file, adds a header (title) based on the filename
# 3. Sets output for the prepared files to move into the site
import argparse
import os
import json
import re
import sys
import tempfile
def read_file(filename):
with open(filename, "r") as fd:
content = fd.read()
return content
def read_json(filename):
with open(filename, "r") as fd:
content = json.loads(fd.read())
return content
# Templates
draft_template = """---
title: %s
layout: proposal
pr: %s
tags:
- %s
---"""
approved_template = """---
title: %s
layout: proposal
tags:
- %s
---"""
draft_label = os.environ.get("draft_label", "draft")
approved_label = os.environ.get("approved_label", "approved")
def get_parser():
parser = argparse.ArgumentParser(description="Proposals Parsing Client")
description = "Prepare proposal drafts"
subparsers = parser.add_subparsers(
help="actions",
title="actions",
description=description,
dest="command",
)
draft = subparsers.add_parser("draft", help="prepare drafts")
approved = subparsers.add_parser("approved", help="add approved proposals")
remove = subparsers.add_parser("remove", help="remove non-existing proposals")
for command in [draft, approved, remove]:
command.add_argument(
"files", help="the drafts to consider (changed files)", nargs="*"
)
return parser
def get_title(filename):
"""
Convert name-of-markdown.md to Name Of Markdown
"""
basename = os.path.basename(filename)
return " ".join([x.capitalize() for x in basename.split(".", 1)[0].split("-")])
def is_correct(filename):
"""
Formatting and sanity checks
"""
if not os.path.exists(filename):
print("%s does not exist, skipping!" % filename)
return False
dirname = os.path.basename(os.path.dirname(filename))
if dirname != "proposals":
print("%s is not a proposal, skipping." % filename)
return False
# Check that we end in markdown
if not filename.endswith("md"):
print("%s does not end in .md, skipping." % filename)
return False
# and only have lowercase and -
basename = os.path.basename(filename).replace(".md", "")
if not re.search("^[a-z0-9-]*$", basename):
print(
"%s contains invalid characters: only lowercase letters, numbers, and - are allowed!"
% basename
)
return False
return True
def find_removed(files):
"""
Only handle removed proposals on merge into main, so each removal is approved by owners
"""
removed = []
for filename in files:
if not os.path.exists(filename):
removed.append(filename)
print("::set-output name=removed::%s" % " ".join(removed))
def prepare_proposals(files, template_string, template_tag, with_pr=False):
"""
Generic shared function to prepare proposal files
"""
tmpdir = tempfile.mkdtemp()
final_files = []
for filename in files:
if not is_correct(filename):
continue
# Prepare header
title = get_title(filename)
if with_pr:
pr = PullRequest()
# Default to custom tag on PR or just draft default
template = template_string % (title, pr.url, pr.get_tag() or template_tag)
else:
template = template_string % (title, template_tag)
content = template + "\n\n" + read_file(filename)
# Write to final location
tmppath = os.path.join(tmpdir, os.path.basename(filename))
with open(tmppath, "w") as fd:
fd.write(content)
final_files.append(tmppath)
# When we have final files, set in environment
print("::set-output name=proposals::%s" % " ".join(final_files))
def prepare_approved(files):
"""
Prepare approved (in progress) proposals
"""
prepare_preposals(files, approved_template, approved_label, with_pr=False)
def prepare_drafts(files):
"""
Prepare proposal drafts
"""
prepare_preposals(files, draft_template, draft_label, with_pr=True)
class PullRequest:
"""Helper class to get pull request and labels to indicate status"""
def __init__(self):
from github import Github
self.gh = Github(os.getenv("GITHUB_TOKEN"))
events_path = os.getenv("GITHUB_EVENT_PATH")
self.event = read_json(events_path)
self.repo = self.gh.get_repo(self.repo_name)
self.number = self.event["pull_request"]["number"]
@property
def repo_name(self):
return self.event["repository"]["full_name"]
@property
def url(self):
return "https://github.com/%s/pull/%s" % (self.repo_name, self.number)
def get_tag(self):
pr = self.repo.get_pull(self.number)
# Return the first status we find
for label in pr.get_labels():
if label.name.startswith("status-"):
name = label.name.replace("status-", "").strip()
return name
def main():
parser = get_parser()
def help(return_code=0):
parser.print_help()
sys.exit(return_code)
# If an error occurs while parsing the arguments, the interpreter will exit with value 2
args, extra = parser.parse_known_args()
if not args.command:
help()
print(args.files)
# Prepare drafts
if args.command == "draft":
prepare_drafts(args.files)
elif args.command == "approved":
prepare_approved(args.files)
elif args.command == "remove":
find_removed(args.files)
if __name__ == "__main__":
main()
| []
| []
| [
"GITHUB_EVENT_PATH",
"draft_label",
"GITHUB_TOKEN",
"approved_label"
]
| [] | ["GITHUB_EVENT_PATH", "draft_label", "GITHUB_TOKEN", "approved_label"] | python | 4 | 0 | |
src/terraform-resource/check/check_backend_test.go | package check_test
import (
"io/ioutil"
"os"
"os/exec"
"path"
"terraform-resource/check"
"terraform-resource/models"
"terraform-resource/test/helpers"
. "github.com/onsi/ginkgo"
. "github.com/onsi/gomega"
)
var _ = Describe("Check with Terraform Backend", func() {
var (
checkInput models.InRequest
bucket string
prevEnvName string
currEnvName string
pathToPrevS3Fixture string
pathToCurrS3Fixture string
awsVerifier *helpers.AWSVerifier
workingDir string
workspacePath string
expectedLineage = "f62eee11-6a4e-4d39-b5c7-15d3dad8e5f7"
)
BeforeEach(func() {
accessKey := os.Getenv("AWS_ACCESS_KEY")
Expect(accessKey).ToNot(BeEmpty(), "AWS_ACCESS_KEY must be set")
secretKey := os.Getenv("AWS_SECRET_KEY")
Expect(secretKey).ToNot(BeEmpty(), "AWS_SECRET_KEY must be set")
bucket = os.Getenv("AWS_BUCKET")
Expect(bucket).ToNot(BeEmpty(), "AWS_BUCKET must be set")
bucketPath := os.Getenv("AWS_BUCKET_SUBFOLDER")
Expect(bucketPath).ToNot(BeEmpty(), "AWS_BUCKET_SUBFOLDER must be set")
region := os.Getenv("AWS_REGION") // optional
if region == "" {
region = "us-east-1"
}
awsVerifier = helpers.NewAWSVerifier(
accessKey,
secretKey,
region,
"",
)
var err error
workingDir, err = ioutil.TempDir(os.TempDir(), "terraform-resource-check-backend-test")
Expect(err).ToNot(HaveOccurred())
// ensure relative paths resolve correctly
err = os.Chdir(workingDir)
Expect(err).ToNot(HaveOccurred())
fixturesDir := path.Join(helpers.ProjectRoot(), "fixtures")
err = exec.Command("cp", "-r", fixturesDir, workingDir).Run()
Expect(err).ToNot(HaveOccurred())
workspacePath = helpers.RandomString("check-backend-test")
prevEnvName = "s3-test-fixture-previous"
currEnvName = "s3-test-fixture-current"
pathToPrevS3Fixture = path.Join(workspacePath, prevEnvName, "terraform.tfstate")
pathToCurrS3Fixture = path.Join(workspacePath, currEnvName, "terraform.tfstate")
checkInput = models.InRequest{
Source: models.Source{
Terraform: models.Terraform{
BackendType: "s3",
BackendConfig: map[string]interface{}{
"bucket": bucket,
"key": "terraform.tfstate",
"access_key": accessKey,
"secret_key": secretKey,
"region": region,
"workspace_key_prefix": workspacePath,
},
},
},
}
})
AfterEach(func() {
_ = os.RemoveAll(workingDir)
awsVerifier.DeleteObjectFromS3(bucket, pathToPrevS3Fixture)
awsVerifier.DeleteObjectFromS3(bucket, pathToCurrS3Fixture)
})
Context("when bucket is empty", func() {
It("returns an empty version list", func() {
runner := check.Runner{}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
expectedOutput := []models.Version{}
Expect(resp).To(Equal(expectedOutput))
})
})
Context("when bucket contains multiple state files", func() {
BeforeEach(func() {
prevFixture, err := os.Open(helpers.FileLocation("fixtures/s3-backend/terraform-previous.tfstate"))
Expect(err).ToNot(HaveOccurred())
defer prevFixture.Close()
awsVerifier.UploadObjectToS3(bucket, pathToPrevS3Fixture, prevFixture)
currFixture, err := os.Open(helpers.FileLocation("fixtures/s3-backend/terraform-current.tfstate"))
Expect(err).ToNot(HaveOccurred())
defer currFixture.Close()
awsVerifier.UploadObjectToS3(bucket, pathToCurrS3Fixture, currFixture)
})
Context("when watching a single env with `source.env_name`", func() {
BeforeEach(func() {
checkInput.Source.EnvName = currEnvName
})
It("returns the latest version from the backend when no version is given", func() {
runner := check.Runner{}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
expectOutput := []models.Version{
models.Version{
Serial: "1",
EnvName: currEnvName,
Lineage: expectedLineage,
},
}
Expect(resp).To(Equal(expectOutput))
})
It("returns the latest version when the given version matches latest version", func() {
checkInput.Version = models.Version{
Serial: "1",
EnvName: currEnvName,
Lineage: expectedLineage,
}
runner := check.Runner{}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
expectOutput := []models.Version{
models.Version{
Serial: "1",
EnvName: currEnvName,
Lineage: expectedLineage,
},
}
Expect(resp).To(Equal(expectOutput))
})
It("returns the latest version when the given version has a lower serial number", func() {
checkInput.Version = models.Version{
Serial: "0",
EnvName: currEnvName,
Lineage: expectedLineage,
}
runner := check.Runner{}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
expectOutput := []models.Version{
models.Version{
Serial: "1",
EnvName: currEnvName,
Lineage: expectedLineage,
},
}
Expect(resp).To(Equal(expectOutput))
})
It("returns an empty version list when the given version has a higher serial number", func() {
checkInput.Version = models.Version{
Serial: "2",
EnvName: currEnvName,
Lineage: expectedLineage,
}
runner := check.Runner{}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
expectOutput := []models.Version{}
Expect(resp).To(Equal(expectOutput))
})
It("sorts the serial numerically", func() {
checkInput.Version = models.Version{
Serial: "10",
EnvName: currEnvName,
Lineage: expectedLineage,
}
runner := check.Runner{}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
expectOutput := []models.Version{}
Expect(resp).To(Equal(expectOutput))
})
It("returns the latest version when the given lineage has changed", func() {
checkInput.Version = models.Version{
Serial: "2",
EnvName: currEnvName,
Lineage: "aaaaaaaa-aaaa-aaaa-aaaa-aaaaaaaaaaaa",
}
runner := check.Runner{}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
expectOutput := []models.Version{
models.Version{
Serial: "1",
EnvName: currEnvName,
Lineage: expectedLineage,
},
}
Expect(resp).To(Equal(expectOutput))
})
It("returns the latest version when the lineage is omitted", func() {
checkInput.Version = models.Version{
Serial: "2",
EnvName: currEnvName,
Lineage: "",
}
runner := check.Runner{}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
expectOutput := []models.Version{
models.Version{
Serial: "1",
EnvName: currEnvName,
Lineage: expectedLineage,
},
}
Expect(resp).To(Equal(expectOutput))
})
It("can run twice in a row", func() {
runner := check.Runner{}
expectOutput := []models.Version{
models.Version{
Serial: "1",
EnvName: currEnvName,
Lineage: expectedLineage,
},
}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
Expect(resp).To(Equal(expectOutput))
resp, err = runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
Expect(resp).To(Equal(expectOutput))
})
})
Context("when watching a multiple envs with `source.env_name` unset", func() {
BeforeEach(func() {
checkInput.Source.EnvName = ""
})
It("returns an empty version list when no version is given", func() {
runner := check.Runner{}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
expectOutput := []models.Version{}
Expect(resp).To(Equal(expectOutput))
})
It("returns the latest version when the given version matches latest version", func() {
checkInput.Version = models.Version{
Serial: "1",
EnvName: currEnvName,
Lineage: expectedLineage,
}
runner := check.Runner{}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
expectOutput := []models.Version{
models.Version{
Serial: "1",
EnvName: currEnvName,
Lineage: expectedLineage,
},
}
Expect(resp).To(Equal(expectOutput))
})
})
})
Context("when 'default' workspace contains custom plugins", func() {
var pathToDefaultS3Fixture string
BeforeEach(func() {
// S3 backend ignores workspace_key_prefix/key for 'default' workspace.
// Unfortunately this makes this test vulnerable to test pollution.
pathToDefaultS3Fixture = "terraform.tfstate"
defaultFixture, err := os.Open(helpers.FileLocation("fixtures/custom-plugin-backend/terraform.tfstate"))
Expect(err).ToNot(HaveOccurred())
defer defaultFixture.Close()
awsVerifier.UploadObjectToS3(bucket, pathToDefaultS3Fixture, defaultFixture)
currFixture, err := os.Open(helpers.FileLocation("fixtures/s3-backend/terraform-current.tfstate"))
Expect(err).ToNot(HaveOccurred())
defer currFixture.Close()
awsVerifier.UploadObjectToS3(bucket, pathToCurrS3Fixture, currFixture)
checkInput.Source.EnvName = currEnvName
})
AfterEach(func() {
awsVerifier.DeleteObjectFromS3(bucket, pathToDefaultS3Fixture)
})
It("returns the latest version without trying to download plugins", func() {
runner := check.Runner{}
resp, err := runner.Run(checkInput)
Expect(err).ToNot(HaveOccurred())
expectOutput := []models.Version{
models.Version{
Serial: "1",
EnvName: currEnvName,
Lineage: expectedLineage,
},
}
Expect(resp).To(Equal(expectOutput))
})
})
})
| [
"\"AWS_ACCESS_KEY\"",
"\"AWS_SECRET_KEY\"",
"\"AWS_BUCKET\"",
"\"AWS_BUCKET_SUBFOLDER\"",
"\"AWS_REGION\""
]
| []
| [
"AWS_REGION",
"AWS_BUCKET",
"AWS_BUCKET_SUBFOLDER",
"AWS_ACCESS_KEY",
"AWS_SECRET_KEY"
]
| [] | ["AWS_REGION", "AWS_BUCKET", "AWS_BUCKET_SUBFOLDER", "AWS_ACCESS_KEY", "AWS_SECRET_KEY"] | go | 5 | 0 | |
tests/settings.py | import os
from oscar.defaults import * # noqa
# Path helper
location = lambda x: os.path.join(
os.path.dirname(os.path.realpath(__file__)), x)
ALLOWED_HOSTS = ['test', '.oscarcommerce.com']
DATABASES = {
'default': {
'ENGINE': os.environ.get('DATABASE_ENGINE',
'django.db.backends.postgresql'),
'NAME': os.environ.get('DATABASE_NAME', 'oscar'),
'USER': os.environ.get('DATABASE_USER', None),
'PASSWORD': os.environ.get('DATABASE_USER', None),
}
}
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.staticfiles',
'django.contrib.sites',
'django.contrib.flatpages',
'oscar',
'oscar.apps.analytics',
'oscar.apps.checkout',
'oscar.apps.address',
'oscar.apps.shipping',
'oscar.apps.catalogue',
'oscar.apps.catalogue.reviews',
'oscar.apps.partner',
'oscar.apps.basket',
'oscar.apps.payment',
'oscar.apps.offer',
'oscar.apps.order',
'oscar.apps.customer',
'oscar.apps.search',
'oscar.apps.voucher',
'oscar.apps.wishlists',
'oscar.apps.dashboard',
'oscar.apps.dashboard.reports',
'oscar.apps.dashboard.users',
'oscar.apps.dashboard.orders',
'oscar.apps.dashboard.catalogue',
'oscar.apps.dashboard.offers',
'oscar.apps.dashboard.partners',
'oscar.apps.dashboard.pages',
'oscar.apps.dashboard.ranges',
'oscar.apps.dashboard.reviews',
'oscar.apps.dashboard.vouchers',
'oscar.apps.dashboard.communications',
'oscar.apps.dashboard.shipping',
# 3rd-party apps that oscar depends on
'widget_tweaks',
'haystack',
'treebeard',
'sorl.thumbnail',
'easy_thumbnails',
'django_tables2',
# Contains models we need for testing
'tests._site.model_tests_app',
'tests._site.myauth',
]
# Use a custom partner app to test overriding models. I can't find a way of
# doing this on a per-test basis, so I'm using a global change.
INSTALLED_APPS[INSTALLED_APPS.index('oscar.apps.partner')] = 'tests._site.apps.partner'
INSTALLED_APPS[INSTALLED_APPS.index('oscar.apps.customer')] = 'tests._site.apps.customer'
INSTALLED_APPS[INSTALLED_APPS.index('oscar.apps.catalogue')] = 'tests._site.apps.catalogue'
AUTH_USER_MODEL = 'myauth.User'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
location('_site/templates'),
],
'OPTIONS': {
'loaders': [
('django.template.loaders.cached.Loader', [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
]),
],
'context_processors': [
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.request',
'django.template.context_processors.debug',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.contrib.messages.context_processors.messages',
'oscar.apps.search.context_processors.search_form',
'oscar.apps.customer.notifications.context_processors.notifications',
'oscar.apps.checkout.context_processors.checkout',
'oscar.core.context_processors.metadata',
]
}
}
]
MIDDLEWARE = [
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'oscar.apps.basket.middleware.BasketMiddleware',
]
AUTHENTICATION_BACKENDS = (
'oscar.apps.customer.auth_backends.EmailBackend',
'django.contrib.auth.backends.ModelBackend',
)
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
'OPTIONS': {
'min_length': 6,
}
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
HAYSTACK_CONNECTIONS = {'default': {'ENGINE': 'haystack.backends.simple_backend.SimpleEngine'}}
PASSWORD_HASHERS = ['django.contrib.auth.hashers.MD5PasswordHasher']
ROOT_URLCONF = 'tests._site.urls'
LOGIN_REDIRECT_URL = '/accounts/'
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
PUBLIC_ROOT = location('public')
MEDIA_ROOT = os.path.join(PUBLIC_ROOT, 'media')
DEBUG = False
SITE_ID = 1
USE_TZ = 1
APPEND_SLASH = True
DDF_DEFAULT_DATA_FIXTURE = 'tests.dynamic_fixtures.OscarDynamicDataFixtureClass'
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
LANGUAGE_CODE = 'en-gb'
OSCAR_INITIAL_ORDER_STATUS = 'A'
OSCAR_ORDER_STATUS_PIPELINE = {'A': ('B',), 'B': ()}
OSCAR_INITIAL_LINE_STATUS = 'a'
OSCAR_LINE_STATUS_PIPELINE = {'a': ('b', ), 'b': ()}
SECRET_KEY = 'notverysecret'
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
FIXTURE_DIRS = [location('unit/fixtures')]
| []
| []
| [
"DATABASE_ENGINE",
"DATABASE_NAME",
"DATABASE_USER"
]
| [] | ["DATABASE_ENGINE", "DATABASE_NAME", "DATABASE_USER"] | python | 3 | 0 | |
Godeps/_workspace/src/k8s.io/kubernetes/test/integration/persistent_volumes_test.go | // +build integration,!no-etcd
/*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package integration
import (
"fmt"
"math/rand"
"net/http/httptest"
"os"
"strconv"
"testing"
"time"
"github.com/golang/glog"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/api/resource"
"k8s.io/kubernetes/pkg/api/testapi"
"k8s.io/kubernetes/pkg/api/unversioned"
clientset "k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset"
"k8s.io/kubernetes/pkg/client/restclient"
fake_cloud "k8s.io/kubernetes/pkg/cloudprovider/providers/fake"
persistentvolumecontroller "k8s.io/kubernetes/pkg/controller/volume/persistentvolume"
"k8s.io/kubernetes/pkg/conversion"
"k8s.io/kubernetes/pkg/volume"
volumetest "k8s.io/kubernetes/pkg/volume/testing"
"k8s.io/kubernetes/pkg/watch"
"k8s.io/kubernetes/test/integration/framework"
)
func init() {
requireEtcd()
}
// Several tests in this file are configurable by environment variables:
// KUBE_INTEGRATION_PV_OBJECTS - number of PVs/PVCs to be created
// (100 by default)
// KUBE_INTEGRATION_PV_SYNC_PERIOD - volume controller sync period
// (10s by default)
// KUBE_INTEGRATION_PV_END_SLEEP - for how long should
// TestPersistentVolumeMultiPVsPVCs sleep when it's finished (0s by
// default). This is useful to test how long it takes for the periodic sync
// to process bound PVs/PVCs.
//
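// A hypothetical invocation (exact build tags/flags may differ):
//   KUBE_INTEGRATION_PV_OBJECTS=500 KUBE_INTEGRATION_PV_SYNC_PERIOD=5s \
//     go test -tags integration ./test/integration/ -run TestPersistentVolumeMultiPVsPVCs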
const defaultObjectCount = 100
const defaultSyncPeriod = 10 * time.Second
func getObjectCount() int {
objectCount := defaultObjectCount
if s := os.Getenv("KUBE_INTEGRATION_PV_OBJECTS"); s != "" {
var err error
objectCount, err = strconv.Atoi(s)
if err != nil {
glog.Fatalf("cannot parse value of KUBE_INTEGRATION_PV_OBJECTS: %v", err)
}
}
glog.V(2).Infof("using KUBE_INTEGRATION_PV_OBJECTS=%d", objectCount)
return objectCount
}
func getSyncPeriod() time.Duration {
period := defaultSyncPeriod
if s := os.Getenv("KUBE_INTEGRATION_PV_SYNC_PERIOD"); s != "" {
var err error
period, err = time.ParseDuration(s)
if err != nil {
glog.Fatalf("cannot parse value of KUBE_INTEGRATION_PV_SYNC_PERIOD: %v", err)
}
}
glog.V(2).Infof("using KUBE_INTEGRATION_PV_SYNC_PERIOD=%v", period)
return period
}
func testSleep() {
var period time.Duration
if s := os.Getenv("KUBE_INTEGRATION_PV_END_SLEEP"); s != "" {
var err error
period, err = time.ParseDuration(s)
if err != nil {
glog.Fatalf("cannot parse value of KUBE_INTEGRATION_PV_END_SLEEP: %v", err)
}
}
glog.V(2).Infof("using KUBE_INTEGRATION_PV_END_SLEEP=%v", period)
if period != 0 {
time.Sleep(period)
glog.V(2).Infof("sleep finished")
}
}
func TestPersistentVolumeRecycler(t *testing.T) {
glog.V(2).Infof("TestPersistentVolumeRecycler started")
_, s := framework.RunAMaster(nil)
defer s.Close()
ns := framework.CreateTestingNamespace("pv-recycler", s, t)
defer framework.DeleteTestingNamespace(ns, s, t)
testClient, ctrl, watchPV, watchPVC := createClients(ns, t, s)
defer watchPV.Stop()
defer watchPVC.Stop()
// NOTE: This test cannot run in parallel, because it is creating and deleting
// non-namespaced objects (PersistenceVolumes).
defer testClient.Core().PersistentVolumes().DeleteCollection(nil, api.ListOptions{})
ctrl.Run()
defer ctrl.Stop()
// This PV will be claimed, released, and recycled.
pv := createPV("fake-pv-recycler", "/tmp/foo", "10G", []api.PersistentVolumeAccessMode{api.ReadWriteOnce}, api.PersistentVolumeReclaimRecycle)
pvc := createPVC("fake-pvc-recycler", ns.Name, "5G", []api.PersistentVolumeAccessMode{api.ReadWriteOnce})
_, err := testClient.PersistentVolumes().Create(pv)
if err != nil {
t.Errorf("Failed to create PersistentVolume: %v", err)
}
glog.V(2).Infof("TestPersistentVolumeRecycler pv created")
_, err = testClient.PersistentVolumeClaims(ns.Name).Create(pvc)
if err != nil {
t.Errorf("Failed to create PersistentVolumeClaim: %v", err)
}
glog.V(2).Infof("TestPersistentVolumeRecycler pvc created")
// wait until the controller pairs the volume and claim
waitForPersistentVolumePhase(testClient, pv.Name, watchPV, api.VolumeBound)
glog.V(2).Infof("TestPersistentVolumeRecycler pv bound")
waitForPersistentVolumeClaimPhase(testClient, pvc.Name, ns.Name, watchPVC, api.ClaimBound)
glog.V(2).Infof("TestPersistentVolumeRecycler pvc bound")
// deleting a claim releases the volume, after which it can be recycled
if err := testClient.PersistentVolumeClaims(ns.Name).Delete(pvc.Name, nil); err != nil {
t.Errorf("error deleting claim %s", pvc.Name)
}
glog.V(2).Infof("TestPersistentVolumeRecycler pvc deleted")
waitForPersistentVolumePhase(testClient, pv.Name, watchPV, api.VolumeReleased)
glog.V(2).Infof("TestPersistentVolumeRecycler pv released")
waitForPersistentVolumePhase(testClient, pv.Name, watchPV, api.VolumeAvailable)
glog.V(2).Infof("TestPersistentVolumeRecycler pv available")
}
func TestPersistentVolumeDeleter(t *testing.T) {
glog.V(2).Infof("TestPersistentVolumeDeleter started")
_, s := framework.RunAMaster(nil)
defer s.Close()
ns := framework.CreateTestingNamespace("pv-deleter", s, t)
defer framework.DeleteTestingNamespace(ns, s, t)
testClient, ctrl, watchPV, watchPVC := createClients(ns, t, s)
defer watchPV.Stop()
defer watchPVC.Stop()
// NOTE: This test cannot run in parallel, because it is creating and deleting
// non-namespaced objects (PersistenceVolumes).
defer testClient.Core().PersistentVolumes().DeleteCollection(nil, api.ListOptions{})
ctrl.Run()
defer ctrl.Stop()
// This PV will be claimed, released, and deleted.
pv := createPV("fake-pv-deleter", "/tmp/foo", "10G", []api.PersistentVolumeAccessMode{api.ReadWriteOnce}, api.PersistentVolumeReclaimDelete)
pvc := createPVC("fake-pvc-deleter", ns.Name, "5G", []api.PersistentVolumeAccessMode{api.ReadWriteOnce})
_, err := testClient.PersistentVolumes().Create(pv)
if err != nil {
t.Errorf("Failed to create PersistentVolume: %v", err)
}
glog.V(2).Infof("TestPersistentVolumeDeleter pv created")
_, err = testClient.PersistentVolumeClaims(ns.Name).Create(pvc)
if err != nil {
t.Errorf("Failed to create PersistentVolumeClaim: %v", err)
}
glog.V(2).Infof("TestPersistentVolumeDeleter pvc created")
waitForPersistentVolumePhase(testClient, pv.Name, watchPV, api.VolumeBound)
glog.V(2).Infof("TestPersistentVolumeDeleter pv bound")
waitForPersistentVolumeClaimPhase(testClient, pvc.Name, ns.Name, watchPVC, api.ClaimBound)
glog.V(2).Infof("TestPersistentVolumeDeleter pvc bound")
// deleting a claim releases the volume, after which it can be recycled
if err := testClient.PersistentVolumeClaims(ns.Name).Delete(pvc.Name, nil); err != nil {
t.Errorf("error deleting claim %s", pvc.Name)
}
glog.V(2).Infof("TestPersistentVolumeDeleter pvc deleted")
waitForPersistentVolumePhase(testClient, pv.Name, watchPV, api.VolumeReleased)
glog.V(2).Infof("TestPersistentVolumeDeleter pv released")
for {
event := <-watchPV.ResultChan()
if event.Type == watch.Deleted {
break
}
}
glog.V(2).Infof("TestPersistentVolumeDeleter pv deleted")
}
func TestPersistentVolumeBindRace(t *testing.T) {
// Test a race binding many claims to a PV that is pre-bound to a specific
// PVC. Only this specific PVC should get bound.
glog.V(2).Infof("TestPersistentVolumeBindRace started")
_, s := framework.RunAMaster(nil)
defer s.Close()
ns := framework.CreateTestingNamespace("pv-bind-race", s, t)
defer framework.DeleteTestingNamespace(ns, s, t)
testClient, ctrl, watchPV, watchPVC := createClients(ns, t, s)
defer watchPV.Stop()
defer watchPVC.Stop()
// NOTE: This test cannot run in parallel, because it is creating and deleting
// non-namespaced objects (PersistenceVolumes).
defer testClient.Core().PersistentVolumes().DeleteCollection(nil, api.ListOptions{})
ctrl.Run()
defer ctrl.Stop()
pv := createPV("fake-pv-race", "/tmp/foo", "10G", []api.PersistentVolumeAccessMode{api.ReadWriteOnce}, api.PersistentVolumeReclaimRetain)
pvc := createPVC("fake-pvc-race", ns.Name, "5G", []api.PersistentVolumeAccessMode{api.ReadWriteOnce})
counter := 0
maxClaims := 100
claims := []*api.PersistentVolumeClaim{}
for counter <= maxClaims {
counter += 1
clone, _ := conversion.NewCloner().DeepCopy(pvc)
newPvc, _ := clone.(*api.PersistentVolumeClaim)
newPvc.ObjectMeta = api.ObjectMeta{Name: fmt.Sprintf("fake-pvc-race-%d", counter)}
claim, err := testClient.PersistentVolumeClaims(ns.Name).Create(newPvc)
if err != nil {
t.Fatalf("Error creating newPvc: %v", err)
}
claims = append(claims, claim)
}
glog.V(2).Infof("TestPersistentVolumeBindRace claims created")
// putting a bind manually on a pv should only match the claim it is bound to
rand.Seed(time.Now().Unix())
claim := claims[rand.Intn(maxClaims-1)]
claimRef, err := api.GetReference(claim)
if err != nil {
t.Fatalf("Unexpected error getting claimRef: %v", err)
}
pv.Spec.ClaimRef = claimRef
pv.Spec.ClaimRef.UID = ""
pv, err = testClient.PersistentVolumes().Create(pv)
if err != nil {
t.Fatalf("Unexpected error creating pv: %v", err)
}
glog.V(2).Infof("TestPersistentVolumeBindRace pv created, pre-bound to %s", claim.Name)
waitForPersistentVolumePhase(testClient, pv.Name, watchPV, api.VolumeBound)
glog.V(2).Infof("TestPersistentVolumeBindRace pv bound")
waitForAnyPersistentVolumeClaimPhase(watchPVC, api.ClaimBound)
glog.V(2).Infof("TestPersistentVolumeBindRace pvc bound")
pv, err = testClient.PersistentVolumes().Get(pv.Name)
if err != nil {
t.Fatalf("Unexpected error getting pv: %v", err)
}
if pv.Spec.ClaimRef == nil {
t.Fatalf("Unexpected nil claimRef")
}
if pv.Spec.ClaimRef.Namespace != claimRef.Namespace || pv.Spec.ClaimRef.Name != claimRef.Name {
t.Fatalf("Bind mismatch! Expected %s/%s but got %s/%s", claimRef.Namespace, claimRef.Name, pv.Spec.ClaimRef.Namespace, pv.Spec.ClaimRef.Name)
}
}
// TestPersistentVolumeClaimLabelSelector test binding using label selectors
func TestPersistentVolumeClaimLabelSelector(t *testing.T) {
_, s := framework.RunAMaster(nil)
defer s.Close()
ns := framework.CreateTestingNamespace("pvc-label-selector", s, t)
defer framework.DeleteTestingNamespace(ns, s, t)
testClient, controller, watchPV, watchPVC := createClients(ns, t, s)
defer watchPV.Stop()
defer watchPVC.Stop()
// NOTE: This test cannot run in parallel, because it is creating and deleting
// non-namespaced objects (PersistenceVolumes).
defer testClient.Core().PersistentVolumes().DeleteCollection(nil, api.ListOptions{})
controller.Run()
defer controller.Stop()
var (
err error
modes = []api.PersistentVolumeAccessMode{api.ReadWriteOnce}
reclaim = api.PersistentVolumeReclaimRetain
pv_true = createPV("pv-true", "/tmp/foo-label", "1G", modes, reclaim)
pv_false = createPV("pv-false", "/tmp/foo-label", "1G", modes, reclaim)
pvc = createPVC("pvc-ls-1", ns.Name, "1G", modes)
)
pv_true.ObjectMeta.SetLabels(map[string]string{"foo": "true"})
pv_false.ObjectMeta.SetLabels(map[string]string{"foo": "false"})
_, err = testClient.PersistentVolumes().Create(pv_true)
if err != nil {
t.Fatalf("Failed to create PersistentVolume: %v", err)
}
_, err = testClient.PersistentVolumes().Create(pv_false)
if err != nil {
t.Fatalf("Failed to create PersistentVolume: %v", err)
}
t.Log("volumes created")
pvc.Spec.Selector = &unversioned.LabelSelector{
MatchLabels: map[string]string{
"foo": "true",
},
}
_, err = testClient.PersistentVolumeClaims(ns.Name).Create(pvc)
if err != nil {
t.Fatalf("Failed to create PersistentVolumeClaim: %v", err)
}
t.Log("claim created")
waitForAnyPersistentVolumePhase(watchPV, api.VolumeBound)
t.Log("volume bound")
waitForPersistentVolumeClaimPhase(testClient, pvc.Name, ns.Name, watchPVC, api.ClaimBound)
t.Log("claim bound")
pv, err := testClient.PersistentVolumes().Get("pv-false")
if err != nil {
t.Fatalf("Unexpected error getting pv: %v", err)
}
if pv.Spec.ClaimRef != nil {
t.Fatalf("False PV shouldn't be bound")
}
pv, err = testClient.PersistentVolumes().Get("pv-true")
if err != nil {
t.Fatalf("Unexpected error getting pv: %v", err)
}
if pv.Spec.ClaimRef == nil {
t.Fatalf("True PV should be bound")
}
if pv.Spec.ClaimRef.Namespace != pvc.Namespace || pv.Spec.ClaimRef.Name != pvc.Name {
t.Fatalf("Bind mismatch! Expected %s/%s but got %s/%s", pvc.Namespace, pvc.Name, pv.Spec.ClaimRef.Namespace, pv.Spec.ClaimRef.Name)
}
}
// TestPersistentVolumeClaimLabelSelectorMatchExpressions test binding using
// MatchExpressions label selectors
func TestPersistentVolumeClaimLabelSelectorMatchExpressions(t *testing.T) {
_, s := framework.RunAMaster(nil)
defer s.Close()
ns := framework.CreateTestingNamespace("pvc-match-expressions", s, t)
defer framework.DeleteTestingNamespace(ns, s, t)
testClient, controller, watchPV, watchPVC := createClients(ns, t, s)
defer watchPV.Stop()
defer watchPVC.Stop()
// NOTE: This test cannot run in parallel, because it is creating and deleting
// non-namespaced objects (PersistenceVolumes).
defer testClient.Core().PersistentVolumes().DeleteCollection(nil, api.ListOptions{})
controller.Run()
defer controller.Stop()
var (
err error
modes = []api.PersistentVolumeAccessMode{api.ReadWriteOnce}
reclaim = api.PersistentVolumeReclaimRetain
pv_true = createPV("pv-true", "/tmp/foo-label", "1G", modes, reclaim)
pv_false = createPV("pv-false", "/tmp/foo-label", "1G", modes, reclaim)
pvc = createPVC("pvc-ls-1", ns.Name, "1G", modes)
)
pv_true.ObjectMeta.SetLabels(map[string]string{"foo": "valA", "bar": ""})
pv_false.ObjectMeta.SetLabels(map[string]string{"foo": "valB", "baz": ""})
_, err = testClient.PersistentVolumes().Create(pv_true)
if err != nil {
t.Fatalf("Failed to create PersistentVolume: %v", err)
}
_, err = testClient.PersistentVolumes().Create(pv_false)
if err != nil {
t.Fatalf("Failed to create PersistentVolume: %v", err)
}
t.Log("volumes created")
pvc.Spec.Selector = &unversioned.LabelSelector{
MatchExpressions: []unversioned.LabelSelectorRequirement{
{
Key: "foo",
Operator: unversioned.LabelSelectorOpIn,
Values: []string{"valA"},
},
{
Key: "foo",
Operator: unversioned.LabelSelectorOpNotIn,
Values: []string{"valB"},
},
{
Key: "bar",
Operator: unversioned.LabelSelectorOpExists,
Values: []string{},
},
{
Key: "baz",
Operator: unversioned.LabelSelectorOpDoesNotExist,
Values: []string{},
},
},
}
_, err = testClient.PersistentVolumeClaims(ns.Name).Create(pvc)
if err != nil {
t.Fatalf("Failed to create PersistentVolumeClaim: %v", err)
}
t.Log("claim created")
waitForAnyPersistentVolumePhase(watchPV, api.VolumeBound)
t.Log("volume bound")
waitForPersistentVolumeClaimPhase(testClient, pvc.Name, ns.Name, watchPVC, api.ClaimBound)
t.Log("claim bound")
pv, err := testClient.PersistentVolumes().Get("pv-false")
if err != nil {
t.Fatalf("Unexpected error getting pv: %v", err)
}
if pv.Spec.ClaimRef != nil {
t.Fatalf("False PV shouldn't be bound")
}
pv, err = testClient.PersistentVolumes().Get("pv-true")
if err != nil {
t.Fatalf("Unexpected error getting pv: %v", err)
}
if pv.Spec.ClaimRef == nil {
t.Fatalf("True PV should be bound")
}
if pv.Spec.ClaimRef.Namespace != pvc.Namespace || pv.Spec.ClaimRef.Name != pvc.Name {
t.Fatalf("Bind mismatch! Expected %s/%s but got %s/%s", pvc.Namespace, pvc.Name, pv.Spec.ClaimRef.Namespace, pv.Spec.ClaimRef.Name)
}
}
// TestPersistentVolumeMultiPVs tests binding of one PVC to 100 PVs with
// different sizes.
func TestPersistentVolumeMultiPVs(t *testing.T) {
_, s := framework.RunAMaster(nil)
defer s.Close()
ns := framework.CreateTestingNamespace("multi-pvs", s, t)
defer framework.DeleteTestingNamespace(ns, s, t)
testClient, controller, watchPV, watchPVC := createClients(ns, t, s)
defer watchPV.Stop()
defer watchPVC.Stop()
// NOTE: This test cannot run in parallel, because it is creating and deleting
// non-namespaced objects (PersistenceVolumes).
defer testClient.Core().PersistentVolumes().DeleteCollection(nil, api.ListOptions{})
controller.Run()
defer controller.Stop()
maxPVs := getObjectCount()
pvs := make([]*api.PersistentVolume, maxPVs)
for i := 0; i < maxPVs; i++ {
// This PV will be claimed, released, and deleted
pvs[i] = createPV("pv-"+strconv.Itoa(i), "/tmp/foo"+strconv.Itoa(i), strconv.Itoa(i)+"G",
[]api.PersistentVolumeAccessMode{api.ReadWriteOnce}, api.PersistentVolumeReclaimRetain)
}
pvc := createPVC("pvc-2", ns.Name, strconv.Itoa(maxPVs/2)+"G", []api.PersistentVolumeAccessMode{api.ReadWriteOnce})
for i := 0; i < maxPVs; i++ {
_, err := testClient.PersistentVolumes().Create(pvs[i])
if err != nil {
t.Errorf("Failed to create PersistentVolume %d: %v", i, err)
}
waitForPersistentVolumePhase(testClient, pvs[i].Name, watchPV, api.VolumeAvailable)
}
t.Log("volumes created")
_, err := testClient.PersistentVolumeClaims(ns.Name).Create(pvc)
if err != nil {
t.Errorf("Failed to create PersistentVolumeClaim: %v", err)
}
t.Log("claim created")
// wait until the binder pairs the claim with a volume
waitForAnyPersistentVolumePhase(watchPV, api.VolumeBound)
t.Log("volume bound")
waitForPersistentVolumeClaimPhase(testClient, pvc.Name, ns.Name, watchPVC, api.ClaimBound)
t.Log("claim bound")
// only one PV is bound
bound := 0
for i := 0; i < maxPVs; i++ {
pv, err := testClient.PersistentVolumes().Get(pvs[i].Name)
if err != nil {
t.Fatalf("Unexpected error getting pv: %v", err)
}
if pv.Spec.ClaimRef == nil {
continue
}
// found a bound PV
p := pv.Spec.Capacity[api.ResourceStorage]
pvCap := p.Value()
expectedCap := resource.MustParse(strconv.Itoa(maxPVs/2) + "G")
expectedCapVal := expectedCap.Value()
if pv.Spec.ClaimRef.Name != pvc.Name || pvCap != expectedCapVal {
t.Fatalf("Bind mismatch! Expected %s capacity %d but got %s capacity %d", pvc.Name, expectedCapVal, pv.Spec.ClaimRef.Name, pvCap)
}
t.Logf("claim bound to %s capacity %v", pv.Name, pv.Spec.Capacity[api.ResourceStorage])
bound += 1
}
t.Log("volumes checked")
if bound != 1 {
t.Fatalf("Only 1 PV should be bound but got %d", bound)
}
// deleting a claim releases the volume
if err := testClient.PersistentVolumeClaims(ns.Name).Delete(pvc.Name, nil); err != nil {
t.Errorf("error deleting claim %s", pvc.Name)
}
t.Log("claim deleted")
waitForAnyPersistentVolumePhase(watchPV, api.VolumeReleased)
t.Log("volumes released")
}
// TestPersistentVolumeMultiPVsPVCs tests binding of 100 PVC to 100 PVs.
// This test is configurable by KUBE_INTEGRATION_PV_* variables.
func TestPersistentVolumeMultiPVsPVCs(t *testing.T) {
_, s := framework.RunAMaster(nil)
defer s.Close()
ns := framework.CreateTestingNamespace("multi-pvs-pvcs", s, t)
defer framework.DeleteTestingNamespace(ns, s, t)
testClient, binder, watchPV, watchPVC := createClients(ns, t, s)
defer watchPV.Stop()
defer watchPVC.Stop()
// NOTE: This test cannot run in parallel, because it is creating and deleting
// non-namespaced objects (PersistenceVolumes).
defer testClient.Core().PersistentVolumes().DeleteCollection(nil, api.ListOptions{})
binder.Run()
defer binder.Stop()
objCount := getObjectCount()
pvs := make([]*api.PersistentVolume, objCount)
pvcs := make([]*api.PersistentVolumeClaim, objCount)
for i := 0; i < objCount; i++ {
// This PV will be claimed, released, and deleted
pvs[i] = createPV("pv-"+strconv.Itoa(i), "/tmp/foo"+strconv.Itoa(i), "1G",
[]api.PersistentVolumeAccessMode{api.ReadWriteOnce}, api.PersistentVolumeReclaimRetain)
pvcs[i] = createPVC("pvc-"+strconv.Itoa(i), ns.Name, "1G", []api.PersistentVolumeAccessMode{api.ReadWriteOnce})
}
// Create PVs first
glog.V(2).Infof("TestPersistentVolumeMultiPVsPVCs: start")
// Create the volumes in a separate goroutine to pop events from
// watchPV early - it seems it has limited capacity and it gets stuck
// with >3000 volumes.
go func() {
for i := 0; i < objCount; i++ {
_, _ = testClient.PersistentVolumes().Create(pvs[i])
}
}()
// Wait for them to get Available
for i := 0; i < objCount; i++ {
waitForAnyPersistentVolumePhase(watchPV, api.VolumeAvailable)
glog.V(1).Infof("%d volumes available", i+1)
}
glog.V(2).Infof("TestPersistentVolumeMultiPVsPVCs: volumes are Available")
// Create the claims, again in a separate goroutine.
go func() {
for i := 0; i < objCount; i++ {
_, _ = testClient.PersistentVolumeClaims(ns.Name).Create(pvcs[i])
}
}()
// wait until the binder pairs all volumes
for i := 0; i < objCount; i++ {
waitForAnyPersistentVolumeClaimPhase(watchPVC, api.ClaimBound)
glog.V(1).Infof("%d claims bound", i+1)
}
glog.V(2).Infof("TestPersistentVolumeMultiPVsPVCs: claims are bound")
// check that everything is bound to something
for i := 0; i < objCount; i++ {
pv, err := testClient.PersistentVolumes().Get(pvs[i].Name)
if err != nil {
t.Fatalf("Unexpected error getting pv: %v", err)
}
if pv.Spec.ClaimRef == nil {
t.Fatalf("PV %q is not bound", pv.Name)
}
glog.V(2).Infof("PV %q is bound to PVC %q", pv.Name, pv.Spec.ClaimRef.Name)
pvc, err := testClient.PersistentVolumeClaims(ns.Name).Get(pvcs[i].Name)
if err != nil {
t.Fatalf("Unexpected error getting pvc: %v", err)
}
if pvc.Spec.VolumeName == "" {
t.Fatalf("PVC %q is not bound", pvc.Name)
}
glog.V(2).Infof("PVC %q is bound to PV %q", pvc.Name, pvc.Spec.VolumeName)
}
testSleep()
}
// TestPersistentVolumeProvisionMultiPVCs tests provisioning of many PVCs.
// This test is configurable by KUBE_INTEGRATION_PV_* variables.
func TestPersistentVolumeProvisionMultiPVCs(t *testing.T) {
_, s := framework.RunAMaster(nil)
defer s.Close()
ns := framework.CreateTestingNamespace("provision-multi-pvs", s, t)
defer framework.DeleteTestingNamespace(ns, s, t)
testClient, binder, watchPV, watchPVC := createClients(ns, t, s)
defer watchPV.Stop()
defer watchPVC.Stop()
// NOTE: This test cannot run in parallel, because it is creating and deleting
// non-namespaced objects (PersistenceVolumes).
defer testClient.Core().PersistentVolumes().DeleteCollection(nil, api.ListOptions{})
binder.Run()
defer binder.Stop()
objCount := getObjectCount()
pvcs := make([]*api.PersistentVolumeClaim, objCount)
for i := 0; i < objCount; i++ {
pvc := createPVC("pvc-provision-"+strconv.Itoa(i), ns.Name, "1G", []api.PersistentVolumeAccessMode{api.ReadWriteOnce})
pvc.Annotations = map[string]string{
"volume.alpha.kubernetes.io/storage-class": "",
}
pvcs[i] = pvc
}
glog.V(2).Infof("TestPersistentVolumeProvisionMultiPVCs: start")
// Create the claims in a separate goroutine to pop events from watchPVC
// early. It gets stuck with >3000 claims.
go func() {
for i := 0; i < objCount; i++ {
_, _ = testClient.PersistentVolumeClaims(ns.Name).Create(pvcs[i])
}
}()
// Wait until the controller provisions and binds all of them
for i := 0; i < objCount; i++ {
waitForAnyPersistentVolumeClaimPhase(watchPVC, api.ClaimBound)
glog.V(1).Infof("%d claims bound", i+1)
}
glog.V(2).Infof("TestPersistentVolumeProvisionMultiPVCs: claims are bound")
// check that we have enough bound PVs
pvList, err := testClient.PersistentVolumes().List(api.ListOptions{})
if err != nil {
t.Fatalf("Failed to list volumes: %s", err)
}
if len(pvList.Items) != objCount {
t.Fatalf("Expected to get %d volumes, got %d", objCount, len(pvList.Items))
}
for i := 0; i < objCount; i++ {
pv := &pvList.Items[i]
if pv.Status.Phase != api.VolumeBound {
t.Fatalf("Expected volume %s to be bound, is %s instead", pv.Name, pv.Status.Phase)
}
glog.V(2).Infof("PV %q is bound to PVC %q", pv.Name, pv.Spec.ClaimRef.Name)
}
// Delete the claims
for i := 0; i < objCount; i++ {
_ = testClient.PersistentVolumeClaims(ns.Name).Delete(pvcs[i].Name, nil)
}
// Wait for the PVs to get deleted by listing remaining volumes
// (delete events were unreliable)
for {
volumes, err := testClient.PersistentVolumes().List(api.ListOptions{})
if err != nil {
t.Fatalf("Failed to list volumes: %v", err)
}
glog.V(1).Infof("%d volumes remaining", len(volumes.Items))
if len(volumes.Items) == 0 {
break
}
time.Sleep(time.Second)
}
glog.V(2).Infof("TestPersistentVolumeProvisionMultiPVCs: volumes are deleted")
}
// TestPersistentVolumeMultiPVsDiffAccessModes tests binding of one PVC to two
// PVs with different access modes.
func TestPersistentVolumeMultiPVsDiffAccessModes(t *testing.T) {
_, s := framework.RunAMaster(nil)
defer s.Close()
ns := framework.CreateTestingNamespace("multi-pvs-diff-access", s, t)
defer framework.DeleteTestingNamespace(ns, s, t)
testClient, controller, watchPV, watchPVC := createClients(ns, t, s)
defer watchPV.Stop()
defer watchPVC.Stop()
// NOTE: This test cannot run in parallel, because it is creating and deleting
// non-namespaced objects (PersistenceVolumes).
defer testClient.Core().PersistentVolumes().DeleteCollection(nil, api.ListOptions{})
controller.Run()
defer controller.Stop()
// This PV will be claimed, released, and deleted
pv_rwo := createPV("pv-rwo", "/tmp/foo", "10G",
[]api.PersistentVolumeAccessMode{api.ReadWriteOnce}, api.PersistentVolumeReclaimRetain)
pv_rwm := createPV("pv-rwm", "/tmp/bar", "10G",
[]api.PersistentVolumeAccessMode{api.ReadWriteMany}, api.PersistentVolumeReclaimRetain)
pvc := createPVC("pvc-rwm", ns.Name, "5G", []api.PersistentVolumeAccessMode{api.ReadWriteMany})
_, err := testClient.PersistentVolumes().Create(pv_rwm)
if err != nil {
t.Errorf("Failed to create PersistentVolume: %v", err)
}
_, err = testClient.PersistentVolumes().Create(pv_rwo)
if err != nil {
t.Errorf("Failed to create PersistentVolume: %v", err)
}
t.Log("volumes created")
_, err = testClient.PersistentVolumeClaims(ns.Name).Create(pvc)
if err != nil {
t.Errorf("Failed to create PersistentVolumeClaim: %v", err)
}
t.Log("claim created")
// wait until the controller pairs the volume and claim
waitForAnyPersistentVolumePhase(watchPV, api.VolumeBound)
t.Log("volume bound")
waitForPersistentVolumeClaimPhase(testClient, pvc.Name, ns.Name, watchPVC, api.ClaimBound)
t.Log("claim bound")
// only RWM PV is bound
pv, err := testClient.PersistentVolumes().Get("pv-rwo")
if err != nil {
t.Fatalf("Unexpected error getting pv: %v", err)
}
if pv.Spec.ClaimRef != nil {
t.Fatalf("ReadWriteOnce PV shouldn't be bound")
}
pv, err = testClient.PersistentVolumes().Get("pv-rwm")
if err != nil {
t.Fatalf("Unexpected error getting pv: %v", err)
}
if pv.Spec.ClaimRef == nil {
t.Fatalf("ReadWriteMany PV should be bound")
}
if pv.Spec.ClaimRef.Name != pvc.Name {
t.Fatalf("Bind mismatch! Expected %s but got %s", pvc.Name, pv.Spec.ClaimRef.Name)
}
// deleting a claim releases the volume
if err := testClient.PersistentVolumeClaims(ns.Name).Delete(pvc.Name, nil); err != nil {
t.Errorf("error deleting claim %s", pvc.Name)
}
t.Log("claim deleted")
waitForAnyPersistentVolumePhase(watchPV, api.VolumeReleased)
t.Log("volume released")
}
func waitForPersistentVolumePhase(client *clientset.Clientset, pvName string, w watch.Interface, phase api.PersistentVolumePhase) {
// Check if the volume is already in requested phase
volume, err := client.Core().PersistentVolumes().Get(pvName)
if err == nil && volume.Status.Phase == phase {
return
}
// Wait for the phase
for {
event := <-w.ResultChan()
volume, ok := event.Object.(*api.PersistentVolume)
if !ok {
continue
}
if volume.Status.Phase == phase && volume.Name == pvName {
glog.V(2).Infof("volume %q is %s", volume.Name, phase)
break
}
}
}
func waitForPersistentVolumeClaimPhase(client *clientset.Clientset, claimName, namespace string, w watch.Interface, phase api.PersistentVolumeClaimPhase) {
// Check if the claim is already in requested phase
claim, err := client.Core().PersistentVolumeClaims(namespace).Get(claimName)
if err == nil && claim.Status.Phase == phase {
return
}
// Wait for the phase
for {
event := <-w.ResultChan()
claim, ok := event.Object.(*api.PersistentVolumeClaim)
if !ok {
continue
}
if claim.Status.Phase == phase && claim.Name == claimName {
glog.V(2).Infof("claim %q is %s", claim.Name, phase)
break
}
}
}
func waitForAnyPersistentVolumePhase(w watch.Interface, phase api.PersistentVolumePhase) {
for {
event := <-w.ResultChan()
volume, ok := event.Object.(*api.PersistentVolume)
if !ok {
continue
}
if volume.Status.Phase == phase {
glog.V(2).Infof("volume %q is %s", volume.Name, phase)
break
}
}
}
func waitForAnyPersistentVolumeClaimPhase(w watch.Interface, phase api.PersistentVolumeClaimPhase) {
for {
event := <-w.ResultChan()
claim, ok := event.Object.(*api.PersistentVolumeClaim)
if !ok {
continue
}
if claim.Status.Phase == phase {
glog.V(2).Infof("claim %q is %s", claim.Name, phase)
break
}
}
}
func createClients(ns *api.Namespace, t *testing.T, s *httptest.Server) (*clientset.Clientset, *persistentvolumecontroller.PersistentVolumeController, watch.Interface, watch.Interface) {
// Use higher QPS and Burst, there is a test for race conditions which
// creates many objects and default values were too low.
binderClient := clientset.NewForConfigOrDie(&restclient.Config{
Host: s.URL,
ContentConfig: restclient.ContentConfig{GroupVersion: testapi.Default.GroupVersion()},
QPS: 1000000,
Burst: 1000000,
})
testClient := clientset.NewForConfigOrDie(&restclient.Config{
Host: s.URL,
ContentConfig: restclient.ContentConfig{GroupVersion: testapi.Default.GroupVersion()},
QPS: 1000000,
Burst: 1000000,
})
host := volumetest.NewFakeVolumeHost("/tmp/fake", nil, nil, "" /* rootContext */)
plugin := &volumetest.FakeVolumePlugin{
PluginName: "plugin-name",
Host: host,
Config: volume.VolumeConfig{},
LastProvisionerOptions: volume.VolumeOptions{},
NewAttacherCallCount: 0,
NewDetacherCallCount: 0,
Mounters: nil,
Unmounters: nil,
Attachers: nil,
Detachers: nil,
}
plugins := []volume.VolumePlugin{plugin}
cloud := &fake_cloud.FakeCloud{}
syncPeriod := getSyncPeriod()
ctrl := persistentvolumecontroller.NewPersistentVolumeController(binderClient, syncPeriod, plugin, plugins, cloud, "", nil, nil, nil, true)
watchPV, err := testClient.PersistentVolumes().Watch(api.ListOptions{})
if err != nil {
t.Fatalf("Failed to watch PersistentVolumes: %v", err)
}
watchPVC, err := testClient.PersistentVolumeClaims(ns.Name).Watch(api.ListOptions{})
if err != nil {
t.Fatalf("Failed to watch PersistentVolumeClaims: %v", err)
}
return testClient, ctrl, watchPV, watchPVC
}
func createPV(name, path, cap string, mode []api.PersistentVolumeAccessMode, reclaim api.PersistentVolumeReclaimPolicy) *api.PersistentVolume {
return &api.PersistentVolume{
ObjectMeta: api.ObjectMeta{Name: name},
Spec: api.PersistentVolumeSpec{
PersistentVolumeSource: api.PersistentVolumeSource{HostPath: &api.HostPathVolumeSource{Path: path}},
Capacity: api.ResourceList{api.ResourceName(api.ResourceStorage): resource.MustParse(cap)},
AccessModes: mode,
PersistentVolumeReclaimPolicy: reclaim,
},
}
}
func createPVC(name, namespace, cap string, mode []api.PersistentVolumeAccessMode) *api.PersistentVolumeClaim {
return &api.PersistentVolumeClaim{
ObjectMeta: api.ObjectMeta{
Name: name,
Namespace: namespace,
},
Spec: api.PersistentVolumeClaimSpec{
Resources: api.ResourceRequirements{Requests: api.ResourceList{api.ResourceName(api.ResourceStorage): resource.MustParse(cap)}},
AccessModes: mode,
},
}
}
| [
"\"KUBE_INTEGRATION_PV_OBJECTS\"",
"\"KUBE_INTEGRATION_PV_SYNC_PERIOD\"",
"\"KUBE_INTEGRATION_PV_END_SLEEP\""
]
| []
| [
"KUBE_INTEGRATION_PV_SYNC_PERIOD",
"KUBE_INTEGRATION_PV_OBJECTS",
"KUBE_INTEGRATION_PV_END_SLEEP"
]
| [] | ["KUBE_INTEGRATION_PV_SYNC_PERIOD", "KUBE_INTEGRATION_PV_OBJECTS", "KUBE_INTEGRATION_PV_END_SLEEP"] | go | 3 | 0 | |
examples/authenticate/clientcreds/client_credentials.go | // This example demonstrates how to authenticate with Spotify using the
// client credentials flow. Note that this flow does not include authorization
// and can't be used to access a user's private data.
//
// Make sure you set the SPOTIFY_ID and SPOTIFY_SECRET environment variables
// prior to running this example.
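// For example (hypothetical values):
//   export SPOTIFY_ID=your-client-id
//   export SPOTIFY_SECRET=your-client-secret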
package main
import (
"context"
"fmt"
"log"
"os"
"github.com/ankjevel/spotify"
"golang.org/x/oauth2/clientcredentials"
)
func main() {
config := &clientcredentials.Config{
ClientID: os.Getenv("SPOTIFY_ID"),
ClientSecret: os.Getenv("SPOTIFY_SECRET"),
TokenURL: spotify.TokenURL,
}
token, err := config.Token(context.Background())
if err != nil {
log.Fatalf("couldn't get token: %v", err)
}
client := spotify.Authenticator{}.NewClient(token)
msg, page, err := client.FeaturedPlaylists()
if err != nil {
log.Fatalf("couldn't get features playlists: %v", err)
}
fmt.Println(msg)
for _, playlist := range page.Playlists {
fmt.Println(" ", playlist.Name)
}
}
| [
"\"SPOTIFY_ID\"",
"\"SPOTIFY_SECRET\""
]
| []
| [
"SPOTIFY_ID",
"SPOTIFY_SECRET"
]
| [] | ["SPOTIFY_ID", "SPOTIFY_SECRET"] | go | 2 | 0 | |
models/config/client_common.go | // Copyright 2016 fatedier, [email protected]
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package config
import (
"fmt"
"os"
"strconv"
"strings"
ini "github.com/vaughan0/go-ini"
)
// client common config
type ClientCommonConf struct {
ServerAddr string `json:"server_addr"`
ServerPort int `json:"server_port"`
HttpProxy string `json:"http_proxy"`
LogFile string `json:"log_file"`
LogWay string `json:"log_way"`
LogLevel string `json:"log_level"`
LogMaxDays int64 `json:"log_max_days"`
Token string `json:"token"`
AdminAddr string `json:"admin_addr"`
AdminPort int `json:"admin_port"`
AdminUser string `json:"admin_user"`
AdminPwd string `json:"admin_pwd"`
PoolCount int `json:"pool_count"`
TcpMux bool `json:"tcp_mux"`
User string `json:"user"`
DnsServer string `json:"dns_server"`
LoginFailExit bool `json:"login_fail_exit"`
Start map[string]struct{} `json:"start"`
Protocol string `json:"protocol"`
TLSEnable bool `json:"tls_enable"`
HeartBeatInterval int64 `json:"heartbeat_interval"`
HeartBeatTimeout int64 `json:"heartbeat_timeout"`
}
func GetDefaultClientConf() *ClientCommonConf {
return &ClientCommonConf{
ServerAddr: "0.0.0.0",
ServerPort: 7000,
HttpProxy: os.Getenv("http_proxy"),
LogFile: "console",
LogWay: "console",
LogLevel: "info",
LogMaxDays: 3,
Token: "",
AdminAddr: "127.0.0.1",
AdminPort: 0,
AdminUser: "",
AdminPwd: "",
PoolCount: 1,
TcpMux: true,
User: "",
DnsServer: "",
LoginFailExit: true,
Start: make(map[string]struct{}),
Protocol: "tcp",
TLSEnable: false,
HeartBeatInterval: 30,
HeartBeatTimeout: 90,
}
}
func UnmarshalClientConfFromIni(defaultCfg *ClientCommonConf, content string) (cfg *ClientCommonConf, err error) {
cfg = defaultCfg
if cfg == nil {
cfg = GetDefaultClientConf()
}
conf, err := ini.Load(strings.NewReader(content))
if err != nil {
err = fmt.Errorf("parse ini conf file error: %v", err)
return nil, err
}
var (
tmpStr string
ok bool
v int64
)
if tmpStr, ok = conf.Get("common", "server_addr"); ok {
cfg.ServerAddr = tmpStr
}
if tmpStr, ok = conf.Get("common", "server_port"); ok {
v, err = strconv.ParseInt(tmpStr, 10, 64)
if err != nil {
err = fmt.Errorf("Parse conf error: invalid server_port")
return
}
cfg.ServerPort = int(v)
}
if tmpStr, ok = conf.Get("common", "http_proxy"); ok {
cfg.HttpProxy = tmpStr
}
if tmpStr, ok = conf.Get("common", "log_file"); ok {
cfg.LogFile = tmpStr
if cfg.LogFile == "console" {
cfg.LogWay = "console"
} else {
cfg.LogWay = "file"
}
}
if tmpStr, ok = conf.Get("common", "log_level"); ok {
cfg.LogLevel = tmpStr
}
if tmpStr, ok = conf.Get("common", "log_max_days"); ok {
if v, err = strconv.ParseInt(tmpStr, 10, 64); err == nil {
cfg.LogMaxDays = v
}
}
if tmpStr, ok = conf.Get("common", "token"); ok {
cfg.Token = tmpStr
}
if tmpStr, ok = conf.Get("common", "admin_addr"); ok {
cfg.AdminAddr = tmpStr
}
if tmpStr, ok = conf.Get("common", "admin_port"); ok {
if v, err = strconv.ParseInt(tmpStr, 10, 64); err == nil {
cfg.AdminPort = int(v)
} else {
err = fmt.Errorf("Parse conf error: invalid admin_port")
return
}
}
if tmpStr, ok = conf.Get("common", "admin_user"); ok {
cfg.AdminUser = tmpStr
}
if tmpStr, ok = conf.Get("common", "admin_pwd"); ok {
cfg.AdminPwd = tmpStr
}
if tmpStr, ok = conf.Get("common", "pool_count"); ok {
if v, err = strconv.ParseInt(tmpStr, 10, 64); err == nil {
cfg.PoolCount = int(v)
}
}
if tmpStr, ok = conf.Get("common", "tcp_mux"); ok && tmpStr == "false" {
cfg.TcpMux = false
} else {
cfg.TcpMux = true
}
if tmpStr, ok = conf.Get("common", "user"); ok {
cfg.User = tmpStr
}
if tmpStr, ok = conf.Get("common", "dns_server"); ok {
cfg.DnsServer = tmpStr
}
if tmpStr, ok = conf.Get("common", "start"); ok {
proxyNames := strings.Split(tmpStr, ",")
for _, name := range proxyNames {
cfg.Start[strings.TrimSpace(name)] = struct{}{}
}
}
if tmpStr, ok = conf.Get("common", "login_fail_exit"); ok && tmpStr == "false" {
cfg.LoginFailExit = false
} else {
cfg.LoginFailExit = true
}
if tmpStr, ok = conf.Get("common", "protocol"); ok {
// Now it only supports tcp, kcp and websocket.
if tmpStr != "tcp" && tmpStr != "kcp" && tmpStr != "websocket" {
err = fmt.Errorf("Parse conf error: invalid protocol")
return
}
cfg.Protocol = tmpStr
}
if tmpStr, ok = conf.Get("common", "tls_enable"); ok && tmpStr == "true" {
cfg.TLSEnable = true
} else {
cfg.TLSEnable = false
}
if tmpStr, ok = conf.Get("common", "heartbeat_timeout"); ok {
if v, err = strconv.ParseInt(tmpStr, 10, 64); err != nil {
err = fmt.Errorf("Parse conf error: invalid heartbeat_timeout")
return
} else {
cfg.HeartBeatTimeout = v
}
}
if tmpStr, ok = conf.Get("common", "heartbeat_interval"); ok {
if v, err = strconv.ParseInt(tmpStr, 10, 64); err != nil {
err = fmt.Errorf("Parse conf error: invalid heartbeat_interval")
return
} else {
cfg.HeartBeatInterval = v
}
}
return
}
func (cfg *ClientCommonConf) Check() (err error) {
if cfg.HeartBeatInterval <= 0 {
err = fmt.Errorf("Parse conf error: invalid heartbeat_interval")
return
}
if cfg.HeartBeatTimeout < cfg.HeartBeatInterval {
err = fmt.Errorf("Parse conf error: invalid heartbeat_timeout, heartbeat_timeout is less than heartbeat_interval")
return
}
return
}
| [
"\"http_proxy\""
]
| []
| [
"http_proxy"
]
| [] | ["http_proxy"] | go | 1 | 0 | |
test_torch.py | # -*- coding: utf-8 -*-
import os
os.environ["CUDA_VISIBLE_DEVICES"] = "0"
import cv2
import time
import numpy as np
import torch
import segmentation_models_pytorch as smp
import albumentations as A
from albumentations.pytorch import ToTensorV2
from config import MEAN, STD, COLORS, N_CLASS
from utils import load_img, dump_str, randering_mask
def get_val_transform(input_size):
return A.Compose([
A.Resize(input_size, input_size),
A.Normalize(mean=MEAN, std=STD),
ToTensorV2(),
])
def create_model(arch, encoder, in_channel, out_channel, pretrained=None):
smp_net = getattr(smp, arch)
model = smp_net( # smp.UnetPlusPlus
encoder_name=encoder, # choose encoder, e.g. mobilenet_v2 or efficientnet-b7
encoder_weights=pretrained, # use `imagenet` pretrained weights for encoder initialization
in_channels=in_channel, # model input channels (1 for grayscale images, 3 for RGB, etc.)
classes=out_channel, # model output channels (number of classes in your dataset)
)
return model
def test_pytorch(test_dir, model, save_dir='torch_output'):
test_set = [os.path.join(test_dir, f) for f in os.listdir(test_dir) if f.endswith('.png')]
test_num = len(test_set)
print('test num:', test_num)
os.makedirs(save_dir, exist_ok=True)
transform = get_val_transform(256)
total_time = 0
model.eval()
for i, path in enumerate(test_set):
img_name = os.path.basename(path)
print(f'{i + 1}/{test_num}, {img_name}')
raw_img = cv2.imread(path)
_img = cv2.cvtColor(raw_img, cv2.COLOR_BGR2RGB)
_img = transform(image=_img)['image']
_img = _img.unsqueeze(0)
t = time.time()
with torch.no_grad():
_img = _img.cuda()
output = model(_img)
output = output.squeeze().cpu().numpy()
total_time += time.time() - t
pred_mask = np.argmax(output, axis=0)
if True:
img_draw = randering_mask(raw_img, pred_mask, N_CLASS, COLORS, alpha=0.8, beta=0.5)
cv2.imwrite(os.path.join(save_dir, img_name[:-4] + '_cover.jpg'), img_draw)
print('pytorch:', total_time / len(test_set))
if __name__ == "__main__":
test_dir = 'imgs_0.4_512'
ckpt_file = 'upp_rsn50.v8.1/ckpt/checkpoint-epoch92.pth'
checkpoint = torch.load(ckpt_file)
state_dict = checkpoint['state_dict']
model = create_model(arch='UnetPlusPlus',
encoder='resnet50',
in_channel=3,
out_channel=N_CLASS).cuda()
model.load_state_dict(state_dict)
test_pytorch(test_dir, model)
| []
| []
| [
"CUDA_VISIBLE_DEVICES"
]
| [] | ["CUDA_VISIBLE_DEVICES"] | python | 1 | 0 | |
database_test.go | package gxutil
import (
"context"
"errors"
"os"
"strings"
"testing"
"time"
"github.com/spf13/cast"
"github.com/stretchr/testify/assert"
// "github.com/gobuffalo/packr"
)
var (
PostgresURL = os.Getenv("POSTGRES_URL")
SQLiteURL = "./test.db"
)
type person struct {
FirstName string `gorm:"primary_key" json:"first_name"`
LastName string `json:"last_name"`
Email string `json:"email"`
}
type place struct {
Country string `json:"country" gorm:"index:idx_country_city"`
City string `json:"city" gorm:"index:idx_country_city"`
Telcode int64 `json:"telcode"`
}
type transact struct {
Datetime time.Time `json:"date" `
Description string `json:"description"`
OriginalDescription string `json:"original_description"`
Amount float64 `json:"amount"`
TransactionType string `json:"transaction_type"`
Category string `json:"category"`
AccountName string `json:"account_name"`
Labels string `json:"labels"`
Notes string `json:"notes"`
}
type testDB struct {
conn Connection
name string
URL string
schema string
transactDDL string
personDDL string
placeDDL string
placeIndex string
placeVwDDL string
placeVwSelect string
}
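// DBs maps a dialect name to its test fixture: the connection URL (read from the
// environment), the default schema, and dialect-specific DDL used by DBTest.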
var DBs = map[string]*testDB{
"postgres": &testDB{
name: "postgres",
URL: os.Getenv("POSTGRES_URL"),
schema: "public",
transactDDL: `CREATE TABLE transact (date_time date, description varchar(255), original_description varchar(255), amount decimal(10,5), transaction_type varchar(255), category varchar(255), account_name varchar(255), labels varchar(255), notes varchar(255) )`,
personDDL: `CREATE TABLE person (first_name varchar(255), last_name varchar(255), email varchar(255), CONSTRAINT person_first_name PRIMARY KEY (first_name) )`,
placeDDL: "CREATE TABLE public.place\n(\n \"country\" text NULL,\n \"city\" text NULL,\n \"telcode\" bigint NULL\n)",
placeIndex: `CREATE INDEX idx_country_city
ON place(country, city)`,
placeVwDDL: `create or replace view place_vw as select * from place where telcode = 65`,
placeVwSelect: " SELECT place.country,\n place.city,\n place.telcode\n FROM place\n WHERE (place.telcode = 65);",
},
"sqlite3": &testDB{
name: "sqlite3",
URL: "file:./test.db",
schema: "main",
transactDDL: `CREATE TABLE transact (date_time date, description varchar(255), original_description varchar(255), amount decimal(10,5), transaction_type varchar(255), category varchar(255), account_name varchar(255), labels varchar(255), notes varchar(255) )`,
personDDL: `CREATE TABLE person (first_name varchar(255), last_name varchar(255), email varchar(255), CONSTRAINT person_first_name PRIMARY KEY (first_name) )`,
placeDDL: "CREATE TABLE \"place\" (\"country\" varchar(255),\"city\" varchar(255),\"telcode\" bigint )",
placeIndex: `CREATE INDEX idx_country_city
ON place(country, city)`,
placeVwDDL: "CREATE VIEW place_vw as select * from place where telcode = 65",
placeVwSelect: "CREATE VIEW place_vw as select * from place where telcode = 65",
},
"mysql": &testDB{
name: "mysql",
URL: os.Getenv("MYSQL_URL"),
schema: "mysql",
transactDDL: `CREATE TABLE transact (date_time date, description varchar(255), original_description varchar(255), amount decimal(10,5), transaction_type varchar(255), category varchar(255), account_name varchar(255), labels varchar(255), notes varchar(255) )`,
personDDL: `CREATE TABLE person (first_name varchar(255), last_name varchar(255), email varchar(255), CONSTRAINT person_first_name PRIMARY KEY (first_name) )`,
placeDDL: "CREATE TABLE `place` (\n `country` varchar(255) DEFAULT NULL,\n `city` varchar(255) DEFAULT NULL,\n `telcode` decimal(10,0) DEFAULT NULL,\n KEY `idx_country_city` (`country`,`city`)\n) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4 COLLATE=utf8mb4_0900_ai_ci",
placeIndex: `select 1`, //`CREATE INDEX idx_country_city ON place(country, city)`,
placeVwDDL: `create or replace view place_vw as select * from place where telcode = 65`,
placeVwSelect: "CREATE ALGORITHM=UNDEFINED DEFINER=`admin`@`%` SQL SECURITY DEFINER VIEW `place_vw` AS select `place`.`country` AS `country`,`place`.`city` AS `city`,`place`.`telcode` AS `telcode` from `place` where (`place`.`telcode` = 65)",
},
// "sqlserver": &testDB{
// name: "sqlserver",
// URL: os.Getenv("MSSQL_URL"),
// schema: "public",
// transactDDL: `CREATE TABLE transact (date_time date, description varchar(255), original_description varchar(255), amount decimal(10,5), transaction_type varchar(255), category varchar(255), account_name varchar(255), labels varchar(255), notes varchar(255) )`,
// personDDL: `CREATE TABLE person (first_name varchar(255), last_name varchar(255), email varchar(255), CONSTRAINT person_first_name PRIMARY KEY (first_name) )`,
// placeDDL: "CREATE TABLE public.place\n(\n \"country\" text NULL,\n \"city\" text NULL,\n \"telcode\" bigint NULL\n)",
// placeIndex: `CREATE INDEX idx_country_city
// ON place(country, city)`,
// placeVwDDL: `create or replace view place_vw as select * from place where telcode = 65`,
// placeVwSelect: " SELECT place.country,\n place.city,\n place.telcode\n FROM place\n WHERE (place.telcode = 65);",
// },
"oracle": &testDB{
name: "oracle",
URL: os.Getenv("ORACLE_URL"),
schema: "system",
transactDDL: `CREATE TABLE transact (date_time date, description varchar(255), original_description varchar(255), amount decimal(10,5), transaction_type varchar(255), category varchar(255), account_name varchar(255), labels varchar(255), notes varchar(255) )`,
personDDL: `CREATE TABLE person (first_name varchar(255), last_name varchar(255), email varchar(255), CONSTRAINT person_first_name PRIMARY KEY (first_name) )`,
placeDDL: "\n CREATE TABLE \"SYSTEM\".\"PLACE\" \n (\t\"COUNTRY\" VARCHAR2(255), \n\t\"CITY\" VARCHAR2(255), \n\t\"TELCODE\" NUMBER(*,0)\n ) PCTFREE 10 PCTUSED 40 INITRANS 1 MAXTRANS 255 NOCOMPRESS LOGGING\n STORAGE(INITIAL 65536 NEXT 1048576 MINEXTENTS 1 MAXEXTENTS 2147483645\n PCTINCREASE 0 FREELISTS 1 FREELIST GROUPS 1 BUFFER_POOL DEFAULT FLASH_CACHE DEFAULT CELL_FLASH_CACHE DEFAULT)\n TABLESPACE \"SYSTEM\" ",
placeIndex: `CREATE INDEX idx_country_city
ON place(country, city)`,
placeVwDDL: "CREATE VIEW place_vw as select * from place where telcode = 65",
placeVwSelect: "select \"COUNTRY\",\"CITY\",\"TELCODE\" from place where telcode = 65",
},
"redshift": &testDB{
name: "redshift",
URL: os.Getenv("REDSHIFT_URL"),
schema: "public",
transactDDL: `CREATE TABLE public.transact (date_time date, description varchar(255), original_description varchar(255), amount decimal(10,5), transaction_type varchar(255), category varchar(255), account_name varchar(255), labels varchar(255), notes varchar(255) )`,
personDDL: `CREATE TABLE public.person (first_name varchar(255), last_name varchar(255), email varchar(255), CONSTRAINT person_first_name PRIMARY KEY (first_name) )`,
placeDDL: "CREATE TABLE public.place\n(\n \"country\" text NULL,\n \"city\" text NULL,\n \"telcode\" bigint NULL\n)",
placeIndex: `CREATE INDEX idx_country_city
ON place(country, city)`,
placeVwDDL: `create or replace view public.place_vw as select * from place where telcode = 65`,
placeVwSelect: " SELECT place.country,\n place.city,\n place.telcode\n FROM place\n WHERE (place.telcode = 65);",
},
}
func TestPostgres(t *testing.T) {
DBTest(t, DBs["postgres"])
}
func TestSQLite(t *testing.T) {
os.Remove(strings.ReplaceAll(DBs["sqlite3"].URL, "file:", ""))
DBTest(t, DBs["sqlite3"])
}
func TestMySQL(t *testing.T) {
DBTest(t, DBs["mysql"])
}
func TestOracle(t *testing.T) {
DBTest(t, DBs["oracle"])
}
func TestRedshift(t *testing.T) {
// DBTest(t, DBs["redshift"])
}
func TestSqlServer(t *testing.T) {
// DBTest(t, DBs["sqlserver"])
}
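// DBTest exercises one database end to end: DDL creation, inserts, streaming
// queries, metadata introspection, analysis templates, CSV loading and cleanup.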
func DBTest(t *testing.T, db *testDB) {
println("Testing " + db.name)
if db.URL == "" {
assert.Error(t, errors.New("No Env Var URL for "+db.name))
return
}
conn := GetConn(db.URL)
err := conn.Connect()
assert.NoError(t, err)
err = conn.DropTable(db.schema+".person", db.schema+".place", db.schema+".transact")
assert.NoError(t, err)
err = conn.DropView(db.schema + ".place_vw")
assert.NoError(t, err)
// gConn, err := conn.GetGormConn()
// assert.NoError(t, err)
// gConn.SingularTable(true)
// gConn.AutoMigrate(&person{}, &place{}, &transact{})
conn.Db().MustExec(db.transactDDL)
conn.Db().MustExec(db.personDDL)
conn.Db().MustExec(db.placeDDL)
conn.Db().MustExec(db.placeVwDDL)
if db.name != "redshift" {
conn.Db().MustExec(db.placeIndex)
}
personInsertStatement := conn.GenerateInsertStatement("person", []string{"first_name", "last_name", "email"})
placeInsertStatement := conn.GenerateInsertStatement("place", []string{"country", "city", "telcode"})
transactInsertStatement := conn.GenerateInsertStatement("transact", []string{"date_time", "description", "amount"})
tx := conn.Db().MustBegin()
tx.MustExec(personInsertStatement, "Jason", "Moiron", "[email protected]")
tx.MustExec(personInsertStatement, "John", "Doe", "[email protected]")
tx.MustExec(placeInsertStatement, "United States", "New York", "1")
tx.MustExec(placeInsertStatement, "Hong Kong", nil, "852")
tx.MustExec(placeInsertStatement, "Singapore", nil, "65")
tx.MustExec(transactInsertStatement, cast.ToTime("2019-10-10"), "test\" \nproduct", 65.657)
tx.MustExec(transactInsertStatement, cast.ToTime("2020-10-10"), "new \nproduct", 5.657)
tx.Commit()
// Test Streaming
streamRec, err := conn.StreamRecords(`select * from person`)
assert.NoError(t, err)
recs := []map[string]interface{}{}
for rec := range streamRec {
recs = append(recs, rec)
}
assert.Len(t, recs, 2)
stream, err := conn.StreamRows(`select * from person`)
assert.NoError(t, err)
rows := [][]interface{}{}
for row := range stream.Rows {
rows = append(rows, row)
}
assert.Len(t, rows, 2)
data, err := conn.Query(`select * from person`)
assert.NoError(t, err)
assert.Len(t, data.Rows, 2)
data, err = conn.Query(`select * from place`)
assert.NoError(t, err)
assert.Len(t, data.Rows, 3)
data, err = conn.Query(`select * from transact`)
assert.NoError(t, err)
assert.Len(t, data.Rows, 2)
assert.Equal(t, 65.657, cast.ToFloat64(data.Records()[0]["amount"]))
// GetSchemas
data, err = conn.GetSchemas()
assert.NoError(t, err)
assert.Greater(t, len(data.Rows), 0)
// GetTables
data, err = conn.GetTables(db.schema)
assert.NoError(t, err)
assert.Greater(t, len(data.Rows), 0)
// GetViews
data, err = conn.GetViews(db.schema)
assert.NoError(t, err)
assert.Greater(t, len(data.Rows), 0)
assert.Greater(t, data.Duration, 0.0)
// GetColumns
data, err = conn.GetColumns(db.schema + ".person")
assert.NoError(t, err)
assert.Len(t, data.Rows, 3)
assert.Contains(t, []string{"text", "varchar(255)", "VARCHAR2", "character varying", "varchar"}, data.Records()[0]["data_type"])
// GetPrimaryKeys
if db.name != "redshift" {
data, err = conn.GetPrimaryKeys(db.schema + ".person")
assert.NoError(t, err)
assert.Len(t, data.Rows, 1)
assert.Equal(t, "first_name", strings.ToLower(cast.ToString(data.Records()[0]["column_name"])))
}
// GetIndexes
if db.name != "redshift" {
data, err = conn.GetIndexes(db.schema + ".place")
assert.NoError(t, err)
assert.Len(t, data.Rows, 2)
assert.Equal(t, "city", strings.ToLower(cast.ToString(data.Records()[1]["column_name"])))
}
// GetColumnsFull
data, err = conn.GetColumnsFull(db.schema + ".place")
assert.NoError(t, err)
assert.Len(t, data.Rows, 3)
assert.Contains(t, []string{"bigint", "NUMBER", "decimal"}, data.Records()[2]["data_type"])
// GetDDL of table
if db.name != "redshift" {
ddl, err := conn.GetDDL(db.schema + ".place")
assert.NoError(t, err)
assert.Equal(t, db.placeDDL, ddl)
}
// GetDDL of view
if db.name != "redshift" {
ddl, err := conn.GetDDL(db.schema + ".place_vw")
assert.NoError(t, err)
assert.Equal(t, db.placeVwSelect, ddl)
}
// load Csv from test file
csv1 := CSV{Path: "test/test1.csv"}
stream, err = csv1.ReadStream()
assert.NoError(t, err)
csvTable := db.schema + ".test1"
err = conn.DropTable(csvTable)
assert.NoError(t, err)
ddl, err := conn.GenerateDDL(csvTable, Dataset{Columns: stream.Columns, Rows: stream.Buffer})
assert.NoError(t, err)
ok := assert.NotEmpty(t, ddl)
if ok {
_, err = conn.Db().Exec(ddl)
assert.NoError(t, err)
// import to database
conn.SetProp("s3Bucket", os.Getenv("S3_BUCKET"))
_, err = conn.InsertStream(csvTable, stream)
assert.NoError(t, err)
// select back to assert equality
count, err := conn.GetCount(csvTable)
assert.NoError(t, err)
assert.Equal(t, uint64(1000), count)
}
// Test Schemata
sData, err := conn.GetSchemata(db.schema)
assert.NoError(t, err)
assert.Equal(t, db.schema, sData.Name)
assert.Contains(t, sData.Tables, "person")
assert.Contains(t, sData.Tables, "place_vw")
assert.Contains(t, conn.Schemata().Tables, db.schema+".person")
assert.Len(t, sData.Tables["person"].Columns, 3)
assert.Contains(t, []string{"text", "varchar(255)", "VARCHAR2", "character varying", "varchar"}, sData.Tables["person"].ColumnsMap["email"].Type)
assert.Equal(t, true, sData.Tables["place_vw"].IsView)
assert.Equal(t, int64(3), conn.Schemata().Tables[db.schema+".person"].ColumnsMap["email"].Position)
// RunAnalysis field_stat
values := map[string]interface{}{
"t1": db.schema + ".place",
"t2": db.schema + ".place",
"t1_field": "t1.country",
"t1_fields1": "country",
"t1_filter": "1=1",
"t2_field": "t2.country",
"t2_fields1": "country",
"t2_filter": "1=1",
"conds": `lower(t1.country) = lower(t2.country)`,
}
data, err = conn.RunAnalysis("table_join_match", values)
assert.NoError(t, err)
assert.Len(t, data.Rows, 2)
assert.Contains(t, []interface{}{0.0, int64(0), "0"}, data.Records()[0]["t1_null_cnt"])
assert.Equal(t, 100.0, cast.ToFloat64(data.Records()[1]["match_rate"]))
// RunAnalysisTable field_stat
data, err = conn.RunAnalysisTable("table_count", db.schema+".person", db.schema+".place")
assert.NoError(t, err)
assert.Len(t, data.Rows, 2)
assert.EqualValues(t, int64(2), data.Records()[0]["cnt"])
assert.EqualValues(t, int64(3), data.Records()[1]["cnt"])
// RunAnalysisField field_stat
data, err = conn.RunAnalysisField("field_stat", db.schema+".person")
assert.NoError(t, err)
assert.Len(t, data.Rows, 3)
assert.EqualValues(t, int64(2), data.Records()[0]["tot_cnt"])
assert.EqualValues(t, int64(0), data.Records()[1]["f_dup_cnt"])
// Extract / Load Test
if db.name != "sqlite3" {
ELTest(t, db, csvTable)
}
// Drop all tables
err = conn.DropTable("person", "place", "transact", "test1")
assert.NoError(t, err)
if db.name != "sqlite3" {
// test sleep function
sleepSQL := R(
conn.GetTemplateValue("function.sleep"),
"seconds", "1",
)
dd, err := conn.Query(sleepSQL)
assert.Greater(t, dd.Duration, 1.0)
assert.NoError(t, err)
// Test cancel query
cancelDone := make(chan bool)
ctx, cancel := context.WithCancel(conn.Context().ctx)
go func() {
_, err := conn.QueryContext(ctx, sleepSQL)
assert.Error(t, err)
cancelDone <- true
}()
time.Sleep(100 * time.Millisecond)
cancel()
<-cancelDone // wait for cancel to be done
}
err = conn.Close()
assert.NoError(t, err)
}
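// ELTest copies srcTable into a new table via both the row-by-row InsertStream
// path and the bulk export/import path, comparing row counts after each load.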
func ELTest(t *testing.T, db *testDB, srcTable string) {
tgtTable := srcTable + "2"
_, sTable := splitTableFullName(srcTable)
_, tTable := splitTableFullName(tgtTable)
// var srcConn, tgtConn PostgresConn
srcConn := GetConn(db.URL)
tgtConn := GetConn(db.URL)
err := srcConn.Connect()
assert.NoError(t, err)
err = tgtConn.Connect()
assert.NoError(t, err)
ddl, err := srcConn.GetDDL(srcTable)
assert.NoError(t, err)
newDdl := strings.Replace(ddl, sTable, tTable, 1)
if db.name == "oracle" {
newDdl = strings.Replace(
ddl, strings.ToUpper(sTable),
strings.ToUpper(tTable), 1,
)
}
err = tgtConn.DropTable(tgtTable)
assert.NoError(t, err)
_, err = tgtConn.Db().Exec(newDdl)
assert.NoError(t, Error(err, newDdl))
stream, err := srcConn.StreamRows(`select * from ` + srcTable)
assert.NoError(t, err)
if err == nil {
_, err = tgtConn.InsertStream(tgtTable, stream)
assert.NoError(t, err)
data, err := tgtConn.RunAnalysisTable("table_count", srcTable, tgtTable)
if assert.NoError(t, err) {
assert.Equal(t, data.Records()[0]["cnt"], data.Records()[1]["cnt"])
}
}
// use Bulk
_, err = tgtConn.Query("truncate table " + tgtTable)
assert.NoError(t, err)
stream, err = srcConn.BulkExportStream(`select * from ` + srcTable)
assert.NoError(t, err)
if err == nil {
_, err = tgtConn.BulkImportStream(tgtTable, stream)
assert.NoError(t, err)
data, err := tgtConn.RunAnalysisTable("table_count", srcTable, tgtTable)
if assert.NoError(t, err) {
assert.Equal(t, data.Records()[0]["cnt"], data.Records()[1]["cnt"])
}
}
err = tgtConn.DropTable(tgtTable)
assert.NoError(t, err)
srcConn.Close()
tgtConn.Close()
}
| [
"\"POSTGRES_URL\"",
"\"POSTGRES_URL\"",
"\"MYSQL_URL\"",
"\"MSSQL_URL\"",
"\"ORACLE_URL\"",
"\"REDSHIFT_URL\"",
"\"S3_BUCKET\""
]
| []
| [
"S3_BUCKET",
"MYSQL_URL",
"MSSQL_URL",
"POSTGRES_URL",
"ORACLE_URL",
"REDSHIFT_URL"
]
| [] | ["S3_BUCKET", "MYSQL_URL", "MSSQL_URL", "POSTGRES_URL", "ORACLE_URL", "REDSHIFT_URL"] | go | 6 | 0 | |
pkg/webhook/webhook-server_test.go | // Copyright © 2019-2021 Talend - www.talend.com
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package webhook
import (
"flag"
"net/http"
"net/http/httptest"
"os"
"strconv"
"strings"
"testing"
"github.com/stretchr/testify/assert"
"k8s.io/apimachinery/pkg/util/uuid"
"k8s.io/klog"
)
func TestWebhookServer(t *testing.T) {
verbose, _ := strconv.ParseBool(os.Getenv("VERBOSE"))
if verbose {
// Set Klog verbosity level to have detailed logs from our webhook (where we use level 5+ to log such info)
klogFlags := flag.NewFlagSet("klog", flag.ExitOnError)
klog.InitFlags(klogFlags)
klogFlags.Set("v", "5")
}
// Create webhook instance
vaultInjector, err := createTestVaultInjector()
if err != nil {
t.Fatalf("Loading error: %s", err)
}
tables := []struct {
name string
admissionReviewVersion string
vaultInjection bool
statusCode int
}{
{
name: "AdmissionReview v1, no injection",
admissionReviewVersion: "v1",
vaultInjection: false,
statusCode: http.StatusOK,
},
{
name: "AdmissionReview v1",
admissionReviewVersion: "v1",
vaultInjection: true,
statusCode: http.StatusOK,
},
{
name: "AdmissionReview v1beta1",
admissionReviewVersion: "v1beta1",
vaultInjection: true,
statusCode: http.StatusOK,
},
{
name: "AdmissionReview v1beta2",
admissionReviewVersion: "v1beta2",
vaultInjection: true,
statusCode: http.StatusBadRequest,
},
}
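	// Send each AdmissionReview scenario to the webhook and verify the HTTP status,
	// that the response echoes the request's apiVersion, and that the UIDs match.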
for _, table := range tables {
t.Run(table.name, func(t *testing.T) {
uid := string(uuid.NewUUID())
request := httptest.NewRequest(http.MethodPost, "/mutate", strings.NewReader(`{
"kind":"AdmissionReview",
"apiVersion":"admission.k8s.io/`+table.admissionReviewVersion+`",
"request":{
"uid":"`+uid+`",
"kind":{
"group":"",
"version":"v1",
"kind":"Pod"
},
"namespace":"default",
"operation":"CREATE",
"object":{
"apiVersion":"v1",
"kind":"Pod",
"metadata":{
"annotations":{
"sidecar.vault.talend.org/inject": "`+strconv.FormatBool(table.vaultInjection)+`"
},
"labels":{
"com.talend.application": "test",
"com.talend.service": "test-app-svc"
}
},
"spec":{
"containers":[
{
"name": "testcontainer",
"image": "myfakeimage:1.0.0",
"volumeMounts":[
{
"name": "default-token-1234",
"mountPath" : "/var/run/secrets/kubernetes.io/serviceaccount"
}
]
}
]
}
}
}
}`))
request.Header.Add("Content-Type", "application/json")
responseRecorder := httptest.NewRecorder()
vaultInjector.Serve(responseRecorder, request)
if klog.V(5) {
klog.Infof("HTTP Response=%+v", responseRecorder)
}
assert.Equal(t, responseRecorder.Code, table.statusCode)
assert.Condition(t, func() bool {
if responseRecorder.Code == http.StatusOK {
return strings.Contains(responseRecorder.Body.String(),
`"kind":"AdmissionReview","apiVersion":"admission.k8s.io/`+table.admissionReviewVersion+`"`) &&
strings.Contains(responseRecorder.Body.String(),
`"response":{"uid":"`+uid+`"`)
} else {
return true // HTTP error: return true to skip this test
}
}, "AdmissionReview version must match received version and admission response UID must match admission request UID")
})
}
}
| [
"\"VERBOSE\""
]
| []
| [
"VERBOSE"
]
| [] | ["VERBOSE"] | go | 1 | 0 | |
.github/workflows/pretf.py | import os
import subprocess
from pathlib import Path
from tempfile import NamedTemporaryFile
docker_image = "claranet/direnv-asdf:latest"
top = Path(__file__).parent.parent.parent
home = os.environ["HOME"]
uid = os.getuid()
gid = os.getgid()
# Use a temporary file for the AWS credentials file,
# so it will be automatically deleted afterwards.
with NamedTemporaryFile() as aws_creds_file:
    aws_creds_file.write(os.environ["AWS_CREDENTIALS_FILE"].encode())
aws_creds_file.flush()
# Create these directories before Docker runs,
# otherwise Docker will create them as the root user.
os.makedirs(f"{top}/.direnv", exist_ok=True)
os.makedirs(f"{top}/.docker/.direnv", exist_ok=True)
os.makedirs(f"{top}/.docker/home", exist_ok=True)
# Build the Docker command and then run it.
cmd = ["docker", "run", "--rm"]
volumes = (
f"{top}:/src",
f"{top}/.docker/.direnv:/src/.direnv",
f"{top}/.docker/home:{home}",
f"{aws_creds_file.name}:/tmp/aws:ro",
"/etc/passwd:/etc/passwd:ro",
)
for volume in volumes:
cmd.extend(["--volume", volume])
cmd.extend(["--env", "AWS_SHARED_CREDENTIALS_FILE=/tmp/aws"])
cmd.extend(["--user", f"{uid}:{gid}"])
cmd.extend(["--workdir", "/src/vpc/dev"])
cmd.extend([docker_image])
cmd.extend(["pretf", "validate"])
subprocess.run(cmd, check=True)
| []
| []
| [
"HOME",
"AWS_CREDENTIALS_FILE"
]
| [] | ["HOME", "AWS_CREDENTIALS_FILE"] | python | 2 | 0 | |
internal/gapicgen/cmd/genbot/github.go | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"context"
"fmt"
"io/ioutil"
"log"
"os"
"os/exec"
"os/user"
"path"
"strings"
"time"
"github.com/google/go-github/v32/github"
"github.com/shurcooL/githubv4"
"golang.org/x/oauth2"
)
const (
gocloudBranchName = "regen_gocloud"
gocloudCommitTitle = "feat(all): auto-regenerate gapics"
gocloudCommitBody = `
This is an auto-generated regeneration of the gapic clients by
cloud.google.com/go/internal/gapicgen. Once the corresponding genproto PR is
submitted, genbot will update this PR with a newer dependency to the newer
version of genproto and assign reviewers to this PR.
If you have been assigned to review this PR, please:
- Ensure that the version of genproto in go.mod has been updated.
- Ensure that CI is passing. If it's failing, it requires your manual attention.
- Approve and submit this PR if you believe it's ready to ship.
`
genprotoBranchName = "regen_genproto"
genprotoCommitTitle = "feat(all): auto-regenerate .pb.go files"
genprotoCommitBody = `
This is an auto-generated regeneration of the .pb.go files by
cloud.google.com/go/internal/gapicgen. Once this PR is submitted, genbot will
update the corresponding PR to depend on the newer version of go-genproto, and
assign reviewers. Whilst this or any regen PR is open in go-genproto, genbot
will not create any more regeneration PRs. If all regen PRs are closed,
gapicgen will create a new set of regeneration PRs once per night.
If you have been assigned to review this PR, please:
- Ensure that CI is passing. If it's failing, it requires your manual attention.
- Approve and submit this PR if you believe it's ready to ship. That will prompt
genbot to assign reviewers to the google-cloud-go PR.
`
)
// githubReviewers is the list of github usernames that will be assigned to
// review the PRs.
//
// TODO(ndietz): Can we use github teams?
var githubReviewers = []string{"hongalex", "broady", "noahdietz", "tritone", "codyoss", "tbpg"}
// PullRequest represents a GitHub pull request.
type PullRequest struct {
Author string
Title string
URL string
Created time.Time
IsOpen bool
Number int
Repo string
IsDraft bool
NodeID string
}
// GithubClient is a convenience wrapper around Github clients.
type GithubClient struct {
cV3 *github.Client
cV4 *githubv4.Client
// Username is the GitHub username. Read-only.
Username string
}
// NewGithubClient creates a new GithubClient.
func NewGithubClient(ctx context.Context, username, name, email, accessToken string) (*GithubClient, error) {
if err := setGitCreds(name, email, username, accessToken); err != nil {
return nil, err
}
ts := oauth2.StaticTokenSource(
&oauth2.Token{AccessToken: accessToken},
)
tc := oauth2.NewClient(ctx, ts)
return &GithubClient{cV3: github.NewClient(tc), cV4: githubv4.NewClient(tc), Username: username}, nil
}
// setGitCreds writes the GitHub credentials to ~/.git-credentials and sets the
// global git user name and email.
func setGitCreds(githubName, githubEmail, githubUsername, accessToken string) error {
u, err := user.Current()
if err != nil {
return err
}
	gitCredentials := []byte(fmt.Sprintf("https://%s:%s@github.com", githubUsername, accessToken))
if err := ioutil.WriteFile(path.Join(u.HomeDir, ".git-credentials"), gitCredentials, 0644); err != nil {
return err
}
c := exec.Command("git", "config", "--global", "user.name", githubName)
c.Stdout = os.Stdout
c.Stderr = os.Stderr
c.Stdin = os.Stdin // Prevents "the input device is not a TTY" error.
c.Env = []string{
fmt.Sprintf("PATH=%s", os.Getenv("PATH")), // TODO(deklerk): Why do we need to do this? Doesn't seem to be necessary in other exec.Commands.
fmt.Sprintf("HOME=%s", os.Getenv("HOME")), // TODO(deklerk): Why do we need to do this? Doesn't seem to be necessary in other exec.Commands.
}
if err := c.Run(); err != nil {
return err
}
c = exec.Command("git", "config", "--global", "user.email", githubEmail)
c.Stdout = os.Stdout
c.Stderr = os.Stderr
c.Stdin = os.Stdin // Prevents "the input device is not a TTY" error.
c.Env = []string{
fmt.Sprintf("PATH=%s", os.Getenv("PATH")), // TODO(deklerk): Why do we need to do this? Doesn't seem to be necessary in other exec.Commands.
fmt.Sprintf("HOME=%s", os.Getenv("HOME")), // TODO(deklerk): Why do we need to do this? Doesn't seem to be necessary in other exec.Commands.
}
return c.Run()
}
// GetRegenPR finds the first regen pull request with the given status. Accepted
// statuses are: open, closed, or all.
func (gc *GithubClient) GetRegenPR(ctx context.Context, repo string, status string) (*PullRequest, error) {
log.Printf("getting %v pull requests with status %q", repo, status)
// We don't bother paginating, because it hurts our requests quota and makes
// the page slower without a lot of value.
opt := &github.PullRequestListOptions{
ListOptions: github.ListOptions{PerPage: 50},
State: status,
}
prs, _, err := gc.cV3.PullRequests.List(ctx, "googleapis", repo, opt)
if err != nil {
return nil, err
}
for _, pr := range prs {
if !strings.Contains(pr.GetTitle(), "auto-regenerate") {
continue
}
if pr.GetUser().GetLogin() != gc.Username {
continue
}
return &PullRequest{
Author: pr.GetUser().GetLogin(),
Title: pr.GetTitle(),
URL: pr.GetHTMLURL(),
Created: pr.GetCreatedAt(),
IsOpen: pr.GetState() == "open",
Number: pr.GetNumber(),
Repo: repo,
IsDraft: pr.GetDraft(),
NodeID: pr.GetNodeID(),
}, nil
}
return nil, nil
}
// CreateGenprotoPR creates a PR for a given genproto change.
//
// hasCorrespondingPR indicates that there is a corresponding google-cloud-go PR.
func (gc *GithubClient) CreateGenprotoPR(ctx context.Context, genprotoDir string, hasCorrespondingPR bool) (prNumber int, _ error) {
log.Println("creating genproto PR")
body := genprotoCommitBody
if !hasCorrespondingPR {
body += "\n\nThere is no corresponding google-cloud-go PR.\n"
}
c := exec.Command("/bin/bash", "-c", `
set -ex
git config credential.helper store # cache creds from ~/.git-credentials
git branch -D $BRANCH_NAME || true
git push -d origin $BRANCH_NAME || true
git add -A
git checkout -b $BRANCH_NAME
git commit -m "$COMMIT_TITLE" -m "$COMMIT_BODY"
git push origin $BRANCH_NAME
`)
c.Stdout = os.Stdout
c.Stderr = os.Stderr
c.Stdin = os.Stdin // Prevents "the input device is not a TTY" error.
c.Env = []string{
fmt.Sprintf("PATH=%s", os.Getenv("PATH")), // TODO(deklerk): Why do we need to do this? Doesn't seem to be necessary in other exec.Commands.
fmt.Sprintf("HOME=%s", os.Getenv("HOME")), // TODO(deklerk): Why do we need to do this? Doesn't seem to be necessary in other exec.Commands.
fmt.Sprintf("COMMIT_TITLE=%s", genprotoCommitTitle),
fmt.Sprintf("COMMIT_BODY=%s", body),
fmt.Sprintf("BRANCH_NAME=%s", genprotoBranchName),
}
c.Dir = genprotoDir
if err := c.Run(); err != nil {
return 0, err
}
	head := "googleapis:" + genprotoBranchName
base := "master"
t := genprotoCommitTitle // Because we have to take the address.
pr, _, err := gc.cV3.PullRequests.Create(ctx, "googleapis", "go-genproto", &github.NewPullRequest{
Title: &t,
Body: &body,
Head: &head,
Base: &base,
})
if err != nil {
return 0, err
}
// Can't assign the submitter of the PR as a reviewer.
var reviewers []string
for _, r := range githubReviewers {
if r != *githubUsername {
reviewers = append(reviewers, r)
}
}
if _, _, err := gc.cV3.PullRequests.RequestReviewers(ctx, "googleapis", "go-genproto", pr.GetNumber(), github.ReviewersRequest{
Reviewers: reviewers,
}); err != nil {
return 0, err
}
log.Printf("creating genproto PR... done %s\n", pr.GetHTMLURL())
return pr.GetNumber(), nil
}
// CreateGocloudPR creates a PR for a given google-cloud-go change.
func (gc *GithubClient) CreateGocloudPR(ctx context.Context, gocloudDir string, genprotoPRNum int) (prNumber int, _ error) {
log.Println("creating google-cloud-go PR")
var body string
var draft bool
if genprotoPRNum > 0 {
body = gocloudCommitBody + fmt.Sprintf("\n\nCorresponding genproto PR: https://github.com/googleapis/go-genproto/pull/%d\n", genprotoPRNum)
draft = true
} else {
body = gocloudCommitBody + "\n\nThere is no corresponding genproto PR.\n"
}
c := exec.Command("/bin/bash", "-c", `
set -ex
git config credential.helper store # cache creds from ~/.git-credentials
git branch -D $BRANCH_NAME || true
git push -d origin $BRANCH_NAME || true
git add -A
git checkout -b $BRANCH_NAME
git commit -m "$COMMIT_TITLE" -m "$COMMIT_BODY"
git push origin $BRANCH_NAME
`)
c.Stdout = os.Stdout
c.Stderr = os.Stderr
c.Stdin = os.Stdin // Prevents "the input device is not a TTY" error.
c.Env = []string{
fmt.Sprintf("PATH=%s", os.Getenv("PATH")), // TODO(deklerk): Why do we need to do this? Doesn't seem to be necessary in other exec.Commands.
fmt.Sprintf("HOME=%s", os.Getenv("HOME")), // TODO(deklerk): Why do we need to do this? Doesn't seem to be necessary in other exec.Commands.
fmt.Sprintf("COMMIT_TITLE=%s", gocloudCommitTitle),
fmt.Sprintf("COMMIT_BODY=%s", body),
fmt.Sprintf("BRANCH_NAME=%s", gocloudBranchName),
}
c.Dir = gocloudDir
if err := c.Run(); err != nil {
return 0, err
}
t := gocloudCommitTitle // Because we have to take the address.
pr, _, err := gc.cV3.PullRequests.Create(ctx, "googleapis", "google-cloud-go", &github.NewPullRequest{
Title: &t,
Body: &body,
		Head:  github.String("googleapis:" + gocloudBranchName),
Base: github.String("master"),
Draft: github.Bool(draft),
})
if err != nil {
return 0, err
}
log.Printf("creating google-cloud-go PR... done %s\n", pr.GetHTMLURL())
return pr.GetNumber(), nil
}
// AmendWithPRURL amends the given genproto PR with a link to the given
// google-cloud-go PR.
func (gc *GithubClient) AmendWithPRURL(ctx context.Context, genprotoPRNum int, genprotoDir string, gocloudPRNum int) error {
newBody := genprotoCommitBody + fmt.Sprintf("\n\nCorresponding google-cloud-go PR: googleapis/google-cloud-go#%d\n", gocloudPRNum)
c := exec.Command("/bin/bash", "-c", `
set -ex
git config credential.helper store # cache creds from ~/.git-credentials
git checkout $BRANCH_NAME
git commit --amend -m "$COMMIT_TITLE" -m "$COMMIT_BODY"
git push -f origin $BRANCH_NAME
`)
c.Stdout = os.Stdout
c.Stderr = os.Stderr
c.Stdin = os.Stdin // Prevents "the input device is not a TTY" error.
c.Env = []string{
fmt.Sprintf("PATH=%s", os.Getenv("PATH")), // TODO(deklerk): Why do we need to do this? Doesn't seem to be necessary in other exec.Commands.
fmt.Sprintf("HOME=%s", os.Getenv("HOME")), // TODO(deklerk): Why do we need to do this? Doesn't seem to be necessary in other exec.Commands.
fmt.Sprintf("COMMIT_TITLE=%s", genprotoCommitTitle),
fmt.Sprintf("COMMIT_BODY=%s", newBody),
fmt.Sprintf("BRANCH_NAME=%s", genprotoBranchName),
}
c.Dir = genprotoDir
if err := c.Run(); err != nil {
return err
}
_, _, err := gc.cV3.PullRequests.Edit(ctx, "googleapis", "go-genproto", genprotoPRNum, &github.PullRequest{
Body: &newBody,
})
return err
}
// MarkPRReadyForReview switches a draft pull request to a reviewable pull
// request.
func (gc *GithubClient) MarkPRReadyForReview(ctx context.Context, repo string, nodeID string) error {
var m struct {
MarkPullRequestReadyForReview struct {
PullRequest struct {
ID githubv4.ID
}
} `graphql:"markPullRequestReadyForReview(input: $input)"`
}
input := githubv4.MarkPullRequestReadyForReviewInput{
PullRequestID: nodeID,
}
if err := gc.cV4.Mutate(ctx, &m, input, nil); err != nil {
return err
}
return nil
}
| [
"\"PATH\"",
"\"HOME\"",
"\"PATH\"",
"\"HOME\"",
"\"PATH\"",
"\"HOME\"",
"\"PATH\"",
"\"HOME\"",
"\"PATH\"",
"\"HOME\""
]
| []
| [
"HOME",
"PATH"
]
| [] | ["HOME", "PATH"] | go | 2 | 0 | |
models/engine/db_storage.py | #!/usr/bin/python3
"""
Database engine
"""
import os
from sqlalchemy import create_engine, MetaData
from sqlalchemy.orm import sessionmaker, scoped_session
from models.base_model import Base
from models import base_model, amenity, city, place, review, state, user
class DBStorage:
"""
handles long term storage of all class instances
"""
CNC = {
'Amenity': amenity.Amenity,
'City': city.City,
'Place': place.Place,
'Review': review.Review,
'State': state.State,
'User': user.User
}
"""
handles storage for database
"""
__engine = None
__session = None
def __init__(self):
"""
creates the engine self.__engine
"""
self.__engine = create_engine(
'mysql+mysqldb://{}:{}@{}/{}'.format(
os.environ.get('instakush_MYSQL_USER'),
os.environ.get('instakush_MYSQL_PWD'),
os.environ.get('instakush_MYSQL_HOST'),
os.environ.get('instakush_MYSQL_DB')))
if os.environ.get("instakush_ENV") == 'test':
Base.metadata.drop_all(self.__engine)
def all(self, cls=None):
"""
returns a dictionary of all objects
"""
obj_dict = {}
if cls is not None:
a_query = self.__session.query(DBStorage.CNC[cls])
for obj in a_query:
obj_ref = "{}.{}".format(type(obj).__name__, obj.id)
obj_dict[obj_ref] = obj
return obj_dict
for c in DBStorage.CNC.values():
a_query = self.__session.query(c)
for obj in a_query:
obj_ref = "{}.{}".format(type(obj).__name__, obj.id)
obj_dict[obj_ref] = obj
return obj_dict
def new(self, obj):
"""
adds objects to current database session
"""
self.__session.add(obj)
def save(self):
"""
commits all changes of current database session
"""
self.__session.commit()
def rollback_session(self):
"""
        rolls back a session in the event of an exception
"""
self.__session.rollback()
def delete(self, obj=None):
"""
deletes obj from current database session if not None
"""
if obj:
self.__session.delete(obj)
self.save()
def delete_all(self):
"""
deletes all stored objects, for testing purposes
"""
for c in DBStorage.CNC.values():
a_query = self.__session.query(c)
            for obj in a_query:
                obj.delete()
self.save()
def reload(self):
"""
creates all tables in database & session from engine
"""
Base.metadata.create_all(self.__engine)
self.__session = scoped_session(
sessionmaker(
bind=self.__engine,
expire_on_commit=False))
def close(self):
"""
calls remove() on private session attribute (self.session)
"""
self.__session.remove()
def get(self, cls, id):
"""
retrieves one object based on class name and id
"""
if cls and id:
fetch = "{}.{}".format(cls, id)
all_obj = self.all(cls)
return all_obj.get(fetch)
return None
def count(self, cls=None):
"""
returns the count of all objects in storage
"""
return (len(self.all(cls)))
| []
| []
| [
"instakush_ENV",
"instakush_MYSQL_HOST",
"instakush_MYSQL_PWD",
"instakush_MYSQL_DB",
"instakush_MYSQL_USER"
]
| [] | ["instakush_ENV", "instakush_MYSQL_HOST", "instakush_MYSQL_PWD", "instakush_MYSQL_DB", "instakush_MYSQL_USER"] | python | 5 | 0 | |
tests/test_other.py | # coding=utf-8
# Copyright 2013 The Emscripten Authors. All rights reserved.
# Emscripten is available under two separate licenses, the MIT license and the
# University of Illinois/NCSA Open Source License. Both these licenses can be
# found in the LICENSE file.
# noqa: E241
from functools import wraps
import glob
import gzip
import itertools
import json
import os
import pipes
import re
import select
import shlex
import shutil
import subprocess
import sys
import time
import tempfile
import unittest
import uuid
from subprocess import PIPE, STDOUT
if __name__ == '__main__':
raise Exception('do not run this file directly; do something like: tests/runner.py other')
from tools.shared import try_delete, config
from tools.shared import EMCC, EMXX, EMAR, EMRANLIB, PYTHON, FILE_PACKAGER, WINDOWS, EM_BUILD_VERBOSE
from tools.shared import CLANG_CC, CLANG_CXX, LLVM_AR, LLVM_DWARFDUMP
from runner import RunnerCore, path_from_root, is_slow_test, ensure_dir, disabled, make_executable
from runner import env_modify, no_mac, no_windows, requires_native_clang, with_env_modify
from runner import create_test_file, parameterized, NON_ZERO, node_pthreads
from tools import shared, building, utils
import jsrun
import clang_native
from tools import line_endings
from tools import webassembly
scons_path = utils.which('scons')
emmake = shared.bat_suffix(path_from_root('emmake'))
emcmake = shared.bat_suffix(path_from_root('emcmake'))
emconfigure = shared.bat_suffix(path_from_root('emconfigure'))
emconfig = shared.bat_suffix(path_from_root('em-config'))
emsize = shared.bat_suffix(path_from_root('emsize'))
wasm_dis = os.path.join(building.get_binaryen_bin(), 'wasm-dis')
wasm_opt = os.path.join(building.get_binaryen_bin(), 'wasm-opt')
class temp_directory():
def __init__(self, dirname):
self.dir = dirname
def __enter__(self):
self.directory = tempfile.mkdtemp(prefix='emtest_temp_', dir=self.dir)
self.prev_cwd = os.getcwd()
os.chdir(self.directory)
print('temp_directory: ' + self.directory)
return self.directory
def __exit__(self, type, value, traceback):
os.chdir(self.prev_cwd)
def uses_canonical_tmp(func):
"""Decorator that signals the use of the canonical temp by a test method.
This decorator takes care of cleaning the directory after the
test to satisfy the leak detector.
"""
@wraps(func)
def decorated(self):
# Before running the test completely remove the canonical_tmp
if os.path.exists(self.canonical_temp_dir):
shutil.rmtree(self.canonical_temp_dir)
try:
func(self)
finally:
# Make sure the test isn't lying about the fact that it uses
# canonical_tmp
self.assertTrue(os.path.exists(self.canonical_temp_dir))
# Remove the temp dir in a try-finally, as otherwise if the
# test fails we would not clean it up, and if leak detection
# is set we will show that error instead of the actual one.
shutil.rmtree(self.canonical_temp_dir)
return decorated
def parse_wasm(filename):
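  """Disassemble a wasm file with wasm-dis and collect its imports, exports and function names."""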
wat = shared.run_process([wasm_dis, filename], stdout=PIPE).stdout
imports = []
exports = []
funcs = []
for line in wat.splitlines():
line = line.strip()
if line.startswith('(import '):
line = line.strip('()')
parts = line.split()
module = parts[1].strip('"')
name = parts[2].strip('"')
imports.append('%s.%s' % (module, name))
if line.startswith('(export '):
line = line.strip('()')
name = line.split()[1].strip('"')
exports.append(name)
if line.startswith('(func '):
line = line.strip('()')
name = line.split()[1].strip('"')
funcs.append(name)
return imports, exports, funcs
class other(RunnerCore):
def assertIsObjectFile(self, filename):
self.assertTrue(building.is_wasm(filename))
def do_other_test(self, testname, emcc_args=[], run_args=[]):
orig_args = self.emcc_args
self.emcc_args += emcc_args
self.do_run_in_out_file_test('tests', 'other', testname, args=run_args)
self.emcc_args = orig_args
# Another utility to run a test in this suite. This receives a source file
# to compile, with optional compiler and execution flags.
# Output can be checked by seeing if literals are contained, and that a list
# of regexes match. The return code can also be checked.
def do_smart_test(self, source, literals=[], engine=None, regexes=[],
emcc_args=[], run_args=[], assert_returncode=0):
self.run_process([EMCC, source] + emcc_args)
seen = self.run_js('a.out.js', engine=engine, args=run_args, assert_returncode=assert_returncode) + '\n'
for literal in literals:
self.assertContained([literal], seen)
for regex in regexes:
self.assertTrue(re.search(regex, seen), 'Expected regex "%s" to match on:\n%s' % (regex, seen))
def run_on_pty(self, cmd):
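    """Run cmd attached to a pseudo-terminal and return (returncode, captured output)."""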
master, slave = os.openpty()
output = []
try:
env = os.environ.copy()
env['TERM'] = 'xterm-color'
proc = subprocess.Popen(cmd, stdout=slave, stderr=slave, env=env)
while proc.poll() is None:
r, w, x = select.select([master], [], [], 1)
if r:
output.append(os.read(master, 1024))
return (proc.returncode, b''.join(output))
finally:
os.close(master)
os.close(slave)
def test_emcc_v(self):
for compiler in [EMCC, EMXX]:
# -v, without input files
proc = self.run_process([compiler, '-v'], stdout=PIPE, stderr=PIPE)
self.assertEqual(proc.stdout, '')
# assert that the emcc message comes first. We had a bug where the sub-process output
# from clang would be flushed to stderr first.
self.assertContained('emcc (Emscripten gcc/clang-like replacement', proc.stderr)
self.assertTrue(proc.stderr.startswith('emcc (Emscripten gcc/clang-like replacement'))
self.assertContained('clang version %s' % shared.EXPECTED_LLVM_VERSION, proc.stderr)
self.assertContained('GNU', proc.stderr)
self.assertNotContained('this is dangerous', proc.stderr)
def test_emcc_generate_config(self):
for compiler in [EMCC, EMXX]:
config_path = './emscripten_config'
self.run_process([compiler, '--generate-config', config_path])
self.assertExists(config_path, 'A config file should have been created at %s' % config_path)
config_contents = open(config_path).read()
self.assertContained('EMSCRIPTEN_ROOT', config_contents)
self.assertContained('LLVM_ROOT', config_contents)
os.remove(config_path)
def test_emcc_output_mjs(self):
self.run_process([EMCC, '-o', 'hello_world.mjs', path_from_root('tests', 'hello_world.c')])
with open('hello_world.mjs') as f:
output = f.read()
self.assertContained('export default Module;', output)
# TODO(sbc): Test that this is actually runnable. We currently don't have
# any tests for EXPORT_ES6 but once we do this should be enabled.
# self.assertContained('hello, world!', self.run_js('hello_world.mjs'))
def test_emcc_output_worker_mjs(self):
self.run_process([EMCC, '-o', 'hello_world.mjs', '-pthread', '-O1',
path_from_root('tests', 'hello_world.c'),
'-s', 'EXPORT_NAME=FooModule'])
with open('hello_world.mjs') as f:
self.assertContained('export default FooModule;', f.read())
with open('hello_world.worker.js') as f:
self.assertContained('import(', f.read())
def test_export_es6_implies_modularize(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EXPORT_ES6=1'])
with open('a.out.js') as f:
self.assertContained('export default Module;', f.read())
def test_export_es6_requires_modularize(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EXPORT_ES6=1', '-s', 'MODULARIZE=0'])
self.assertContained('EXPORT_ES6 requires MODULARIZE to be set', err)
def test_emcc_out_file(self):
# Verify that "-ofile" works in addition to "-o" "file"
self.run_process([EMCC, '-c', '-ofoo.o', path_from_root('tests', 'hello_world.c')])
self.assertExists('foo.o')
self.run_process([EMCC, '-ofoo.js', 'foo.o'])
self.assertExists('foo.js')
@parameterized({
'c': [EMCC, '.c'],
'cxx': [EMXX, '.cpp']})
def test_emcc_basics(self, compiler, suffix):
# emcc src.cpp ==> writes a.out.js and a.out.wasm
self.run_process([compiler, path_from_root('tests', 'hello_world' + suffix)])
self.assertExists('a.out.js')
self.assertExists('a.out.wasm')
self.assertContained('hello, world!', self.run_js('a.out.js'))
# --version
output = self.run_process([compiler, '--version'], stdout=PIPE, stderr=PIPE)
output = output.stdout.replace('\r', '')
self.assertContained('emcc (Emscripten gcc/clang-like replacement)', output)
self.assertContained('''Copyright (C) 2014 the Emscripten authors (see AUTHORS.txt)
This is free and open source software under the MIT license.
There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
''', output)
# --help
output = self.run_process([compiler, '--help'], stdout=PIPE, stderr=PIPE)
self.assertContained('Display this information', output.stdout)
self.assertContained('Most clang options will work', output.stdout)
# -dumpmachine
output = self.run_process([compiler, '-dumpmachine'], stdout=PIPE, stderr=PIPE)
self.assertContained(shared.get_llvm_target(), output.stdout)
# -dumpversion
output = self.run_process([compiler, '-dumpversion'], stdout=PIPE, stderr=PIPE)
self.assertEqual(shared.EMSCRIPTEN_VERSION, output.stdout.strip())
# properly report source code errors, and stop there
self.clear()
stderr = self.expect_fail([compiler, path_from_root('tests', 'hello_world_error' + suffix)])
self.assertNotContained('IOError', stderr) # no python stack
self.assertNotContained('Traceback', stderr) # no python stack
self.assertContained('error: invalid preprocessing directive', stderr)
self.assertContained(["error: use of undeclared identifier 'cheez", "error: unknown type name 'cheez'"], stderr)
self.assertContained('errors generated.', stderr.splitlines()[-2])
@parameterized({
'c': [EMCC, '.c'],
'cxx': [EMXX, '.cpp']})
def test_emcc_2(self, compiler, suffix):
# emcc src.cpp -c and emcc src.cpp -o src.[o|bc] ==> should give a .bc file
for args in [[], ['-o', 'src.o'], ['-o', 'src.bc'], ['-o', 'src.so']]:
print('args:', args)
target = args[1] if len(args) == 2 else 'hello_world.o'
self.clear()
self.run_process([compiler, '-c', path_from_root('tests', 'hello_world' + suffix)] + args)
syms = building.llvm_nm(target)
self.assertIn('main', syms.defs)
# wasm backend will also have '__original_main' or such
self.assertEqual(len(syms.defs), 2)
if target == 'js': # make sure emcc can recognize the target as a bitcode file
shutil.move(target, target + '.bc')
target += '.bc'
self.run_process([compiler, target, '-o', target + '.js'])
self.assertContained('hello, world!', self.run_js(target + '.js'))
@parameterized({
'c': [EMCC, '.c'],
'cxx': [EMXX, '.cpp']})
def test_emcc_3(self, compiler, suffix):
# handle singleton archives
self.run_process([compiler, '-c', path_from_root('tests', 'hello_world' + suffix), '-o', 'a.o'])
self.run_process([LLVM_AR, 'r', 'a.a', 'a.o'], stdout=PIPE, stderr=PIPE)
self.run_process([compiler, 'a.a'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
# emcc [..] -o [path] ==> should work with absolute paths
for path in [os.path.abspath(os.path.join('..', 'file1.js')), os.path.join('b_dir', 'file2.js')]:
print(path)
os.chdir(self.get_dir())
self.clear()
print(os.listdir(os.getcwd()))
ensure_dir(os.path.join('a_dir', 'b_dir'))
os.chdir('a_dir')
# use single file so we don't have more files to clean up
self.run_process([compiler, path_from_root('tests', 'hello_world' + suffix), '-o', path, '-s', 'SINGLE_FILE=1'])
last = os.getcwd()
os.chdir(os.path.dirname(path))
self.assertContained('hello, world!', self.run_js(os.path.basename(path)))
os.chdir(last)
try_delete(path)
@is_slow_test
@parameterized({
'c': [EMCC],
'cxx': [EMXX]})
def test_emcc_4(self, compiler):
# Optimization: emcc src.cpp -o something.js [-Ox]. -O0 is the same as not specifying any optimization setting
for params, opt_level, obj_params, closure, has_malloc in [ # obj_params are used after compiling first
(['-o', 'something.js'], 0, None, 0, 1),
(['-o', 'something.js', '-O0', '-g'], 0, None, 0, 0),
(['-o', 'something.js', '-O1'], 1, None, 0, 0),
(['-o', 'something.js', '-O1', '-g'], 1, None, 0, 0), # no closure since debug
(['-o', 'something.js', '-O2'], 2, None, 0, 1),
(['-o', 'something.js', '-O2', '-g'], 2, None, 0, 0),
(['-o', 'something.js', '-Os'], 2, None, 0, 1),
(['-o', 'something.js', '-O3'], 3, None, 0, 1),
# and, test compiling first
(['-c', '-o', 'something.o'], 0, [], 0, 0),
(['-c', '-o', 'something.o', '-O0'], 0, [], 0, 0),
(['-c', '-o', 'something.o', '-O1'], 1, ['-O1'], 0, 0),
(['-c', '-o', 'something.o', '-O2'], 2, ['-O2'], 0, 0),
(['-c', '-o', 'something.o', '-O3'], 3, ['-O3'], 0, 0),
(['-O1', '-c', '-o', 'something.o'], 1, [], 0, 0),
# non-wasm
(['-s', 'WASM=0', '-o', 'something.js'], 0, None, 0, 1),
(['-s', 'WASM=0', '-o', 'something.js', '-O0', '-g'], 0, None, 0, 0),
(['-s', 'WASM=0', '-o', 'something.js', '-O1'], 1, None, 0, 0),
(['-s', 'WASM=0', '-o', 'something.js', '-O1', '-g'], 1, None, 0, 0), # no closure since debug
(['-s', 'WASM=0', '-o', 'something.js', '-O2'], 2, None, 0, 1),
(['-s', 'WASM=0', '-o', 'something.js', '-O2', '-g'], 2, None, 0, 0),
(['-s', 'WASM=0', '-o', 'something.js', '-Os'], 2, None, 0, 1),
(['-s', 'WASM=0', '-o', 'something.js', '-O3'], 3, None, 0, 1),
# and, test compiling to bitcode first
(['-s', 'WASM=0', '-c', '-o', 'something.o'], 0, ['-s', 'WASM=0'], 0, 0),
(['-s', 'WASM=0', '-c', '-o', 'something.o', '-O0'], 0, ['-s', 'WASM=0'], 0, 0),
(['-s', 'WASM=0', '-c', '-o', 'something.o', '-O1'], 1, ['-s', 'WASM=0', '-O1'], 0, 0),
(['-s', 'WASM=0', '-c', '-o', 'something.o', '-O2'], 2, ['-s', 'WASM=0', '-O2'], 0, 0),
(['-s', 'WASM=0', '-c', '-o', 'something.o', '-O3'], 3, ['-s', 'WASM=0', '-O3'], 0, 0),
(['-s', 'WASM=0', '-O1', '-c', '-o', 'something.o'], 1, ['-s', 'WASM=0'], 0, 0),
]:
print(params, opt_level, obj_params, closure, has_malloc)
self.clear()
keep_debug = '-g' in params
args = [compiler, path_from_root('tests', 'hello_world_loop' + ('_malloc' if has_malloc else '') + '.cpp')] + params
print('..', args)
output = self.run_process(args, stdout=PIPE, stderr=PIPE)
assert len(output.stdout) == 0, output.stdout
if obj_params is not None:
self.assertExists('something.o', output.stderr)
obj_args = [compiler, 'something.o', '-o', 'something.js'] + obj_params
print('....', obj_args)
output = self.run_process(obj_args, stdout=PIPE, stderr=PIPE)
self.assertExists('something.js', output.stderr)
self.assertContained('hello, world!', self.run_js('something.js'))
# Verify optimization level etc. in the generated code
# XXX these are quite sensitive, and will need updating when code generation changes
generated = open('something.js').read()
main = self.get_func(generated, '_main') if 'function _main' in generated else generated
assert 'new Uint16Array' in generated and 'new Uint32Array' in generated, 'typed arrays 2 should be used by default'
assert 'SAFE_HEAP' not in generated, 'safe heap should not be used by default'
assert ': while(' not in main, 'when relooping we also js-optimize, so there should be no labelled whiles'
if closure:
if opt_level == 0:
assert '._main =' in generated, 'closure compiler should have been run'
elif opt_level >= 1:
assert '._main=' in generated, 'closure compiler should have been run (and output should be minified)'
else:
# closure has not been run, we can do some additional checks. TODO: figure out how to do these even with closure
assert '._main = ' not in generated, 'closure compiler should not have been run'
if keep_debug:
assert ('assert(INITIAL_MEMORY >= TOTAL_STACK' in generated) == (opt_level == 0), 'assertions should be in opt == 0'
if 'WASM=0' in params:
looks_unminified = ' = {}' in generated and ' = []' in generated
        looks_minified = '={}' in generated and '=[]' in generated and ';var' in generated
assert not (looks_minified and looks_unminified)
if opt_level == 0 or '-g' in params:
assert looks_unminified
elif opt_level >= 2:
assert looks_minified
def test_multiple_sources(self):
# Compiling two sources at a time should work.
cmd = [EMCC, '-c', path_from_root('tests', 'twopart_main.cpp'), path_from_root('tests', 'twopart_side.c')]
self.run_process(cmd)
# Object files should be generated by default in the current working
# directory, and not alongside the sources.
self.assertExists('twopart_main.o')
self.assertExists('twopart_side.o')
self.assertNotExists(path_from_root('tests', 'twopart_main.o'))
self.assertNotExists(path_from_root('tests', 'twopart_side.o'))
# But it is an error if '-o' is also specified.
self.clear()
err = self.expect_fail(cmd + ['-o', 'out.o'])
self.assertContained('cannot specify -o with -c/-S/-E/-M and multiple source files', err)
self.assertNotExists('twopart_main.o')
self.assertNotExists('twopart_side.o')
self.assertNotExists(path_from_root('tests', 'twopart_main.o'))
self.assertNotExists(path_from_root('tests', 'twopart_side.o'))
def test_combining_object_files(self):
# Compiling two files with -c will generate separate object files
self.run_process([EMCC, path_from_root('tests', 'twopart_main.cpp'), path_from_root('tests', 'twopart_side.c'), '-c'])
self.assertExists('twopart_main.o')
self.assertExists('twopart_side.o')
# Linking with just one of them is expected to fail
err = self.expect_fail([EMCC, 'twopart_main.o'])
self.assertContained('undefined symbol: theFunc', err)
# Linking with both should work
self.run_process([EMCC, 'twopart_main.o', 'twopart_side.o'])
self.assertContained('side got: hello from main, over', self.run_js('a.out.js'))
# Combining object files into another object should also work, using the `-r` flag
self.run_process([EMCC, '-r', 'twopart_main.o', 'twopart_side.o', '-o', 'combined.o'])
    # Warn about legacy support for outputting an object file without `-r`, `-c` or `-shared`
err = self.run_process([EMCC, 'twopart_main.o', 'twopart_side.o', '-o', 'combined2.o'], stderr=PIPE).stderr
self.assertContained('warning: generating an executable with an object extension (.o)', err)
# Should be two symbols (and in the wasm backend, also __original_main)
syms = building.llvm_nm('combined.o')
self.assertIn('main', syms.defs)
self.assertEqual(len(syms.defs), 3)
self.run_process([EMCC, 'combined.o', '-o', 'combined.o.js'])
self.assertContained('side got: hello from main, over', self.run_js('combined.o.js'))
def test_js_transform(self):
with open('t.py', 'w') as f:
f.write('''
import sys
f = open(sys.argv[1], 'a')
f.write('transformed!')
f.close()
''')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--js-transform', '%s t.py' % (PYTHON)])
self.assertIn('transformed!', open('a.out.js').read())
def test_js_mem_file(self):
for opts in [0, 1, 2, 3]:
print('mem init in', opts)
self.clear()
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'WASM=0', '-O' + str(opts)])
if opts >= 2:
self.assertExists('a.out.js.mem')
else:
self.assertNotExists('a.out.js.mem')
def test_emcc_asm_v_wasm(self):
for opts in ([], ['-O1'], ['-O2'], ['-O3']):
print('opts', opts)
for mode in ([], ['-s', 'WASM=0']):
self.clear()
wasm = '=0' not in str(mode)
print(' mode', mode, 'wasm?', wasm)
self.run_process([EMCC, path_from_root('tests', 'hello_world.c')] + opts + mode)
self.assertExists('a.out.js')
if wasm:
self.assertExists('a.out.wasm')
for engine in config.JS_ENGINES:
print(' engine', engine)
out = self.run_js('a.out.js', engine=engine)
self.assertContained('hello, world!', out)
def test_emcc_cflags(self):
output = self.run_process([EMCC, '--cflags'], stdout=PIPE)
flags = output.stdout.strip()
self.assertContained(shared.shlex_join(shared.emsdk_cflags([], False)), flags)
output = self.run_process([EMXX, '--cflags'], stdout=PIPE)
flags = output.stdout.strip()
self.assertContained(shared.shlex_join(shared.emsdk_cflags([], True)), flags)
# check they work
cmd = [CLANG_CXX, path_from_root('tests', 'hello_world.cpp')] + shlex.split(flags.replace('\\', '\\\\')) + ['-c', '-emit-llvm', '-o', 'a.bc']
self.run_process(cmd)
self.run_process([EMCC, 'a.bc'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_emcc_print_search_dirs(self):
result = self.run_process([EMCC, '-print-search-dirs'], stdout=PIPE, stderr=PIPE)
self.assertContained('programs: =', result.stdout)
self.assertContained('libraries: =', result.stdout)
def test_emar_em_config_flag(self):
    # Test that the --em-config flag is accepted but not passed down to llvm-ar.
# We expand this in case the EM_CONFIG is ~/.emscripten (default)
conf = os.path.expanduser(config.EM_CONFIG)
proc = self.run_process([EMAR, '--em-config', conf, '-version'], stdout=PIPE, stderr=PIPE)
self.assertEqual(proc.stderr, "")
self.assertContained('LLVM', proc.stdout)
def test_emsize(self):
# test binaryen generated by running:
# emcc tests/hello_world.c -Oz --closure 1 -o tests/other/test_emsize.js
with open(path_from_root('tests', 'other', 'test_emsize.out')) as expected_output:
expected = expected_output.read()
cmd = [emsize, path_from_root('tests', 'other', 'test_emsize.js')]
for command in [cmd, cmd + ['-format=sysv']]:
output = self.run_process(cmd, stdout=PIPE).stdout
self.assertContained(expected, output)
@is_slow_test
@parameterized({
# ('directory to the test', 'output filename', ['extra args to pass to
# CMake']) Testing all combinations would be too much work and the test
# would take 10 minutes+ to finish (CMake feature detection is slow), so
# combine multiple features into one to try to cover as much as possible
    # while still keeping this test within a sensible time limit.
'js': ('target_js', 'test_cmake.js', ['-DCMAKE_BUILD_TYPE=Debug']),
'html': ('target_html', 'hello_world_gles.html', ['-DCMAKE_BUILD_TYPE=Release']),
'library': ('target_library', 'libtest_cmake.a', ['-DCMAKE_BUILD_TYPE=MinSizeRel']),
'static_cpp': ('target_library', 'libtest_cmake.a', ['-DCMAKE_BUILD_TYPE=RelWithDebInfo', '-DCPP_LIBRARY_TYPE=STATIC']),
'stdproperty': ('stdproperty', 'helloworld.js', [])
})
def test_cmake(self, test_dir, output_file, cmake_args):
# Test all supported generators.
if WINDOWS:
generators = ['MinGW Makefiles', 'NMake Makefiles']
else:
generators = ['Unix Makefiles', 'Ninja', 'Eclipse CDT4 - Ninja']
configurations = {'MinGW Makefiles' : {'build' : ['mingw32-make'] }, # noqa
'NMake Makefiles' : {'build' : ['nmake', '/NOLOGO']}, # noqa
'Unix Makefiles' : {'build' : ['make']}, # noqa
'Ninja' : {'build' : ['ninja']}, # noqa
'Eclipse CDT4 - Ninja': {'build' : ['ninja']}, # noqa
}
for generator in generators:
conf = configurations[generator]
if not utils.which(conf['build'][0]):
# Use simple test if applicable
        print('Skipping %s test for CMake support; build tool not found: %s.' % (generator, conf['build'][0]))
continue
cmakelistsdir = path_from_root('tests', 'cmake', test_dir)
with temp_directory(self.get_dir()) as tempdirname:
# Run Cmake
cmd = [emcmake, 'cmake'] + cmake_args + ['-G', generator, cmakelistsdir]
env = os.environ.copy()
# https://github.com/emscripten-core/emscripten/pull/5145: Check that CMake works even if EMCC_SKIP_SANITY_CHECK=1 is passed.
if test_dir == 'target_html':
env['EMCC_SKIP_SANITY_CHECK'] = '1'
print(str(cmd))
self.run_process(cmd, env=env, stdout=None if EM_BUILD_VERBOSE >= 2 else PIPE, stderr=None if EM_BUILD_VERBOSE >= 1 else PIPE)
# Build
cmd = conf['build']
if EM_BUILD_VERBOSE >= 3 and 'Ninja' not in generator:
cmd += ['VERBOSE=1']
self.run_process(cmd, stdout=None if EM_BUILD_VERBOSE >= 2 else PIPE)
self.assertExists(tempdirname + '/' + output_file, 'building a cmake-generated Makefile failed to produce an output file %s!' % tempdirname + '/' + output_file)
# Run through node, if CMake produced a .js file.
if output_file.endswith('.js'):
ret = self.run_process(config.NODE_JS + [tempdirname + '/' + output_file], stdout=PIPE).stdout
self.assertTextDataIdentical(open(cmakelistsdir + '/out.txt').read().strip(), ret.strip())
# Test that the various CMAKE_xxx_COMPILE_FEATURES that are advertised for the Emscripten toolchain match with the actual language features that Clang supports.
# If we update LLVM version and this test fails, copy over the new advertised features from Clang and place them to cmake/Modules/Platform/Emscripten.cmake.
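  # Both configure runs print marker lines containing '***' for the detected
  # features; only those lines are compared below.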
@no_windows('Skipped on Windows because CMake does not configure native Clang builds well on Windows.')
def test_cmake_compile_features(self):
with temp_directory(self.get_dir()):
cmd = ['cmake', '-DCMAKE_C_COMPILER=' + CLANG_CC, '-DCMAKE_CXX_COMPILER=' + CLANG_CXX, path_from_root('tests', 'cmake', 'stdproperty')]
print(str(cmd))
native_features = self.run_process(cmd, stdout=PIPE).stdout
with temp_directory(self.get_dir()):
cmd = [emcmake, 'cmake', path_from_root('tests', 'cmake', 'stdproperty')]
print(str(cmd))
emscripten_features = self.run_process(cmd, stdout=PIPE).stdout
native_features = '\n'.join([x for x in native_features.split('\n') if '***' in x])
emscripten_features = '\n'.join([x for x in emscripten_features.split('\n') if '***' in x])
self.assertTextDataIdentical(native_features, emscripten_features)
# Tests that it's possible to pass C++11 or GNU++11 build modes to CMake by building code that
# needs C++11 (embind)
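  # (For reference, a CMake project can request these modes with e.g.
  # set(CMAKE_CXX_STANDARD 11) plus CMAKE_CXX_EXTENSIONS ON/OFF for gnu++11 vs
  # c++11; how the cmake_with_emval project does it is defined in its own
  # CMakeLists.txt.)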
def test_cmake_with_embind_cpp11_mode(self):
if WINDOWS and not utils.which('ninja'):
self.skipTest('Skipping cmake test on windows since ninja not found')
for args in [[], ['-DNO_GNU_EXTENSIONS=1']]:
self.clear()
# Use ninja generator here since we assume its always installed on our build/test machines.
configure = [emcmake, 'cmake', path_from_root('tests', 'cmake', 'cmake_with_emval')] + args
if WINDOWS:
configure += ['-G', 'Ninja']
print(str(configure))
self.run_process(configure)
build = ['cmake', '--build', '.']
print(str(build))
self.run_process(build)
out = self.run_process(config.NODE_JS + ['cmake_with_emval.js'], stdout=PIPE).stdout
if '-DNO_GNU_EXTENSIONS=1' in args:
self.assertContained('Hello! __STRICT_ANSI__: 1, __cplusplus: 201103', out)
else:
self.assertContained('Hello! __STRICT_ANSI__: 0, __cplusplus: 201103', out)
  # Tests that the EMSCRIPTEN_GENERATE_BITCODE_STATIC_LIBRARIES CMake toolchain option is rejected with an error.
def test_cmake_bitcode_static_libraries(self):
# Test that this option produces an error
err = self.expect_fail([emcmake, 'cmake', path_from_root('tests', 'cmake', 'static_lib'), '-DEMSCRIPTEN_GENERATE_BITCODE_STATIC_LIBRARIES=ON'])
self.assertContained('EMSCRIPTEN_GENERATE_BITCODE_STATIC_LIBRARIES is not compatible with the', err)
# Tests that the CMake variable EMSCRIPTEN_VERSION is properly provided to user CMake scripts
def test_cmake_emscripten_version(self):
self.run_process([emcmake, 'cmake', path_from_root('tests', 'cmake', 'emscripten_version')])
def test_system_include_paths(self):
# Verify that all default include paths are within `emscripten/system`
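    # `clang -v` prints its include search list roughly as:
    #   #include <...> search starts here:
    #    <path>
    #    <path>
    #   End of search list.
    # verify_includes() extracts the paths between those two markers.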
def verify_includes(stderr):
self.assertContained('<...> search starts here:', stderr)
assert stderr.count('End of search list.') == 1, stderr
start = stderr.index('<...> search starts here:')
end = stderr.index('End of search list.')
includes = stderr[start:end]
includes = [i.strip() for i in includes.splitlines()[1:]]
cachedir = os.path.normpath(shared.Cache.dirname)
llvmroot = os.path.normpath(os.path.dirname(config.LLVM_ROOT))
for i in includes:
i = os.path.normpath(i)
# we also allow for the cache include directory and llvm's own builtin includes.
# all other include paths should be inside the sysroot.
if i.startswith(cachedir) or i.startswith(llvmroot):
continue
self.assertContained(path_from_root('system'), i)
err = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-v'], stderr=PIPE).stderr
verify_includes(err)
err = self.run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-v'], stderr=PIPE).stderr
verify_includes(err)
def test_failure_error_code(self):
for compiler in [EMCC, EMXX]:
# Test that if one file is missing from the build, then emcc shouldn't succeed, and shouldn't produce an output file.
self.expect_fail([compiler, path_from_root('tests', 'hello_world.c'), 'this_file_is_missing.c', '-o', 'out.js'])
self.assertFalse(os.path.exists('out.js'))
def test_use_cxx(self):
create_test_file('empty_file', ' ')
dash_xc = self.run_process([EMCC, '-v', '-xc', 'empty_file'], stderr=PIPE).stderr
self.assertNotContained('-x c++', dash_xc)
dash_xcpp = self.run_process([EMCC, '-v', '-xc++', 'empty_file'], stderr=PIPE).stderr
self.assertContained('-x c++', dash_xcpp)
def test_cxx11(self):
for std in ['-std=c++11', '--std=c++11']:
for compiler in [EMCC, EMXX]:
self.run_process([compiler, std, path_from_root('tests', 'hello_cxx11.cpp')])
# Regression test for issue #4522: Incorrect CC vs CXX detection
def test_incorrect_c_detection(self):
# This auto-detection only works for the compile phase.
# For linking you need to use `em++` or pass `-x c++`
create_test_file('test.c', 'foo\n')
for compiler in [EMCC, EMXX]:
self.run_process([compiler, '-c', '--bind', '--embed-file', 'test.c', path_from_root('tests', 'hello_world.cpp')])
def test_odd_suffixes(self):
for suffix in ['CPP', 'c++', 'C++', 'cxx', 'CXX', 'cc', 'CC']:
self.clear()
print(suffix)
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'test.' + suffix)
self.run_process([EMCC, self.in_dir('test.' + suffix)])
self.assertContained('hello, world!', self.run_js('a.out.js'))
for suffix in ['lo']:
self.clear()
print(suffix)
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-shared', '-o', 'binary.' + suffix])
self.run_process([EMCC, 'binary.' + suffix])
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_preprocessed_input(self):
    # .i and .ii files are assumed to be the output of the pre-processor, so clang doesn't add include
# paths. This means we can only compile and run things that don't contain includes.
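    # (Such files can be produced with the -E flag, e.g. `emcc -E foo.c -o foo.i`.)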
for suffix in ['.i', '.ii']:
create_test_file('simple' + suffix, '''
#ifdef __cplusplus
extern "C" {
#endif
int puts(const char *s);
#ifdef __cplusplus
}
#endif
int main() { puts("hello"); }
''')
self.run_process([EMCC, 'simple' + suffix])
self.assertContained('hello', self.run_js('a.out.js'))
create_test_file('with_include' + suffix, '#include <stdio.h>\nint main() { puts("hello"); }')
err = self.expect_fail([EMCC, 'with_include' + suffix])
self.assertContained('fatal error: \'stdio.h\' file not found', err)
def test_wl_linkflags(self):
    # Test passing -L and -l via -Wl, arguments and via -Wl, response files
create_test_file('main.cpp', '''
extern "C" void printey();
int main() {
printey();
return 0;
}
''')
create_test_file('libfile.cpp', '''
#include <stdio.h>
extern "C" void printey() {
printf("hello from lib\\n");
}
''')
create_test_file('linkflags.txt', '''
-L.
-lfoo
''')
self.run_process([EMCC, '-o', 'libfile.o', '-c', 'libfile.cpp'])
self.run_process([EMAR, 'cr', 'libfoo.a', 'libfile.o'])
self.run_process([EMCC, 'main.cpp', '-L.', '-lfoo'])
self.run_process([EMCC, 'main.cpp', '-Wl,-L.', '-Wl,-lfoo'])
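    # -Wl,@linkflags.txt forwards linkflags.txt to the link as a response file,
    # so the -L. and -lfoo flags inside it are applied at link time.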
self.run_process([EMCC, 'main.cpp', '-Wl,@linkflags.txt'])
def test_l_link(self):
# Linking with -lLIBNAME and -L/DIRNAME should work, also should work with spaces
create_test_file('main.cpp', '''
extern void printey();
int main() {
printey();
return 0;
}
''')
create_test_file('libfile.cpp', '''
#include <stdio.h>
void printey() {
printf("hello from lib\\n");
}
''')
ensure_dir('libdir')
libfile = self.in_dir('libdir', 'libfile.so')
aout = 'a.out.js'
def build(path, args):
self.run_process([EMCC, path] + args)
# Test linking the library built here by emcc
build('libfile.cpp', ['-c'])
shutil.move('libfile.o', libfile)
build('main.cpp', ['-L' + 'libdir', '-lfile'])
self.assertContained('hello from lib', self.run_js(aout))
# Also test execution with `-l c` and space-separated library linking syntax
os.remove(aout)
build('libfile.cpp', ['-c', '-l', 'c'])
shutil.move('libfile.o', libfile)
build('main.cpp', ['-L', 'libdir', '-l', 'file'])
self.assertContained('hello from lib', self.run_js(aout))
# Must not leave unneeded linker stubs
self.assertNotExists('a.out')
self.assertNotExists('a.exe')
def test_commons_link(self):
create_test_file('a.h', r'''
#if !defined(A_H)
#define A_H
extern int foo[8];
#endif
''')
create_test_file('a.c', r'''
#include "a.h"
int foo[8];
''')
create_test_file('main.c', r'''
#include <stdio.h>
#include "a.h"
int main() {
printf("|%d|\n", foo[0]);
return 0;
}
''')
self.run_process([EMCC, '-o', 'a.o', '-c', 'a.c'])
self.run_process([EMAR, 'rv', 'library.a', 'a.o'])
self.run_process([EMCC, '-o', 'main.o', '-c', 'main.c'])
self.run_process([EMCC, '-o', 'a.js', 'main.o', 'library.a'])
self.assertContained('|0|', self.run_js('a.js'))
@parameterized({
'expand_symlinks': [[]],
'no_canonical_prefixes': [['-no-canonical-prefixes']],
})
@no_windows('Windows does not support symlinks')
def test_symlink_points_to_bad_suffix(self, flags):
"""Tests compiling a symlink where foobar.c points to foobar.xxx.
In this case, we should always successfully compile the code."""
create_test_file('foobar.xxx', 'int main(){ return 0; }')
os.symlink('foobar.xxx', 'foobar.c')
self.run_process([EMCC, 'foobar.c', '-c', '-o', 'foobar.o'] + flags)
@no_windows('Windows does not support symlinks')
def test_symlink_has_bad_suffix(self):
"""Tests that compiling foobar.xxx fails even if it points to foobar.c.
"""
create_test_file('foobar.c', 'int main(){ return 0; }')
os.symlink('foobar.c', 'foobar.xxx')
err = self.expect_fail([EMCC, 'foobar.xxx', '-o', 'foobar.js'])
self.assertContained('unknown file type: foobar.xxx', err)
def test_multiply_defined_libsymbols(self):
lib_name = 'libA.c'
a2_name = 'a2.c'
b2_name = 'b2.c'
main_name = 'main.c'
create_test_file(lib_name, 'int mult() { return 1; }')
create_test_file(a2_name, 'void x() {}')
create_test_file(b2_name, 'void y() {}')
create_test_file(main_name, r'''
#include <stdio.h>
int mult();
int main() {
printf("result: %d\n", mult());
return 0;
}
''')
building.emcc(lib_name, ['-shared'], output_filename='libA.so')
building.emcc(a2_name, ['-r', '-L.', '-lA'])
building.emcc(b2_name, ['-r', '-L.', '-lA'])
building.emcc(main_name, ['-L.', '-lA', a2_name + '.o', b2_name + '.o'], output_filename='a.out.js')
self.assertContained('result: 1', self.run_js('a.out.js'))
def test_multiply_defined_libsymbols_2(self):
a = "int x() { return 55; }"
a_name = 'a.c'
create_test_file(a_name, a)
b = "int y() { return 2; }"
b_name = 'b.c'
create_test_file(b_name, b)
c = "int z() { return 5; }"
c_name = 'c.c'
create_test_file(c_name, c)
main = r'''
#include <stdio.h>
int x();
int y();
int z();
int main() {
printf("result: %d\n", x() + y() + z());
return 0;
}
'''
main_name = 'main.c'
create_test_file(main_name, main)
building.emcc(a_name, ['-c']) # a.c.o
building.emcc(b_name, ['-c']) # b.c.o
building.emcc(c_name, ['-c']) # c.c.o
lib_name = 'libLIB.a'
building.emar('cr', lib_name, [a_name + '.o', b_name + '.o']) # libLIB.a with a and b
# a is in the lib AND in an .o, so should be ignored in the lib. We do still need b from the lib though
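    # Expected output: x() + y() + z() = 55 + 2 + 5 = 62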
building.emcc(main_name, [a_name + '.o', c_name + '.o', '-L.', '-lLIB'], output_filename='a.out.js')
self.assertContained('result: 62', self.run_js('a.out.js'))
def test_link_group(self):
lib_src_name = 'lib.c'
create_test_file(lib_src_name, 'int x() { return 42; }')
main_name = 'main.c'
create_test_file(main_name, r'''
#include <stdio.h>
int x();
int main() {
printf("result: %d\n", x());
return 0;
}
''')
building.emcc(lib_src_name, ['-c']) # lib.c.o
lib_name = 'libLIB.a'
building.emar('cr', lib_name, [lib_src_name + '.o']) # libLIB.a with lib.c.o
def test(lib_args, err_expected):
print(err_expected)
output = self.run_process([EMCC, main_name, '-o', 'a.out.js'] + lib_args, stdout=PIPE, stderr=PIPE, check=not err_expected)
if err_expected:
self.assertContained(err_expected, output.stderr)
else:
self.assertNotContained('undefined symbol', output.stderr)
out_js = 'a.out.js'
self.assertExists(out_js, output.stdout + '\n' + output.stderr)
self.assertContained('result: 42', self.run_js(out_js))
test(['-Wl,--start-group', lib_name, '-Wl,--end-group'], None)
test(['-Wl,--start-group', lib_name], None)
print('embind test with groups')
main_name = 'main.cpp'
create_test_file(main_name, r'''
#include <stdio.h>
#include <emscripten/val.h>
using namespace emscripten;
extern "C" int x();
int main() {
int y = -x();
y = val::global("Math").call<int>("abs", y);
printf("result: %d\n", y);
return 0;
}
''')
test(['-Wl,--start-group', lib_name, '-Wl,--end-group', '--bind'], None)
def test_whole_archive(self):
# Verify that -Wl,--whole-archive includes the static constructor from the
# otherwise unreferenced library.
self.run_process([EMCC, '-c', '-o', 'main.o', path_from_root('tests', 'test_whole_archive', 'main.c')])
self.run_process([EMCC, '-c', '-o', 'testlib.o', path_from_root('tests', 'test_whole_archive', 'testlib.c')])
self.run_process([EMAR, 'crs', 'libtest.a', 'testlib.o'])
self.run_process([EMCC, '-Wl,--whole-archive', 'libtest.a', '-Wl,--no-whole-archive', 'main.o'])
self.assertContained('foo is: 42\n', self.run_js('a.out.js'))
self.run_process([EMCC, '-Wl,-whole-archive', 'libtest.a', '-Wl,-no-whole-archive', 'main.o'])
self.assertContained('foo is: 42\n', self.run_js('a.out.js'))
    # Verify that --no-whole-archive prevents the inclusion of the ctor
self.run_process([EMCC, '-Wl,-whole-archive', '-Wl,--no-whole-archive', 'libtest.a', 'main.o'])
self.assertContained('foo is: 0\n', self.run_js('a.out.js'))
def test_whole_archive_48156(self):
# Regression test for http://llvm.org/PR48156
# TODO: distill this test further and move to lld
self.run_process([EMCC, '-c', '-o', 'foo.o', '-O1',
path_from_root('tests', 'test_whole_archive_foo.cpp')])
self.run_process([EMCC, '-c', '-o', 'main.o', '-O1',
path_from_root('tests', 'test_whole_archive_main.cpp')])
self.run_process([EMAR, 'rc', 'libfoo.a', 'foo.o'])
self.run_process([EMAR, 'rc', 'libmain.a', 'main.o'])
self.run_process([
EMCC, path_from_root('tests', 'test_whole_archive_init.cpp'),
'-O1', 'libfoo.a', '-Wl,--whole-archive', 'libmain.a', '-Wl,--no-whole-archive'])
self.assertContained('Result: 11', self.run_js('a.out.js'))
def test_link_group_bitcode(self):
create_test_file('1.c', r'''
int f(void);
int main() {
f();
return 0;
}
''')
create_test_file('2.c', r'''
#include <stdio.h>
int f() {
printf("Hello\n");
return 0;
}
''')
self.run_process([EMCC, '-o', '1.o', '-c', '1.c'])
self.run_process([EMCC, '-o', '2.o', '-c', '2.c'])
self.run_process([EMAR, 'crs', '2.a', '2.o'])
self.run_process([EMCC, '-r', '-o', 'out.bc', '-Wl,--start-group', '2.a', '1.o', '-Wl,--end-group'])
self.run_process([EMCC, 'out.bc'])
self.assertContained('Hello', self.run_js('a.out.js'))
# We deliberately ignore duplicate input files in order to allow
# "libA.so" on the command line twice. This is not really .so support
# and the .so files are really object files.
def test_redundant_link(self):
create_test_file('libA.c', 'int mult() { return 1; }')
create_test_file('main.c', r'''
#include <stdio.h>
int mult();
int main() {
printf("result: %d\n", mult());
return 0;
}
''')
building.emcc('libA.c', ['-shared'], output_filename='libA.so')
building.emcc('main.c', ['libA.so', 'libA.so'], output_filename='a.out.js')
self.assertContained('result: 1', self.run_js('a.out.js'))
def test_dot_a_all_contents_invalid(self):
# check that we error if an object file in a .a is not valid bitcode.
# do not silently ignore native object files, which may have been
# built by mistake
create_test_file('native.c', 'int native() { return 5; }')
create_test_file('main.c', 'extern int native(); int main() { return native(); }')
self.run_process([CLANG_CC, 'native.c', '-target', 'x86_64-linux', '-c', '-o', 'native.o'])
self.run_process([EMAR, 'crs', 'libfoo.a', 'native.o'])
stderr = self.expect_fail([EMCC, 'main.c', 'libfoo.a'])
self.assertContained('unknown file type', stderr)
def test_export_all(self):
lib = r'''
#include <stdio.h>
void libf1() { printf("libf1\n"); }
void libf2() { printf("libf2\n"); }
'''
create_test_file('lib.c', lib)
create_test_file('main.js', '''
var Module = {
onRuntimeInitialized: function() {
_libf1();
_libf2();
}
};
''')
building.emcc('lib.c', ['-s', 'EXPORT_ALL', '-s', 'LINKABLE', '--pre-js', 'main.js'], output_filename='a.out.js')
self.assertContained('libf1\nlibf2\n', self.run_js('a.out.js'))
def test_export_all_and_exported_functions(self):
# EXPORT_ALL should not export library functions by default.
    # This means that to export library functions you also need to explicitly
    # list them in EXPORTED_FUNCTIONS.
lib = r'''
#include <stdio.h>
#include <emscripten.h>
EMSCRIPTEN_KEEPALIVE void libfunc() { puts("libfunc\n"); }
'''
create_test_file('lib.c', lib)
create_test_file('main.js', '''
var Module = {
onRuntimeInitialized: function() {
_libfunc();
__get_daylight();
}
};
''')
# __get_daylight should not be linked by default, even with EXPORT_ALL
building.emcc('lib.c', ['-s', 'EXPORT_ALL', '--pre-js', 'main.js'], output_filename='a.out.js')
err = self.run_js('a.out.js', assert_returncode=NON_ZERO)
self.assertContained('__get_daylight is not defined', err)
building.emcc('lib.c', ['-s', "EXPORTED_FUNCTIONS=['__get_daylight']", '-s', 'EXPORT_ALL', '--pre-js', 'main.js'], output_filename='a.out.js')
self.assertContained('libfunc\n', self.run_js('a.out.js'))
def test_stdin(self):
def run_test():
for engine in config.JS_ENGINES:
if engine == config.V8_ENGINE:
continue # no stdin support in v8 shell
engine[0] = os.path.normpath(engine[0])
print(engine, file=sys.stderr)
# work around a bug in python's subprocess module
# (we'd use self.run_js() normally)
try_delete('out.txt')
cmd = jsrun.make_command(os.path.normpath('out.js'), engine)
cmd = shared.shlex_join(cmd)
if WINDOWS:
os.system('type "in.txt" | {} >out.txt'.format(cmd))
else: # posix
os.system('cat in.txt | {} > out.txt'.format(cmd))
self.assertContained('abcdef\nghijkl\neof', open('out.txt').read())
building.emcc(path_from_root('tests', 'module', 'test_stdin.c'), output_filename='out.js')
create_test_file('in.txt', 'abcdef\nghijkl')
run_test()
building.emcc(path_from_root('tests', 'module', 'test_stdin.c'),
['-O2', '--closure', '1'], output_filename='out.js')
run_test()
def test_ungetc_fscanf(self):
create_test_file('main.cpp', r'''
#include <stdio.h>
int main(int argc, char const *argv[])
{
char str[4] = {0};
FILE* f = fopen("my_test.input", "r");
if (f == NULL) {
printf("cannot open file\n");
return -1;
}
ungetc('x', f);
ungetc('y', f);
ungetc('z', f);
fscanf(f, "%3s", str);
printf("%s\n", str);
return 0;
}
''')
create_test_file('my_test.input', 'abc')
building.emcc('main.cpp', ['--embed-file', 'my_test.input'], output_filename='a.out.js')
self.assertContained('zyx', self.run_process(config.JS_ENGINES[0] + ['a.out.js'], stdout=PIPE, stderr=PIPE).stdout)
def test_abspaths(self):
    # Includes with absolute paths are generally dangerous; things like -I/usr/.. will pick up local system headers, not our portable ones.
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'main.c')
for args, expected in [(['-I/usr/something', '-Wwarn-absolute-paths'], True),
(['-L/usr/something', '-Wwarn-absolute-paths'], True),
(['-I/usr/something'], False),
(['-L/usr/something'], False),
(['-I/usr/something', '-Wno-warn-absolute-paths'], False),
(['-L/usr/something', '-Wno-warn-absolute-paths'], False),
(['-Isubdir/something', '-Wwarn-absolute-paths'], False),
(['-Lsubdir/something', '-Wwarn-absolute-paths'], False),
([], False)]:
print(args, expected)
proc = self.run_process([EMCC, 'main.c'] + args, stderr=PIPE)
WARNING = 'encountered. If this is to a local system header/library, it may cause problems (local system files make sense for compiling natively on your system, but not necessarily to JavaScript)'
self.assertContainedIf(WARNING, proc.stderr, expected)
def test_local_link(self):
# Linking a local library directly, like /usr/lib/libsomething.so, cannot work of course since it
    # doesn't contain bitcode. However, when we see that, we should look for a bitcode file for that
# library in the -L paths and system/lib
create_test_file('main.cpp', '''
extern void printey();
int main() {
printey();
return 0;
}
''')
ensure_dir('subdir')
open(os.path.join('subdir', 'libfile.so'), 'w').write('this is not llvm bitcode!')
create_test_file('libfile.cpp', '''
#include <stdio.h>
void printey() {
printf("hello from lib\\n");
}
''')
self.run_process([EMCC, 'libfile.cpp', '-shared', '-o', 'libfile.so'], stderr=PIPE)
self.run_process([EMCC, 'main.cpp', os.path.join('subdir', 'libfile.so'), '-L.'])
self.assertContained('hello from lib', self.run_js('a.out.js'))
def test_identical_basenames(self):
# Issue 287: files in different dirs but with the same basename get confused as the same,
# causing multiply defined symbol errors
ensure_dir('foo')
ensure_dir('bar')
open(os.path.join('foo', 'main.cpp'), 'w').write('''
extern void printey();
int main() {
printey();
return 0;
}
''')
open(os.path.join('bar', 'main.cpp'), 'w').write('''
#include <stdio.h>
void printey() { printf("hello there\\n"); }
''')
self.run_process([EMCC, os.path.join('foo', 'main.cpp'), os.path.join('bar', 'main.cpp')])
self.assertContained('hello there', self.run_js('a.out.js'))
# ditto with first creating .o files
try_delete('a.out.js')
self.run_process([EMCC, '-c', os.path.join('foo', 'main.cpp'), '-o', os.path.join('foo', 'main.o')])
self.run_process([EMCC, '-c', os.path.join('bar', 'main.cpp'), '-o', os.path.join('bar', 'main.o')])
self.run_process([EMCC, os.path.join('foo', 'main.o'), os.path.join('bar', 'main.o')])
self.assertContained('hello there', self.run_js('a.out.js'))
def test_main_a(self):
# if main() is in a .a, we need to pull in that .a
main_name = 'main.c'
create_test_file(main_name, r'''
#include <stdio.h>
extern int f();
int main() {
printf("result: %d.\n", f());
return 0;
}
''')
other_name = 'other.c'
create_test_file(other_name, r'''
#include <stdio.h>
int f() { return 12346; }
''')
self.run_process([EMCC, main_name, '-c', '-o', main_name + '.bc'])
self.run_process([EMCC, other_name, '-c', '-o', other_name + '.bc'])
self.run_process([EMAR, 'cr', main_name + '.a', main_name + '.bc'])
self.run_process([EMCC, other_name + '.bc', main_name + '.a'])
self.assertContained('result: 12346.', self.run_js('a.out.js'))
def test_multiple_archives_duplicate_basenames(self):
create_test_file('common.c', r'''
#include <stdio.h>
void a(void) {
printf("a\n");
}
''')
self.run_process([EMCC, 'common.c', '-c', '-o', 'common.o'])
try_delete('liba.a')
self.run_process([EMAR, 'rc', 'liba.a', 'common.o'])
create_test_file('common.c', r'''
#include <stdio.h>
void b(void) {
printf("b\n");
}
''')
self.run_process([EMCC, 'common.c', '-c', '-o', 'common.o'])
try_delete('libb.a')
self.run_process([EMAR, 'rc', 'libb.a', 'common.o'])
create_test_file('main.c', r'''
void a(void);
void b(void);
int main() {
a();
b();
}
''')
self.run_process([EMCC, 'main.c', '-L.', '-la', '-lb'])
self.assertContained('a\nb\n', self.run_js('a.out.js'))
def test_archive_duplicate_basenames(self):
ensure_dir('a')
create_test_file(os.path.join('a', 'common.c'), r'''
#include <stdio.h>
void a(void) {
printf("a\n");
}
''')
self.run_process([EMCC, os.path.join('a', 'common.c'), '-c', '-o', os.path.join('a', 'common.o')])
ensure_dir('b')
create_test_file(os.path.join('b', 'common.c'), r'''
#include <stdio.h>
void b(void) {
printf("b...\n");
}
''')
self.run_process([EMCC, os.path.join('b', 'common.c'), '-c', '-o', os.path.join('b', 'common.o')])
try_delete('liba.a')
self.run_process([EMAR, 'rc', 'liba.a', os.path.join('a', 'common.o'), os.path.join('b', 'common.o')])
# Verify that archive contains basenames with hashes to avoid duplication
text = self.run_process([EMAR, 't', 'liba.a'], stdout=PIPE).stdout
self.assertEqual(text.count('common'), 2)
for line in text.split('\n'):
# should not have huge hash names
self.assertLess(len(line), 20, line)
create_test_file('main.c', r'''
void a(void);
void b(void);
int main() {
a();
b();
}
''')
err = self.run_process([EMCC, 'main.c', '-L.', '-la'], stderr=PIPE).stderr
self.assertNotIn('archive file contains duplicate entries', err)
self.assertContained('a\nb...\n', self.run_js('a.out.js'))
# Using llvm-ar directly should cause duplicate basenames
try_delete('libdup.a')
self.run_process([LLVM_AR, 'rc', 'libdup.a', os.path.join('a', 'common.o'), os.path.join('b', 'common.o')])
text = self.run_process([EMAR, 't', 'libdup.a'], stdout=PIPE).stdout
self.assertEqual(text.count('common.o'), 2)
# With fastcomp we don't support duplicate members so this should generate
# a warning. With the wasm backend (lld) this is fully supported.
cmd = [EMCC, 'main.c', '-L.', '-ldup']
self.run_process(cmd)
self.assertContained('a\nb...\n', self.run_js('a.out.js'))
def test_export_from_archive(self):
export_name = 'this_is_an_entry_point'
full_export_name = '_this_is_an_entry_point'
create_test_file('export.c', r'''
#include <stdio.h>
void this_is_an_entry_point(void) {
printf("Hello, world!\n");
}
''')
self.run_process([EMCC, 'export.c', '-c', '-o', 'export.o'])
self.run_process([EMAR, 'rc', 'libexport.a', 'export.o'])
create_test_file('main.c', r'''
int main() {
return 0;
}
''')
# Sanity check: the symbol should not be linked in if not requested.
self.run_process([EMCC, 'main.c', '-L.', '-lexport'])
self.assertFalse(self.is_exported_in_wasm(export_name, 'a.out.wasm'))
# Exporting it causes it to appear in the output.
self.run_process([EMCC, 'main.c', '-L.', '-lexport', '-s', "EXPORTED_FUNCTIONS=['%s']" % full_export_name])
self.assertTrue(self.is_exported_in_wasm(export_name, 'a.out.wasm'))
def test_embed_file(self):
create_test_file('somefile.txt', 'hello from a file with lots of data and stuff in it thank you very much')
create_test_file('main.cpp', r'''
#include <stdio.h>
int main() {
FILE *f = fopen("somefile.txt", "r");
char buf[100];
fread(buf, 1, 20, f);
buf[20] = 0;
fclose(f);
printf("|%s|\n", buf);
return 0;
}
''')
self.run_process([EMCC, 'main.cpp', '--embed-file', 'somefile.txt'])
self.assertContained('|hello from a file wi|', self.run_js('a.out.js'))
    # embedding the same file twice should not error
self.run_process([EMCC, 'main.cpp', '--embed-file', 'somefile.txt', '--embed-file', 'somefile.txt'])
self.assertContained('|hello from a file wi|', self.run_js('a.out.js'))
def test_embed_file_dup(self):
ensure_dir(self.in_dir('tst', 'test1'))
ensure_dir(self.in_dir('tst', 'test2'))
open(self.in_dir('tst', 'aa.txt'), 'w').write('frist')
open(self.in_dir('tst', 'test1', 'aa.txt'), 'w').write('sacond')
open(self.in_dir('tst', 'test2', 'aa.txt'), 'w').write('thard')
create_test_file('main.cpp', r'''
#include <stdio.h>
#include <string.h>
void print_file(const char *name) {
FILE *f = fopen(name, "r");
char buf[100];
memset(buf, 0, 100);
fread(buf, 1, 20, f);
buf[20] = 0;
fclose(f);
printf("|%s|\n", buf);
}
int main() {
print_file("tst/aa.txt");
print_file("tst/test1/aa.txt");
print_file("tst/test2/aa.txt");
return 0;
}
''')
self.run_process([EMCC, 'main.cpp', '--embed-file', 'tst'])
self.assertContained('|frist|\n|sacond|\n|thard|\n', self.run_js('a.out.js'))
def test_exclude_file(self):
ensure_dir(self.in_dir('tst', 'abc.exe'))
ensure_dir(self.in_dir('tst', 'abc.txt'))
open(self.in_dir('tst', 'hello.exe'), 'w').write('hello')
open(self.in_dir('tst', 'hello.txt'), 'w').write('world')
open(self.in_dir('tst', 'abc.exe', 'foo'), 'w').write('emscripten')
open(self.in_dir('tst', 'abc.txt', 'bar'), 'w').write('!!!')
create_test_file('main.cpp', r'''
#include <stdio.h>
int main() {
if(fopen("tst/hello.exe", "rb")) printf("Failed\n");
if(!fopen("tst/hello.txt", "rb")) printf("Failed\n");
if(fopen("tst/abc.exe/foo", "rb")) printf("Failed\n");
if(!fopen("tst/abc.txt/bar", "rb")) printf("Failed\n");
return 0;
}
''')
self.run_process([EMCC, 'main.cpp', '--embed-file', 'tst', '--exclude-file', '*.exe'])
self.assertEqual(self.run_js('a.out.js').strip(), '')
def test_multidynamic_link(self):
    # Linking the same dynamic library in statically would normally error, since we statically
    # link it, causing duplicate symbols
def test(link_flags, lib_suffix):
print(link_flags, lib_suffix)
self.clear()
ensure_dir('libdir')
create_test_file('main.cpp', r'''
#include <stdio.h>
extern void printey();
extern void printother();
int main() {
printf("*");
printey();
printf("\n");
printother();
printf("\n");
printf("*");
return 0;
}
''')
create_test_file(os.path.join('libdir', 'libfile.cpp'), '''
#include <stdio.h>
void printey() {
printf("hello from lib");
}
''')
create_test_file(os.path.join('libdir', 'libother.cpp'), '''
#include <stdio.h>
extern void printey();
void printother() {
printf("|");
printey();
printf("|");
}
''')
# Build libfile normally into an .so
self.run_process([EMCC, os.path.join('libdir', 'libfile.cpp'), '-shared', '-o', os.path.join('libdir', 'libfile.so' + lib_suffix)])
# Build libother and dynamically link it to libfile
self.run_process([EMCC, os.path.join('libdir', 'libother.cpp')] + link_flags + ['-shared', '-o', os.path.join('libdir', 'libother.so')])
# Build the main file, linking in both the libs
self.run_process([EMCC, '-Llibdir', os.path.join('main.cpp')] + link_flags + ['-lother', '-c'])
print('...')
# The normal build system is over. We need to do an additional step to link in the dynamic
# libraries, since we ignored them before
self.run_process([EMCC, '-Llibdir', 'main.o'] + link_flags + ['-lother', '-s', 'EXIT_RUNTIME=1'])
self.assertContained('*hello from lib\n|hello from lib|\n*', self.run_js('a.out.js'))
test(['-lfile'], '') # -l, auto detection from library path
test([self.in_dir('libdir', 'libfile.so.3.1.4.1.5.9')], '.3.1.4.1.5.9') # handle libX.so.1.2.3 as well
def test_js_link(self):
create_test_file('main.cpp', '''
#include <stdio.h>
int main() {
printf("hello from main\\n");
return 0;
}
''')
create_test_file('before.js', '''
var MESSAGE = 'hello from js';
// Module is initialized with empty object by default, so if there are no keys - nothing was run yet
if (Object.keys(Module).length) throw 'This code should run before anything else!';
''')
create_test_file('after.js', '''
out(MESSAGE);
''')
self.run_process([EMCC, 'main.cpp', '--pre-js', 'before.js', '--post-js', 'after.js', '-s', 'WASM_ASYNC_COMPILATION=0'])
self.assertContained('hello from main\nhello from js\n', self.run_js('a.out.js'))
def test_sdl_endianness(self):
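    # Emscripten targets are little-endian, so SDL_BYTEORDER should equal
    # SDL_LIL_ENDIAN (1234), as the assertion below checks.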
create_test_file('main.cpp', r'''
#include <stdio.h>
#include <SDL/SDL.h>
int main() {
printf("%d, %d, %d\n", SDL_BYTEORDER, SDL_LIL_ENDIAN, SDL_BIG_ENDIAN);
return 0;
}
''')
self.run_process([EMCC, 'main.cpp'])
self.assertContained('1234, 1234, 4321\n', self.run_js('a.out.js'))
def test_sdl2_mixer_wav(self):
building.emcc(path_from_root('tests', 'sdl2_mixer_wav.c'), ['-s', 'USE_SDL_MIXER=2'], output_filename='a.out.js')
def test_libpng(self):
shutil.copyfile(path_from_root('tests', 'third_party', 'libpng', 'pngtest.png'), 'pngtest.png')
building.emcc(path_from_root('tests', 'third_party', 'libpng', 'pngtest.c'), ['--embed-file', 'pngtest.png', '-s', 'USE_LIBPNG=1'], output_filename='a.out.js')
output = self.run_js('a.out.js')
self.assertContained('libpng passes test', output)
def test_libjpeg(self):
shutil.copyfile(path_from_root('tests', 'screenshot.jpg'), 'screenshot.jpg')
building.emcc(path_from_root('tests', 'jpeg_test.c'), ['--embed-file', 'screenshot.jpg', '-s', 'USE_LIBJPEG=1'], output_filename='a.out.js')
self.assertContained('Image is 600 by 450 with 3 components', self.run_js('a.out.js', args=['screenshot.jpg']))
def test_bullet(self):
building.emcc(path_from_root('tests', 'bullet_hello_world.cpp'), ['-s', 'USE_BULLET=1'], output_filename='a.out.js')
self.assertContained('BULLET RUNNING', self.run_process(config.JS_ENGINES[0] + ['a.out.js'], stdout=PIPE, stderr=PIPE).stdout)
def test_vorbis(self):
# This will also test if ogg compiles, because vorbis depends on ogg
building.emcc(path_from_root('tests', 'vorbis_test.c'), ['-s', 'USE_VORBIS=1'], output_filename='a.out.js')
self.assertContained('ALL OK', self.run_process(config.JS_ENGINES[0] + ['a.out.js'], stdout=PIPE, stderr=PIPE).stdout)
def test_bzip2(self):
building.emcc(path_from_root('tests', 'bzip2_test.c'), ['-s', 'USE_BZIP2=1'], output_filename='a.out.js')
self.assertContained("usage: unzcrash filename", self.run_process(config.JS_ENGINES[0] + ['a.out.js'], stdout=PIPE, stderr=PIPE).stdout)
def test_freetype(self):
# copy the Liberation Sans Bold truetype file located in the
# <emscripten_root>/tests/freetype to the compilation folder
shutil.copy2(path_from_root('tests/freetype', 'LiberationSansBold.ttf'), os.getcwd())
# build test program with the font file embed in it
building.emcc(path_from_root('tests', 'freetype_test.c'), ['-s', 'USE_FREETYPE=1', '--embed-file', 'LiberationSansBold.ttf'], output_filename='a.out.js')
# the test program will print an ascii representation of a bitmap where the
# 'w' character has been rendered using the Liberation Sans Bold font
expectedOutput = ' \n' + \
' \n' + \
' \n' + \
' \n' + \
'*** +***+ \n' + \
'***+ ***** +\n' + \
'+**+ ***** +\n' + \
'+*** +**+**+ *\n' + \
' ***+ ***+**+ +*\n' + \
' +**+ *** *** +*\n' + \
' +**++**+ +**+**\n' + \
' ***+**+ +**+**\n' + \
' ****** *****\n' + \
' +****+ +****\n' + \
' +****+ +****\n' + \
' **** ****'
self.assertContained(expectedOutput, self.run_process(config.JS_ENGINES[0] + ['a.out.js'], stdout=PIPE, stderr=PIPE).stdout)
def test_link_memcpy(self):
# memcpy can show up *after* optimizations, so after our opportunity to link in libc, so it must be special-cased
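    # (For example, LLVM can recognize the plain element-by-element copy loop
    # below at -O2 and replace it with a call to memcpy.)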
create_test_file('main.cpp', r'''
#include <stdio.h>
int main(int argc, char **argv) {
int num = argc + 10;
char buf[num], buf2[num];
for (int i = 0; i < num; i++) {
buf[i] = i*i+i/3;
}
for (int i = 1; i < num; i++) {
buf[i] += buf[i-1];
}
for (int i = 0; i < num; i++) {
buf2[i] = buf[i];
}
for (int i = 1; i < num; i++) {
buf2[i] += buf2[i-1];
}
for (int i = 0; i < num; i++) {
printf("%d:%d\n", i, buf2[i]);
}
return 0;
}
''')
self.run_process([EMCC, '-O2', 'main.cpp'])
output = self.run_js('a.out.js')
self.assertContained('''0:0
1:1
2:6
3:21
4:53
5:111
6:-49
7:98
8:55
9:96
10:-16
''', output)
self.assertNotContained('warning: library.js memcpy should not be running, it is only for testing!', output)
def test_undefined_exported_function(self):
cmd = [EMCC, path_from_root('tests', 'hello_world.cpp')]
self.run_process(cmd)
# adding a missing symbol to EXPORTED_FUNCTIONS should cause failure
cmd += ['-s', "EXPORTED_FUNCTIONS=['foobar']"]
err = self.expect_fail(cmd)
self.assertContained('undefined exported symbol: "foobar"', err)
# setting `-Wno-undefined` should suppress error
cmd += ['-Wno-undefined']
self.run_process(cmd)
def test_undefined_symbols(self):
create_test_file('main.cpp', r'''
#include <stdio.h>
#include <SDL.h>
#include "SDL/SDL_opengl.h"
extern "C" {
void something();
void elsey();
}
int main() {
printf("%p", SDL_GL_GetProcAddress("glGenTextures")); // pull in gl proc stuff, avoid warnings on emulation funcs
something();
elsey();
return 0;
}
''')
for args in ([], ['-O1'], ['-s', 'MAX_WEBGL_VERSION=2']):
for action in ('WARN', 'ERROR', None):
for value in ([0, 1]):
try_delete('a.out.js')
print('checking "%s" %s=%s' % (args, action, value))
extra = ['-s', action + '_ON_UNDEFINED_SYMBOLS=%d' % value] if action else []
proc = self.run_process([EMCC, 'main.cpp'] + extra + args, stderr=PIPE, check=False)
print(proc.stderr)
if value or action is None:
            # The default is that we error on undefined symbols
self.assertContained('error: undefined symbol: something', proc.stderr)
self.assertContained('error: undefined symbol: elsey', proc.stderr)
check_success = False
elif action == 'ERROR' and not value:
# Error disables, should only warn
self.assertContained('warning: undefined symbol: something', proc.stderr)
self.assertContained('warning: undefined symbol: elsey', proc.stderr)
self.assertNotContained('undefined symbol: emscripten_', proc.stderr)
check_success = True
elif action == 'WARN' and not value:
# Disabled warning should imply disabling errors
self.assertNotContained('undefined symbol', proc.stderr)
check_success = True
if check_success:
self.assertEqual(proc.returncode, 0)
self.assertTrue(os.path.exists('a.out.js'))
else:
self.assertNotEqual(proc.returncode, 0)
self.assertFalse(os.path.exists('a.out.js'))
def test_GetProcAddress_LEGACY_GL_EMULATION(self):
# without legacy gl emulation, getting a proc from there should fail
self.do_other_test('test_GetProcAddress_LEGACY_GL_EMULATION.cpp', run_args=['0'], emcc_args=['-s', 'LEGACY_GL_EMULATION=0'])
# with it, it should work
self.do_other_test('test_GetProcAddress_LEGACY_GL_EMULATION.cpp', run_args=['1'], emcc_args=['-s', 'LEGACY_GL_EMULATION=1'])
def test_prepost(self):
create_test_file('main.cpp', '''
#include <stdio.h>
int main() {
printf("hello from main\\n");
return 0;
}
''')
create_test_file('pre.js', '''
var Module = {
preRun: function() { out('pre-run') },
postRun: function() { out('post-run') }
};
''')
self.run_process([EMCC, 'main.cpp', '--pre-js', 'pre.js', '-s', 'WASM_ASYNC_COMPILATION=0'])
self.assertContained('pre-run\nhello from main\npost-run\n', self.run_js('a.out.js'))
# addRunDependency during preRun should prevent main, and post-run from
# running.
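    # (addRunDependency() bumps the runtime's pending-dependency counter; run(),
    # and hence main(), only executes once removeRunDependency() brings it back
    # to zero.)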
with open('pre.js', 'a') as f:
f.write('Module.preRun = function() { out("add-dep"); addRunDependency(); }\n')
self.run_process([EMCC, 'main.cpp', '--pre-js', 'pre.js', '-s', 'WASM_ASYNC_COMPILATION=0'])
output = self.run_js('a.out.js')
self.assertContained('add-dep\n', output)
self.assertNotContained('hello from main\n', output)
self.assertNotContained('post-run\n', output)
# noInitialRun prevents run
for no_initial_run, run_dep in [(0, 0), (1, 0), (0, 1)]:
print(no_initial_run, run_dep)
args = ['-s', 'WASM_ASYNC_COMPILATION=0', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["callMain"]']
if no_initial_run:
args += ['-s', 'INVOKE_RUN=0']
if run_dep:
create_test_file('pre.js', 'Module.preRun = function() { addRunDependency("test"); }')
create_test_file('post.js', 'removeRunDependency("test");')
args += ['--pre-js', 'pre.js', '--post-js', 'post.js']
self.run_process([EMCC, 'main.cpp'] + args)
output = self.run_js('a.out.js')
self.assertContainedIf('hello from main', output, not no_initial_run)
if no_initial_run:
# Calling main later should still work, filesystem etc. must be set up.
print('call main later')
src = open('a.out.js').read()
src += '\nModule.callMain();\n'
create_test_file('a.out.js', src)
self.assertContained('hello from main', self.run_js('a.out.js'))
# Use postInit
create_test_file('pre.js', '''
var Module = {
preRun: function() { out('pre-run') },
postRun: function() { out('post-run') },
preInit: function() { out('pre-init') }
};
''')
self.run_process([EMCC, 'main.cpp', '--pre-js', 'pre.js'])
self.assertContained('pre-init\npre-run\nhello from main\npost-run\n', self.run_js('a.out.js'))
def test_prepost2(self):
create_test_file('main.cpp', '''
#include <stdio.h>
int main() {
printf("hello from main\\n");
return 0;
}
''')
create_test_file('pre.js', '''
var Module = {
preRun: function() { out('pre-run') },
};
''')
create_test_file('pre2.js', '''
Module.postRun = function() { out('post-run') };
''')
self.run_process([EMCC, 'main.cpp', '--pre-js', 'pre.js', '--pre-js', 'pre2.js'])
self.assertContained('pre-run\nhello from main\npost-run\n', self.run_js('a.out.js'))
def test_prepre(self):
create_test_file('main.cpp', '''
#include <stdio.h>
int main() {
printf("hello from main\\n");
return 0;
}
''')
create_test_file('pre.js', '''
var Module = {
preRun: [function() { out('pre-run') }],
};
''')
create_test_file('pre2.js', '''
Module.preRun.push(function() { out('prepre') });
''')
self.run_process([EMCC, 'main.cpp', '--pre-js', 'pre.js', '--pre-js', 'pre2.js'])
self.assertContained('prepre\npre-run\nhello from main\n', self.run_js('a.out.js'))
def test_extern_prepost(self):
create_test_file('extern-pre.js', '''
// I am an external pre.
''')
create_test_file('extern-post.js', '''
// I am an external post.
''')
self.run_process([EMCC, '-O2', path_from_root('tests', 'hello_world.c'), '--extern-pre-js', 'extern-pre.js', '--extern-post-js', 'extern-post.js'])
# the files should be included, and externally - not as part of optimized
# code, so they are the very first and last things, and they are not
# minified.
with open('a.out.js') as output:
js = output.read()
pre = js.index('// I am an external pre.')
post = js.index('// I am an external post.')
# ignore some slack - newlines and other things. we just care about the
# big picture here
SLACK = 50
self.assertLess(pre, post)
self.assertLess(pre, SLACK)
self.assertGreater(post, len(js) - SLACK)
# make sure the slack is tiny compared to the whole program
self.assertGreater(len(js), 100 * SLACK)
def test_js_optimizer(self):
ACORN_PASSES = [
'JSDCE',
'AJSDCE',
'applyImportAndExportNameChanges',
'emitDCEGraph',
'applyDCEGraphRemovals',
'growableHeap',
'unsignPointers',
'asanify',
'safeHeap'
]
for input, expected, passes in [
(path_from_root('tests', 'optimizer', 'test-js-optimizer-minifyLocals.js'), open(path_from_root('tests', 'optimizer', 'test-js-optimizer-minifyLocals-output.js')).read(),
['minifyLocals']),
(path_from_root('tests', 'optimizer', 'JSDCE.js'), open(path_from_root('tests', 'optimizer', 'JSDCE-output.js')).read(),
['JSDCE']),
(path_from_root('tests', 'optimizer', 'JSDCE-hasOwnProperty.js'), open(path_from_root('tests', 'optimizer', 'JSDCE-hasOwnProperty-output.js')).read(),
['JSDCE']),
(path_from_root('tests', 'optimizer', 'JSDCE-fors.js'), open(path_from_root('tests', 'optimizer', 'JSDCE-fors-output.js')).read(),
['JSDCE']),
(path_from_root('tests', 'optimizer', 'AJSDCE.js'), open(path_from_root('tests', 'optimizer', 'AJSDCE-output.js')).read(),
['AJSDCE']),
(path_from_root('tests', 'optimizer', 'emitDCEGraph.js'), open(path_from_root('tests', 'optimizer', 'emitDCEGraph-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'emitDCEGraph2.js'), open(path_from_root('tests', 'optimizer', 'emitDCEGraph2-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'emitDCEGraph3.js'), open(path_from_root('tests', 'optimizer', 'emitDCEGraph3-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'emitDCEGraph4.js'), open(path_from_root('tests', 'optimizer', 'emitDCEGraph4-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'emitDCEGraph5.js'), open(path_from_root('tests', 'optimizer', 'emitDCEGraph5-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'minimal-runtime-applyDCEGraphRemovals.js'), open(path_from_root('tests', 'optimizer', 'minimal-runtime-applyDCEGraphRemovals-output.js')).read(),
['applyDCEGraphRemovals']),
(path_from_root('tests', 'optimizer', 'applyDCEGraphRemovals.js'), open(path_from_root('tests', 'optimizer', 'applyDCEGraphRemovals-output.js')).read(),
['applyDCEGraphRemovals']),
(path_from_root('tests', 'optimizer', 'applyImportAndExportNameChanges.js'), open(path_from_root('tests', 'optimizer', 'applyImportAndExportNameChanges-output.js')).read(),
['applyImportAndExportNameChanges']),
(path_from_root('tests', 'optimizer', 'applyImportAndExportNameChanges2.js'), open(path_from_root('tests', 'optimizer', 'applyImportAndExportNameChanges2-output.js')).read(),
['applyImportAndExportNameChanges']),
(path_from_root('tests', 'optimizer', 'minimal-runtime-emitDCEGraph.js'), open(path_from_root('tests', 'optimizer', 'minimal-runtime-emitDCEGraph-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'minimal-runtime-2-emitDCEGraph.js'), open(path_from_root('tests', 'optimizer', 'minimal-runtime-2-emitDCEGraph-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'standalone-emitDCEGraph.js'), open(path_from_root('tests', 'optimizer', 'standalone-emitDCEGraph-output.js')).read(),
['emitDCEGraph', 'noPrint']),
(path_from_root('tests', 'optimizer', 'emittedJSPreservesParens.js'), open(path_from_root('tests', 'optimizer', 'emittedJSPreservesParens-output.js')).read(),
['asm']),
(path_from_root('tests', 'optimizer', 'test-growableHeap.js'), open(path_from_root('tests', 'optimizer', 'test-growableHeap-output.js')).read(),
['growableHeap']),
(path_from_root('tests', 'optimizer', 'test-unsignPointers.js'), open(path_from_root('tests', 'optimizer', 'test-unsignPointers-output.js')).read(),
['unsignPointers']),
(path_from_root('tests', 'optimizer', 'test-asanify.js'), open(path_from_root('tests', 'optimizer', 'test-asanify-output.js')).read(),
['asanify']),
(path_from_root('tests', 'optimizer', 'test-safeHeap.js'), open(path_from_root('tests', 'optimizer', 'test-safeHeap-output.js')).read(),
['safeHeap']),
]:
print(input, passes)
if not isinstance(expected, list):
expected = [expected]
expected = [out.replace('\n\n', '\n').replace('\n\n', '\n') for out in expected]
acorn = any(p in ACORN_PASSES for p in passes)
# test calling optimizer
if not acorn:
print(' js')
output = self.run_process(config.NODE_JS + [path_from_root('tools', 'js-optimizer.js'), input] + passes, stdin=PIPE, stdout=PIPE).stdout
else:
print(' acorn')
output = self.run_process(config.NODE_JS + [path_from_root('tools', 'acorn-optimizer.js'), input] + passes, stdin=PIPE, stdout=PIPE).stdout
def check_js(js, expected):
# print >> sys.stderr, 'chak\n==========================\n', js, '\n===========================\n'
if 'registerizeHarder' in passes:
        # registerizeHarder is hard to test, as names vary by chance, nondeterministically FIXME
def fix(src):
if type(src) is list:
return list(map(fix, src))
src = '\n'.join([line for line in src.split('\n') if 'var ' not in line]) # ignore vars
def reorder(func):
def swap(func, stuff):
# emit EYE_ONE always before EYE_TWO, replacing i1,i2 or i2,i1 etc
for i in stuff:
if i not in func:
return func
indexes = [[i, func.index(i)] for i in stuff]
indexes.sort(key=lambda x: x[1])
for j in range(len(indexes)):
func = func.replace(indexes[j][0], 'STD_' + str(j))
return func
func = swap(func, ['i1', 'i2', 'i3'])
func = swap(func, ['i1', 'i2'])
func = swap(func, ['i4', 'i5'])
return func
src = 'function '.join(map(reorder, src.split('function ')))
return src
js = fix(js)
expected = fix(expected)
self.assertIdentical(expected, js.replace('\r\n', '\n').replace('\n\n', '\n').replace('\n\n', '\n'))
if input not in [ # blacklist of tests that are native-optimizer only
path_from_root('tests', 'optimizer', 'asmLastOpts.js'),
path_from_root('tests', 'optimizer', '3154.js')
]:
check_js(output, expected)
else:
print('(skip non-native)')
@parameterized({
'wasm2js': ('wasm2js', ['minifyNames', 'last']),
'constructor': ('constructor', ['minifyNames'])
})
def test_js_optimizer_py(self, name, passes):
# run the js optimizer python script. this differs from test_js_optimizer
# which runs the internal js optimizer JS script directly (which the python
# script calls)
shutil.copyfile(path_from_root('tests', 'optimizer', name + '.js'), name + '.js')
self.run_process([PYTHON, path_from_root('tools', 'js_optimizer.py'), name + '.js'] + passes)
with open(path_from_root('tests', 'optimizer', name + '-output.js')) as expected:
with open(name + '.js.jsopt.js') as actual:
self.assertIdentical(expected.read(), actual.read())
def test_m_mm(self):
create_test_file('foo.c', '#include <emscripten.h>')
for opt in ['M', 'MM']:
proc = self.run_process([EMCC, 'foo.c', '-' + opt], stdout=PIPE, stderr=PIPE)
self.assertContained('foo.o: ', proc.stdout)
self.assertNotContained('error', proc.stderr)
@uses_canonical_tmp
def test_emcc_debug_files(self):
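    # EMCC_DEBUG=1 (or 2) makes emcc save intermediate files from each build
    # stage into the canonical temp dir; with it unset, nothing should be
    # written there.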
for opts in [0, 1, 2, 3]:
for debug in [None, '1', '2']:
print(opts, debug)
if os.path.exists(self.canonical_temp_dir):
shutil.rmtree(self.canonical_temp_dir)
env = os.environ.copy()
if debug is None:
env.pop('EMCC_DEBUG', None)
else:
env['EMCC_DEBUG'] = debug
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-O' + str(opts)], stderr=PIPE, env=env)
if debug is None:
self.assertFalse(os.path.exists(self.canonical_temp_dir))
elif debug == '1':
self.assertExists(os.path.join(self.canonical_temp_dir, 'emcc-3-original.js'))
elif debug == '2':
self.assertExists(os.path.join(self.canonical_temp_dir, 'emcc-3-original.js'))
def test_debuginfo(self):
for args, expect_debug in [
(['-O0'], False),
(['-O0', '-g'], True),
(['-O0', '-g4'], True),
(['-O1'], False),
(['-O1', '-g'], True),
(['-O2'], False),
(['-O2', '-g'], True),
]:
print(args, expect_debug)
err = self.run_process([EMCC, '-v', path_from_root('tests', 'hello_world.cpp')] + args, stdout=PIPE, stderr=PIPE).stderr
lines = err.splitlines()
finalize = [l for l in lines if 'wasm-emscripten-finalize' in l][0]
if expect_debug:
self.assertIn(' -g ', finalize)
else:
self.assertNotIn(' -g ', finalize)
def test_debuginfo_line_tables_only(self):
def test(do_compile):
do_compile([])
no_size = os.path.getsize('a.out.wasm')
do_compile(['-gline-tables-only'])
line_size = os.path.getsize('a.out.wasm')
do_compile(['-g'])
full_size = os.path.getsize('a.out.wasm')
return (no_size, line_size, full_size)
def compile_to_object(compile_args):
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-c', '-o', 'a.out.wasm'] + compile_args)
no_size, line_size, full_size = test(compile_to_object)
self.assertLess(no_size, line_size)
self.assertLess(line_size, full_size)
def compile_to_executable(compile_args, link_args):
# compile with the specified args
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-c', '-o', 'a.o'] + compile_args)
# link with debug info
self.run_process([EMCC, 'a.o'] + link_args)
def compile_to_debug_executable(compile_args):
return compile_to_executable(compile_args, ['-g'])
no_size, line_size, full_size = test(compile_to_debug_executable)
self.assertLess(no_size, line_size)
self.assertLess(line_size, full_size)
def compile_to_release_executable(compile_args):
return compile_to_executable(compile_args, ['-O1'])
no_size, line_size, full_size = test(compile_to_release_executable)
self.assertEqual(no_size, line_size)
self.assertEqual(line_size, full_size)
# "-O0 executable" means compiling without optimizations but *also* without
# -g (so, not a true debug build). the results here may change over time,
# since we are telling emcc both to try to do as little as possible during
# link (-O0), but also that debug info is not needed (no -g). if we end up
# doing post-link changes then we will strip the debug info, but if not then
# we don't.
def compile_to_O0_executable(compile_args):
return compile_to_executable(compile_args, [])
no_size, line_size, full_size = test(compile_to_O0_executable)
self.assertEqual(no_size, line_size)
self.assertEqual(line_size, full_size)
def test_dwarf(self):
def compile_with_dwarf(args, output):
# Test that -g enables dwarf info in object files and linked wasm
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-o', output, '-g'] + args)
def verify(output):
info = self.run_process([LLVM_DWARFDUMP, '--all', output], stdout=PIPE).stdout
self.assertIn('DW_TAG_subprogram', info) # Ensure there's a subprogram entry in .debug_info
self.assertIn('debug_line[0x', info) # Ensure there's a line table
compile_with_dwarf(['-c'], 'a.o')
verify('a.o')
compile_with_dwarf([], 'a.js')
verify('a.wasm')
@unittest.skipIf(not scons_path, 'scons not found in PATH')
@with_env_modify({'EMSCRIPTEN_ROOT': path_from_root()})
def test_scons(self):
# this test copies the site_scons directory alongside the test
shutil.copytree(path_from_root('tests', 'scons'), 'test')
shutil.copytree(path_from_root('tools', 'scons', 'site_scons'), os.path.join('test', 'site_scons'))
with utils.chdir('test'):
self.run_process(['scons'])
output = self.run_js('scons_integration.js', assert_returncode=5)
self.assertContained('If you see this - the world is all right!', output)
@unittest.skipIf(not scons_path, 'scons not found in PATH')
@with_env_modify({'EMSCRIPTEN_TOOLPATH': path_from_root('tools', 'scons', 'site_scons'),
'EMSCRIPTEN_ROOT': path_from_root()})
def test_emscons(self):
# uses the emscons wrapper which requires EMSCRIPTEN_TOOLPATH to find
# site_scons
shutil.copytree(path_from_root('tests', 'scons'), 'test')
with utils.chdir('test'):
self.run_process([path_from_root('emscons'), 'scons'])
output = self.run_js('scons_integration.js', assert_returncode=5)
self.assertContained('If you see this - the world is all right!', output)
def test_embind_fail(self):
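    # Without --bind the embind registration symbols (such as
    # _embind_register_function) are not provided, so this link should fail.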
out = self.expect_fail([EMCC, path_from_root('tests', 'embind', 'test_unsigned.cpp')])
self.assertContained("undefined symbol: _embind_register_function", out)
def test_embind_asyncify(self):
create_test_file('post.js', '''
addOnPostRun(function() {
Module.sleep(10);
out('done');
});
''')
create_test_file('main.cpp', r'''
#include <emscripten.h>
#include <emscripten/bind.h>
using namespace emscripten;
EMSCRIPTEN_BINDINGS(asyncify) {
function("sleep", &emscripten_sleep);
}
''')
self.run_process([EMCC, 'main.cpp', '--bind', '-s', 'ASYNCIFY=1', '--post-js', 'post.js'])
self.assertContained('done', self.run_js('a.out.js'))
@is_slow_test
def test_embind(self):
environ = os.environ.copy()
environ['EMCC_CLOSURE_ARGS'] = environ.get('EMCC_CLOSURE_ARGS', '') + " --externs " + pipes.quote(path_from_root('tests', 'embind', 'underscore-externs.js'))
test_cases = [
(['--bind']),
(['--bind', '-O1']),
(['--bind', '-O2']),
(['--bind', '-O2', '-s', 'ALLOW_MEMORY_GROWTH=1', path_from_root('tests', 'embind', 'isMemoryGrowthEnabled=true.cpp')]),
]
without_utf8_args = ['-s', 'EMBIND_STD_STRING_IS_UTF8=0']
test_cases_without_utf8 = []
for args in test_cases:
test_cases_without_utf8.append((args + without_utf8_args))
test_cases += test_cases_without_utf8
test_cases.extend([(args[:] + ['-s', 'DYNAMIC_EXECUTION=0']) for args in test_cases])
# closure compiler doesn't work with DYNAMIC_EXECUTION=0
test_cases.append((['--bind', '-O2', '--closure', '1']))
for args in test_cases:
print(args)
self.clear()
testFiles = [
path_from_root('tests', 'embind', 'underscore-1.4.2.js'),
path_from_root('tests', 'embind', 'imvu_test_adapter.js'),
path_from_root('tests', 'embind', 'embind.test.js'),
]
self.run_process(
[EMCC, path_from_root('tests', 'embind', 'embind_test.cpp'),
'--pre-js', path_from_root('tests', 'embind', 'test.pre.js'),
'--post-js', path_from_root('tests', 'embind', 'test.post.js'),
'-s', 'WASM_ASYNC_COMPILATION=0',
'-s', 'IN_TEST_HARNESS=1'] + args,
env=environ)
if 'DYNAMIC_EXECUTION=0' in args:
with open('a.out.js') as js_binary_file:
js_binary_str = js_binary_file.read()
self.assertNotContained('new Function(', js_binary_str)
self.assertNotContained('eval(', js_binary_str)
with open('a.out.js', 'ab') as f:
for tf in testFiles:
f.write(open(tf, 'rb').read())
output = self.run_js('a.out.js')
self.assertNotContained('FAIL', output)
def test_emconfig(self):
output = self.run_process([emconfig, 'LLVM_ROOT'], stdout=PIPE).stdout.strip()
self.assertEqual(output, config.LLVM_ROOT)
# EMSCRIPTEN_ROOT is kind of special since it should always report the location of em-config
# itself (it's not configurable via the config file but derived from the location of arg0)
output = self.run_process([emconfig, 'EMSCRIPTEN_ROOT'], stdout=PIPE).stdout.strip()
self.assertEqual(output, os.path.dirname(emconfig))
invalid = 'Usage: em-config VAR_NAME'
# Don't accept variables that do not exist
output = self.expect_fail([emconfig, 'VAR_WHICH_DOES_NOT_EXIST']).strip()
self.assertEqual(output, invalid)
# Don't accept no arguments
output = self.expect_fail([emconfig]).strip()
self.assertEqual(output, invalid)
# Don't accept more than one variable
output = self.expect_fail([emconfig, 'LLVM_ROOT', 'EMCC']).strip()
self.assertEqual(output, invalid)
# Don't accept arbitrary python code
output = self.expect_fail([emconfig, 'sys.argv[1]']).strip()
self.assertEqual(output, invalid)
def test_link_s(self):
# -s OPT=VALUE can conflict with -s as a linker option. We warn and ignore
create_test_file('main.cpp', r'''
extern "C" {
void something();
}
int main() {
something();
return 0;
}
''')
create_test_file('supp.cpp', r'''
#include <stdio.h>
extern "C" {
void something() {
printf("yello\n");
}
}
''')
self.run_process([EMCC, '-c', 'main.cpp', '-o', 'main.o'])
self.run_process([EMCC, '-c', 'supp.cpp', '-o', 'supp.o'])
self.run_process([EMCC, 'main.o', '-s', 'supp.o', '-s', 'SAFE_HEAP=1'])
self.assertContained('yello', self.run_js('a.out.js'))
# Check that the valid -s option had an effect
self.assertContained('SAFE_HEAP', open('a.out.js').read())
def test_conftest_s_flag_passing(self):
create_test_file('conftest.c', r'''
int main() {
return 0;
}
''')
with env_modify({'EMMAKEN_JUST_CONFIGURE': '1'}):
cmd = [EMCC, '-s', 'ASSERTIONS=1', 'conftest.c', '-o', 'conftest']
output = self.run_process(cmd, stderr=PIPE)
self.assertNotContained('emcc: warning: treating -s as linker option', output.stderr)
self.assertExists('conftest')
def test_file_packager(self):
ensure_dir('subdir')
create_test_file('data1.txt', 'data1')
os.chdir('subdir')
create_test_file('data2.txt', 'data2')
# a relative path that reaches below the current directory is invalid
stderr = self.expect_fail([FILE_PACKAGER, 'test.data', '--preload', '../data1.txt'])
self.assertContained('below the current directory', stderr)
# relative path that ends up under us is cool
proc = self.run_process([FILE_PACKAGER, 'test.data', '--preload', '../subdir/data2.txt'], stderr=PIPE, stdout=PIPE)
self.assertGreater(len(proc.stdout), 0)
self.assertNotContained('below the current directory', proc.stderr)
# direct path leads to the same code being generated - relative path does not make us do anything different
proc2 = self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'data2.txt'], stderr=PIPE, stdout=PIPE)
self.assertGreater(len(proc2.stdout), 0)
self.assertNotContained('below the current directory', proc2.stderr)
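# the two outputs should be identical except for per-invocation data (the package UUID and
# the loadPackage call), so strip those lines before comparing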
def clean(txt):
lines = txt.splitlines()
lines = [l for l in lines if 'PACKAGE_UUID' not in l and 'loadPackage({' not in l]
return ''.join(lines)
self.assertTextDataIdentical(clean(proc.stdout), clean(proc2.stdout))
# verify '--separate-metadata' option produces separate metadata file
os.chdir('..')
self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'data1.txt', '--preload', 'subdir/data2.txt', '--js-output=immutable.js', '--separate-metadata'])
self.assertExists('immutable.js.metadata')
# verify js output JS file is not touched when the metadata is separated
orig_timestamp = os.path.getmtime('immutable.js')
orig_content = open('immutable.js').read()
# ensure some time passes before running the packager again so that if it does touch the
# js file it will end up with a different timestamp.
time.sleep(1.0)
self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'data1.txt', '--preload', 'subdir/data2.txt', '--js-output=immutable.js', '--separate-metadata'])
# assert both file content and timestamp are the same as reference copy
self.assertTextDataIdentical(orig_content, open('immutable.js').read())
self.assertEqual(orig_timestamp, os.path.getmtime('immutable.js'))
# verify the content of metadata file is correct
with open('immutable.js.metadata') as f:
metadata = json.load(f)
self.assertEqual(len(metadata['files']), 2)
assert metadata['files'][0]['start'] == 0 and metadata['files'][0]['end'] == len('data1') and metadata['files'][0]['filename'] == '/data1.txt'
assert metadata['files'][1]['start'] == len('data1') and metadata['files'][1]['end'] == len('data1') + len('data2') and metadata['files'][1]['filename'] == '/subdir/data2.txt'
assert metadata['remote_package_size'] == len('data1') + len('data2')
# we can only assert that the uuid format is correct; the uuid's value is expected to differ between invocations
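# uuid.UUID() raises ValueError if the string is not a valid version-4 UUID, which would fail the test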
uuid.UUID(metadata['package_uuid'], version=4)
def test_file_packager_unicode(self):
unicode_name = 'unicode…☃'
try:
ensure_dir(unicode_name)
except OSError:
print("we failed to even create a unicode dir, so on this OS, we can't test this")
return
full = os.path.join(unicode_name, 'data.txt')
create_test_file(full, 'data')
proc = self.run_process([FILE_PACKAGER, 'test.data', '--preload', full], stdout=PIPE, stderr=PIPE)
assert len(proc.stdout), proc.stderr
assert unicode_name in proc.stdout, proc.stdout
print(len(proc.stderr))
def test_file_packager_mention_FORCE_FILESYSTEM(self):
MESSAGE = 'Remember to build the main file with -s FORCE_FILESYSTEM=1 so that it includes support for loading this file package'
create_test_file('data.txt', 'data1')
# mention when running standalone
err = self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'data.txt'], stdout=PIPE, stderr=PIPE).stderr
self.assertContained(MESSAGE, err)
# do not mention from emcc
err = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--preload-file', 'data.txt'], stdout=PIPE, stderr=PIPE).stderr
self.assertEqual(len(err), 0)
def test_file_packager_returns_error_if_target_equal_to_jsoutput(self):
MESSAGE = 'error: TARGET should not be the same value of --js-output'
result = self.run_process([FILE_PACKAGER, 'test.data', '--js-output=test.data'], check=False, stdout=PIPE, stderr=PIPE)
self.assertEqual(result.returncode, 1)
self.assertContained(MESSAGE, result.stderr)
def test_headless(self):
shutil.copyfile(path_from_root('tests', 'screenshot.png'), 'example.png')
self.run_process([EMCC, path_from_root('tests', 'sdl_headless.c'), '-s', 'HEADLESS=1'])
output = self.run_js('a.out.js')
assert '''Init: 0
Font: 0x1
Sum: 0
you should see two lines of text in different colors and a blue rectangle
SDL_Quit called (and ignored)
done.
''' in output, output
def test_preprocess(self):
# Pass -Werror to prevent regressions such as https://github.com/emscripten-core/emscripten/pull/9661
out = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-E', '-Werror'], stdout=PIPE).stdout
self.assertNotExists('a.out.js')
self.assertNotExists('a.out')
# Test explicitly that the output contains a line typically written by the preprocessor.
self.assertContained('# 1 ', out)
self.assertContained('hello_world.c"', out)
self.assertContained('printf("hello, world!', out)
def test_preprocess_multi(self):
out = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), path_from_root('tests', 'hello_world.c'), '-E'], stdout=PIPE).stdout
self.assertEqual(out.count('printf("hello, world!'), 2)
def test_syntax_only_valid(self):
result = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-fsyntax-only'], stdout=PIPE, stderr=STDOUT)
self.assertEqual(result.stdout, '')
self.assertNotExists('a.out.js')
def test_syntax_only_invalid(self):
create_test_file('src.c', 'int main() {')
result = self.run_process([EMCC, 'src.c', '-fsyntax-only'], stdout=PIPE, check=False, stderr=STDOUT)
self.assertNotEqual(result.returncode, 0)
self.assertContained("src.c:1:13: error: expected '}'", result.stdout)
self.assertNotExists('a.out.js')
def test_demangle(self):
create_test_file('src.cpp', '''
#include <stdio.h>
#include <emscripten.h>
void two(char c) {
EM_ASM(out(stackTrace()));
}
void one(int x) {
two(x % 17);
}
int main() {
EM_ASM(out(demangle('__Znwm'))); // check for no aborts
EM_ASM(out(demangle('_main')));
EM_ASM(out(demangle('__Z2f2v')));
EM_ASM(out(demangle('__Z12abcdabcdabcdi')));
EM_ASM(out(demangle('__ZL12abcdabcdabcdi')));
EM_ASM(out(demangle('__Z4testcsifdPvPiPc')));
EM_ASM(out(demangle('__ZN4test5moarrEcslfdPvPiPc')));
EM_ASM(out(demangle('__ZN4Waka1f12a234123412345pointEv')));
EM_ASM(out(demangle('__Z3FooIiEvv')));
EM_ASM(out(demangle('__Z3FooIidEvi')));
EM_ASM(out(demangle('__ZN3Foo3BarILi5EEEvv')));
EM_ASM(out(demangle('__ZNK10__cxxabiv120__si_class_type_info16search_below_dstEPNS_19__dynamic_cast_infoEPKvib')));
EM_ASM(out(demangle('__Z9parsewordRPKciRi')));
EM_ASM(out(demangle('__Z5multiwahtjmxyz')));
EM_ASM(out(demangle('__Z1aA32_iPA5_c')));
EM_ASM(out(demangle('__ZN21FWakaGLXFleeflsMarfooC2EjjjPKvbjj')));
EM_ASM(out(demangle('__ZN5wakaw2Cm10RasterBaseINS_6watwat9PolocatorEE8merbine1INS4_2OREEEvPKjj'))); // we get this wrong, but at least emit a '?'
one(17);
return 0;
}
''')
# full demangle support
self.run_process([EMCC, 'src.cpp', '-s', 'DEMANGLE_SUPPORT=1'])
output = self.run_js('a.out.js')
self.assertContained('''operator new(unsigned long)
_main
f2()
abcdabcdabcd(int)
abcdabcdabcd(int)
test(char, short, int, float, double, void*, int*, char*)
test::moarr(char, short, long, float, double, void*, int*, char*)
Waka::f::a23412341234::point()
void Foo<int>()
void Foo<int, double>(int)
void Foo::Bar<5>()
__cxxabiv1::__si_class_type_info::search_below_dst(__cxxabiv1::__dynamic_cast_info*, void const*, int, bool) const
parseword(char const*&, int, int&)
multi(wchar_t, signed char, unsigned char, unsigned short, unsigned int, unsigned long, long long, unsigned long long, ...)
a(int [32], char (*) [5])
FWakaGLXFleeflsMarfoo::FWakaGLXFleeflsMarfoo(unsigned int, unsigned int, unsigned int, void const*, bool, unsigned int, unsigned int)
void wakaw::Cm::RasterBase<wakaw::watwat::Polocator>::merbine1<wakaw::Cm::RasterBase<wakaw::watwat::Polocator>::OR>(unsigned int const*, unsigned int)
''', output)
# test for multiple functions in one stack trace
self.run_process([EMCC, 'src.cpp', '-s', 'DEMANGLE_SUPPORT=1', '-g'])
output = self.run_js('a.out.js')
self.assertIn('one(int)', output)
self.assertIn('two(char)', output)
def test_demangle_cpp(self):
create_test_file('src.cpp', '''
#include <stdio.h>
#include <emscripten.h>
#include <cxxabi.h>
#include <assert.h>
int main() {
char out[256];
int status = 1;
size_t length = 255;
abi::__cxa_demangle("_ZN4Waka1f12a234123412345pointEv", out, &length, &status);
assert(status == 0);
printf("%s\\n", out);
return 0;
}
''')
self.run_process([EMCC, 'src.cpp'])
output = self.run_js('a.out.js')
self.assertContained('Waka::f::a23412341234::point()', output)
# Test that malloc() -> OOM -> abort() -> stackTrace() -> jsStackTrace() -> demangleAll() -> demangle() -> malloc()
# cycle will not produce an infinite loop.
def test_demangle_malloc_infinite_loop_crash(self):
self.run_process([EMXX, path_from_root('tests', 'malloc_demangle_infinite_loop.cpp'), '-g', '-s', 'ABORTING_MALLOC=1', '-s', 'DEMANGLE_SUPPORT=1'])
output = self.run_js('a.out.js', assert_returncode=NON_ZERO)
if output.count('Cannot enlarge memory arrays') > 4:
print(output)
self.assertLess(output.count('Cannot enlarge memory arrays'), 5)
def test_module_exports_with_closure(self):
# This test checks that module.exports is retained when JavaScript is minified by compiling with --closure 1
# This is important because if module.exports is not present the Module object will not be visible to node.js
# Run with ./runner.py other.test_module_exports_with_closure
# First make sure test.js isn't present.
self.clear()
# compile with -O2 --closure 0
self.run_process([EMCC, path_from_root('tests', 'Module-exports', 'test.c'),
'-o', 'test.js', '-O2', '--closure', '0',
'--pre-js', path_from_root('tests', 'Module-exports', 'setup.js'),
'-s', 'EXPORTED_FUNCTIONS=["_bufferTest","_malloc","_free"]',
'-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["ccall", "cwrap"]',
'-s', 'WASM_ASYNC_COMPILATION=0'])
# Check that compilation was successful
self.assertExists('test.js')
test_js_closure_0 = open('test.js').read()
# Check that test.js compiled with --closure 0 contains "module['exports'] = Module;"
assert ("module['exports'] = Module;" in test_js_closure_0) or ('module["exports"]=Module' in test_js_closure_0) or ('module["exports"] = Module;' in test_js_closure_0)
# Check that main.js (which requires test.js) completes successfully when run in node.js
# in order to check that the exports are indeed functioning correctly.
shutil.copyfile(path_from_root('tests', 'Module-exports', 'main.js'), 'main.js')
if config.NODE_JS in config.JS_ENGINES:
self.assertContained('bufferTest finished', self.run_js('main.js'))
# Delete test.js again and check it's gone.
try_delete('test.js')
self.assertNotExists('test.js')
# compile with -O2 --closure 1
self.run_process([EMCC, path_from_root('tests', 'Module-exports', 'test.c'),
'-o', 'test.js', '-O2', '--closure', '1',
'--pre-js', path_from_root('tests', 'Module-exports', 'setup.js'),
'-s', 'EXPORTED_FUNCTIONS=["_bufferTest","_malloc","_free"]',
'-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["ccall", "cwrap"]',
'-s', 'WASM_ASYNC_COMPILATION=0'])
# Check that compilation was successful
self.assertExists('test.js')
test_js_closure_1 = open('test.js').read()
# Check that test.js compiled with --closure 1 contains "module.exports", we want to verify that
# "module['exports']" got minified to "module.exports" when compiling with --closure 1
self.assertContained("module.exports", test_js_closure_1)
# Check that main.js (which requires test.js) completes successfully when run in node.js
# in order to check that the exports are indeed functioning correctly.
if config.NODE_JS in config.JS_ENGINES:
self.assertContained('bufferTest finished', self.run_js('main.js', engine=config.NODE_JS))
def test_node_catch_exit(self):
# Test that in node.js exceptions are not caught if NODEJS_CATCH_EXIT=0
if config.NODE_JS not in config.JS_ENGINES:
return
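# by default (NODEJS_CATCH_EXIT=1) the generated node shell code catches unhandled exceptions
# so it can report an exit code, which masks where the original error was thrown from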
create_test_file('count.c', '''
#include <string.h>
int count(const char *str) {
return (int)strlen(str);
}
''')
create_test_file('index.js', '''
const count = require('./count.js');
console.log(xxx); //< here is the ReferenceError
''')
reference_error_text = 'console.log(xxx); //< here is the ReferenceError'
self.run_process([EMCC, 'count.c', '-o', 'count.js'])
# Check that the ReferenceError is caught and rethrown and thus the original error line is masked
self.assertNotContained(reference_error_text,
self.run_js('index.js', engine=config.NODE_JS, assert_returncode=NON_ZERO))
self.run_process([EMCC, 'count.c', '-o', 'count.js', '-s', 'NODEJS_CATCH_EXIT=0'])
# Check that the ReferenceError is not caught, so we see the error properly
self.assertContained(reference_error_text,
self.run_js('index.js', engine=config.NODE_JS, assert_returncode=NON_ZERO))
def test_extra_exported_methods(self):
# Test with node.js that the EXTRA_EXPORTED_RUNTIME_METHODS setting is considered by libraries
if config.NODE_JS not in config.JS_ENGINES:
self.skipTest("node engine required for this test")
create_test_file('count.c', '''
#include <string.h>
int count(const char *str) {
return (int)strlen(str);
}
''')
create_test_file('index.js', '''
const count = require('./count.js');
console.log(count.FS_writeFile);
''')
reference_error_text = 'undefined'
self.run_process([EMCC, 'count.c', '-s', 'FORCE_FILESYSTEM=1', '-s',
'EXTRA_EXPORTED_RUNTIME_METHODS=["FS_writeFile"]', '-o', 'count.js'])
# Check that Module.FS_writeFile exists
self.assertNotContained(reference_error_text,
self.run_js('index.js', engine=config.NODE_JS))
self.run_process([EMCC, 'count.c', '-s', 'FORCE_FILESYSTEM=1', '-o', 'count.js'])
# Check that Module.FS_writeFile is not exported
out = self.run_js('index.js', engine=config.NODE_JS)
self.assertContained(reference_error_text, out)
def test_fs_stream_proto(self):
open('src.cpp', 'wb').write(br'''
#include <stdio.h>
#include <fcntl.h>
#include <unistd.h>
#include <sys/stat.h>
#include <errno.h>
#include <string.h>
int main()
{
long file_size = 0;
int h = open("src.cpp", O_RDONLY, 0666);
if (0 != h)
{
FILE* file = fdopen(h, "rb");
if (0 != file)
{
fseek(file, 0, SEEK_END);
file_size = ftell(file);
fseek(file, 0, SEEK_SET);
}
else
{
printf("fdopen() failed: %s\n", strerror(errno));
return 10;
}
close(h);
printf("File size: %ld\n", file_size);
}
else
{
printf("open() failed: %s\n", strerror(errno));
return 10;
}
return 0;
}
''')
self.run_process([EMCC, 'src.cpp', '--embed-file', 'src.cpp'])
for engine in config.JS_ENGINES:
out = self.run_js('a.out.js', engine=engine)
self.assertContained('File size: 724', out)
def test_node_emscripten_num_logical_cores(self):
# Test with node.js that the emscripten_num_logical_cores method is working
create_test_file('src.cpp', r'''
#include <emscripten/threading.h>
#include <stdio.h>
#include <assert.h>
int main() {
int num = emscripten_num_logical_cores();
assert(num != 0);
puts("ok");
}
''')
self.run_process([EMCC, 'src.cpp', '-s', 'USE_PTHREADS=1', '-s', 'ENVIRONMENT=node'])
ret = self.run_process(config.NODE_JS + ['--experimental-wasm-threads', 'a.out.js'], stdout=PIPE).stdout
self.assertContained('ok', ret)
def test_proxyfs(self):
# This test assumes that 3 different programs share the same directory and files.
# The same JS object must not be used for each of them,
# but node's 'require' function caches JS objects.
# If we just loaded the same js-file multiple times like the following code,
# these programs (m0,m1,m2) would share the same JS object.
#
# var m0 = require('./proxyfs_test.js');
# var m1 = require('./proxyfs_test.js');
# var m2 = require('./proxyfs_test.js');
#
# To separate the js objects for each of them, the following 'require' calls use different js-files.
#
# var m0 = require('./proxyfs_test.js');
# var m1 = require('./proxyfs_test1.js');
# var m2 = require('./proxyfs_test2.js');
#
create_test_file('proxyfs_test_main.js', r'''
var m0 = require('./proxyfs_test.js');
var m1 = require('./proxyfs_test1.js');
var m2 = require('./proxyfs_test2.js');
var section;
function print(str){
process.stdout.write(section+":"+str+":");
}
m0.FS.mkdir('/working');
m0.FS.mount(m0.PROXYFS,{root:'/',fs:m1.FS},'/working');
m0.FS.mkdir('/working2');
m0.FS.mount(m0.PROXYFS,{root:'/',fs:m2.FS},'/working2');
section = "child m1 reads and writes local file.";
print("m1 read embed");
m1.ccall('myreade','number',[],[]);
print("m1 write");console.log("");
m1.ccall('mywrite0','number',['number'],[1]);
print("m1 read");
m1.ccall('myread0','number',[],[]);
section = "child m2 reads and writes local file.";
print("m2 read embed");
m2.ccall('myreade','number',[],[]);
print("m2 write");console.log("");
m2.ccall('mywrite0','number',['number'],[2]);
print("m2 read");
m2.ccall('myread0','number',[],[]);
section = "child m1 reads local file.";
print("m1 read");
m1.ccall('myread0','number',[],[]);
section = "parent m0 reads and writes local and children's file.";
print("m0 read embed");
m0.ccall('myreade','number',[],[]);
print("m0 read m1");
m0.ccall('myread1','number',[],[]);
print("m0 read m2");
m0.ccall('myread2','number',[],[]);
section = "m0,m1 and m2 verify local files.";
print("m0 write");console.log("");
m0.ccall('mywrite0','number',['number'],[0]);
print("m0 read");
m0.ccall('myread0','number',[],[]);
print("m1 read");
m1.ccall('myread0','number',[],[]);
print("m2 read");
m2.ccall('myread0','number',[],[]);
print("m0 read embed");
m0.ccall('myreade','number',[],[]);
print("m1 read embed");
m1.ccall('myreade','number',[],[]);
print("m2 read embed");
m2.ccall('myreade','number',[],[]);
section = "parent m0 writes and reads children's files.";
print("m0 write m1");console.log("");
m0.ccall('mywrite1','number',[],[]);
print("m0 read m1");
m0.ccall('myread1','number',[],[]);
print("m0 write m2");console.log("");
m0.ccall('mywrite2','number',[],[]);
print("m0 read m2");
m0.ccall('myread2','number',[],[]);
print("m1 read");
m1.ccall('myread0','number',[],[]);
print("m2 read");
m2.ccall('myread0','number',[],[]);
print("m0 read m0");
m0.ccall('myread0','number',[],[]);
''')
create_test_file('proxyfs_pre.js', r'''
if (typeof Module === 'undefined') Module = {};
Module["noInitialRun"]=true;
noExitRuntime=true;
''')
create_test_file('proxyfs_embed.txt', 'test\n')
create_test_file('proxyfs_test.c', r'''
#include <stdio.h>
#include <emscripten/emscripten.h>
EMSCRIPTEN_KEEPALIVE int mywrite1() {
FILE* out = fopen("/working/hoge.txt","w");
fprintf(out,"test1\n");
fclose(out);
return 0;
}
EMSCRIPTEN_KEEPALIVE int myread1() {
FILE* in = fopen("/working/hoge.txt","r");
char buf[1024];
int len;
if(in==NULL)
printf("open failed\n");
while(! feof(in)){
if(fgets(buf,sizeof(buf),in)==buf){
printf("%s",buf);
}
}
fclose(in);
return 0;
}
EMSCRIPTEN_KEEPALIVE int mywrite2() {
FILE* out = fopen("/working2/hoge.txt","w");
fprintf(out,"test2\n");
fclose(out);
return 0;
}
EMSCRIPTEN_KEEPALIVE int myread2() {
FILE* in = fopen("/working2/hoge.txt","r");
char buf[1024];
int len;
if(in==NULL)
printf("open failed\n");
while(! feof(in)){
if(fgets(buf,sizeof(buf),in)==buf){
printf("%s",buf);
}
}
fclose(in);
return 0;
}
EMSCRIPTEN_KEEPALIVE int mywrite0(int i) {
FILE* out = fopen("hoge.txt","w");
fprintf(out,"test0_%d\n",i);
fclose(out);
return 0;
}
EMSCRIPTEN_KEEPALIVE int myread0() {
FILE* in = fopen("hoge.txt","r");
char buf[1024];
int len;
if(in==NULL)
printf("open failed\n");
while(! feof(in)){
if(fgets(buf,sizeof(buf),in)==buf){
printf("%s",buf);
}
}
fclose(in);
return 0;
}
EMSCRIPTEN_KEEPALIVE int myreade() {
FILE* in = fopen("proxyfs_embed.txt","r");
char buf[1024];
int len;
if(in==NULL)
printf("open failed\n");
while(! feof(in)){
if(fgets(buf,sizeof(buf),in)==buf){
printf("%s",buf);
}
}
fclose(in);
return 0;
}
''')
self.run_process([EMCC,
'-o', 'proxyfs_test.js', 'proxyfs_test.c',
'--embed-file', 'proxyfs_embed.txt', '--pre-js', 'proxyfs_pre.js',
'-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["ccall", "cwrap", "FS", "PROXYFS"]',
'-lproxyfs.js',
'-s', 'WASM_ASYNC_COMPILATION=0'])
# The following shutil.copyfile calls just prevent node.js's 'require' from caching the js objects.
# See https://nodejs.org/api/modules.html
shutil.copyfile('proxyfs_test.js', 'proxyfs_test1.js')
shutil.copyfile('proxyfs_test.js', 'proxyfs_test2.js')
out = self.run_js('proxyfs_test_main.js')
section = "child m1 reads and writes local file."
self.assertContained(section + ":m1 read embed:test", out)
self.assertContained(section + ":m1 write:", out)
self.assertContained(section + ":m1 read:test0_1", out)
section = "child m2 reads and writes local file."
self.assertContained(section + ":m2 read embed:test", out)
self.assertContained(section + ":m2 write:", out)
self.assertContained(section + ":m2 read:test0_2", out)
section = "child m1 reads local file."
self.assertContained(section + ":m1 read:test0_1", out)
section = "parent m0 reads and writes local and children's file."
self.assertContained(section + ":m0 read embed:test", out)
self.assertContained(section + ":m0 read m1:test0_1", out)
self.assertContained(section + ":m0 read m2:test0_2", out)
section = "m0,m1 and m2 verify local files."
self.assertContained(section + ":m0 write:", out)
self.assertContained(section + ":m0 read:test0_0", out)
self.assertContained(section + ":m1 read:test0_1", out)
self.assertContained(section + ":m2 read:test0_2", out)
self.assertContained(section + ":m0 read embed:test", out)
self.assertContained(section + ":m1 read embed:test", out)
self.assertContained(section + ":m2 read embed:test", out)
section = "parent m0 writes and reads children's files."
self.assertContained(section + ":m0 write m1:", out)
self.assertContained(section + ":m0 read m1:test1", out)
self.assertContained(section + ":m0 write m2:", out)
self.assertContained(section + ":m0 read m2:test2", out)
self.assertContained(section + ":m1 read:test1", out)
self.assertContained(section + ":m2 read:test2", out)
self.assertContained(section + ":m0 read m0:test0_0", out)
def test_dependency_file(self):
# Issue 1732: -MMD (and friends) create dependency files that need to be
# copied from the temporary directory.
create_test_file('test.cpp', r'''
#include "test.hpp"
void my_function()
{
}
''')
create_test_file('test.hpp', r'''
void my_function();
''')
self.run_process([EMCC, '-MMD', '-c', 'test.cpp', '-o', 'test.o'])
self.assertExists('test.d')
deps = open('test.d').read()
# Look for ': ' instead of just ':' to not confuse C:\path\ notation with make "target: deps" rule. Not perfect, but good enough for this test.
head, tail = deps.split(': ', 2)
self.assertContained('test.o', head)
self.assertContained('test.cpp', tail)
self.assertContained('test.hpp', tail)
def test_dependency_file_2(self):
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'a.c')
self.run_process([EMCC, 'a.c', '-MMD', '-MF', 'test.d', '-c'])
self.assertContained('a.o: a.c\n', open('test.d').read())
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'a.c')
self.run_process([EMCC, 'a.c', '-MMD', '-MF', 'test2.d', '-c', '-o', 'test.o'])
self.assertContained('test.o: a.c\n', open('test2.d').read())
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'a.c')
ensure_dir('obj')
self.run_process([EMCC, 'a.c', '-MMD', '-MF', 'test3.d', '-c', '-o', 'obj/test.o'])
self.assertContained('obj/test.o: a.c\n', open('test3.d').read())
def test_compilation_database(self):
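# -MJ asks clang to emit a compilation-database fragment (one JSON object per compiled
# source) of the kind consumed by clang tooling; here we only check its basic contents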
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'a.c')
self.run_process([EMCC, 'a.c', '-MJ', 'hello.json', '-c', '-o', 'test.o'])
self.assertContained('"file": "a.c", "output": "test.o"', open('hello.json').read())
def test_js_lib_quoted_key(self):
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
__internal_data:{
'<' : 0,
'white space' : 1
},
printf__deps: ['__internal_data', 'fprintf']
});
''')
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '--js-library', 'lib.js'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_js_lib_exported(self):
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
jslibfunc: function(x) { return 2 * x }
});
''')
create_test_file('src.cpp', r'''
#include <emscripten.h>
#include <stdio.h>
extern "C" int jslibfunc(int x);
int main() {
printf("c calling: %d\n", jslibfunc(6));
EM_ASM({
out('js calling: ' + Module['_jslibfunc'](5) + '.');
});
}
''')
self.run_process([EMCC, 'src.cpp', '--js-library', 'lib.js', '-s', 'EXPORTED_FUNCTIONS=["_main", "_jslibfunc"]'])
self.assertContained('c calling: 12\njs calling: 10.', self.run_js('a.out.js'))
def test_js_lib_using_asm_lib(self):
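# JS library conventions used below, as we understand them: <name>__deps lists other library
# symbols to pull in, <name>__sig declares the function's signature, and <name>__asm marks the
# function as one to be placed in the (w)asm module rather than in plain JS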
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
jslibfunc__deps: ['asmlibfunc'],
jslibfunc: function(x) {
return 2 * _asmlibfunc(x);
},
asmlibfunc__asm: true,
asmlibfunc__sig: 'ii',
asmlibfunc: function(x) {
x = x | 0;
return x + 1 | 0;
}
});
''')
create_test_file('src.cpp', r'''
#include <stdio.h>
extern "C" int jslibfunc(int x);
int main() {
printf("c calling: %d\n", jslibfunc(6));
}
''')
self.run_process([EMCC, 'src.cpp', '--js-library', 'lib.js'])
self.assertContained('c calling: 14\n', self.run_js('a.out.js'))
def test_EMCC_BUILD_DIR(self):
# The EMCC_BUILD_DIR env var contains the dir we were building in when running the js compiler
# (e.g. when running a js library). We force the cwd to be src/ for technical reasons, so this
# lets you find out where you were.
create_test_file('lib.js', r'''
printErr('dir was ' + process.env.EMCC_BUILD_DIR);
''')
err = self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '--js-library', 'lib.js'], stderr=PIPE).stderr
self.assertContained('dir was ' + os.path.realpath(os.path.normpath(self.get_dir())), err)
def test_float_h(self):
process = self.run_process([EMCC, path_from_root('tests', 'float+.c')], stdout=PIPE, stderr=PIPE)
assert process.returncode == 0, 'float.h should agree with our system: ' + process.stdout + '\n\n\n' + process.stderr
def test_output_is_dir(self):
ensure_dir('out_dir')
err = self.expect_fail([EMCC, '-c', path_from_root('tests', 'hello_world.c'), '-o', 'out_dir/'])
self.assertContained('error: unable to open output file', err)
def test_default_obj_ext(self):
self.run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c')])
self.assertExists('hello_world.o')
self.run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c'), '--default-obj-ext', 'obj'])
self.assertExists('hello_world.obj')
def test_doublestart_bug(self):
create_test_file('code.cpp', r'''
#include <stdio.h>
#include <emscripten.h>
void main_loop(void) {
static int cnt = 0;
if (++cnt >= 10) emscripten_cancel_main_loop();
}
int main(void) {
printf("This should only appear once.\n");
emscripten_set_main_loop(main_loop, 10, 0);
return 0;
}
''')
create_test_file('pre.js', r'''
if (!Module['preRun']) Module['preRun'] = [];
Module["preRun"].push(function () {
addRunDependency('test_run_dependency');
removeRunDependency('test_run_dependency');
});
''')
self.run_process([EMCC, 'code.cpp', '--pre-js', 'pre.js'])
output = self.run_js('a.out.js')
assert output.count('This should only appear once.') == 1, output
def test_module_print(self):
create_test_file('code.cpp', r'''
#include <stdio.h>
int main(void) {
printf("123456789\n");
return 0;
}
''')
create_test_file('pre.js', r'''
var Module = { print: function(x) { throw '<{(' + x + ')}>' } };
''')
self.run_process([EMCC, 'code.cpp', '--pre-js', 'pre.js'])
output = self.run_js('a.out.js', assert_returncode=NON_ZERO)
self.assertContained(r'<{(123456789)}>', output)
def test_precompiled_headers_warnings(self):
# Check that we don't have any underlying warnings from clang; this can happen if we
# pass any link flags when building a pch.
create_test_file('header.h', '#define X 5\n')
self.run_process([EMCC, '-Werror', '-xc++-header', 'header.h'])
def test_precompiled_headers(self):
for suffix in ['gch', 'pch']:
print(suffix)
self.clear()
create_test_file('header.h', '#define X 5\n')
self.run_process([EMCC, '-xc++-header', 'header.h', '-c'])
self.assertExists('header.h.gch') # default output is gch
if suffix != 'gch':
self.run_process([EMCC, '-xc++-header', 'header.h', '-o', 'header.h.' + suffix])
self.assertBinaryEqual('header.h.gch', 'header.h.' + suffix)
create_test_file('src.cpp', r'''
#include <stdio.h>
int main() {
printf("|%d|\n", X);
return 0;
}
''')
self.run_process([EMCC, 'src.cpp', '-include', 'header.h'])
output = self.run_js('a.out.js')
self.assertContained('|5|', output)
# also verify that the gch is actually used
err = self.run_process([EMCC, 'src.cpp', '-include', 'header.h', '-Xclang', '-print-stats'], stderr=PIPE).stderr
self.assertTextDataContained('*** PCH/Modules Loaded:\nModule: header.h.' + suffix, err)
# and sanity-check that it is not mentioned when the pch is absent
try_delete('header.h.' + suffix)
err = self.run_process([EMCC, 'src.cpp', '-include', 'header.h', '-Xclang', '-print-stats'], stderr=PIPE).stderr
self.assertNotContained('*** PCH/Modules Loaded:\nModule: header.h.' + suffix, err.replace('\r\n', '\n'))
# with specified target via -o
try_delete('header.h.' + suffix)
self.run_process([EMCC, '-xc++-header', 'header.h', '-o', 'my.' + suffix])
self.assertExists('my.' + suffix)
# -include-pch flag
self.run_process([EMCC, '-xc++-header', 'header.h', '-o', 'header.h.' + suffix])
self.run_process([EMCC, 'src.cpp', '-include-pch', 'header.h.' + suffix])
output = self.run_js('a.out.js')
self.assertContained('|5|', output)
def test_LEGACY_VM_SUPPORT(self):
# when modern features are lacking, we can polyfill them or at least warn
create_test_file('pre.js', 'Math.imul = undefined;')
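# clobbering Math.imul simulates an old JS VM that lacks it, which is what LEGACY_VM_SUPPORT polyfills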
def test(expected, opts=[]):
print(opts)
result = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--pre-js', 'pre.js'] + opts, stderr=PIPE, check=False)
if result.returncode == 0:
self.assertContained(expected, self.run_js('a.out.js', assert_returncode=0 if opts else NON_ZERO))
else:
self.assertContained(expected, result.stderr)
# when legacy is needed, we show an error indicating so
test('build with LEGACY_VM_SUPPORT')
# legacy + disabling wasm works
test('hello, world!', ['-s', 'LEGACY_VM_SUPPORT=1', '-s', 'WASM=0'])
def test_on_abort(self):
expected_output = 'Module.onAbort was called'
def add_on_abort_and_verify(extra=''):
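# prepend a Module object whose onAbort handler logs expected_output to the generated JS,
# then run it and verify that the handler actually fired when the program aborted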
with open('a.out.js') as f:
js = f.read()
with open('a.out.js', 'w') as f:
f.write("var Module = { onAbort: function() { console.log('%s') } };\n" % expected_output)
f.write(extra + '\n')
f.write(js)
self.assertContained(expected_output, self.run_js('a.out.js', assert_returncode=NON_ZERO))
# test direct abort() C call
create_test_file('src.c', '''
#include <stdlib.h>
int main() {
abort();
}
''')
self.run_process([EMCC, 'src.c', '-s', 'WASM_ASYNC_COMPILATION=0'])
add_on_abort_and_verify()
# test direct abort() JS call
create_test_file('src.c', '''
#include <emscripten.h>
int main() {
EM_ASM({ abort() });
}
''')
self.run_process([EMCC, 'src.c', '-s', 'WASM_ASYNC_COMPILATION=0'])
add_on_abort_and_verify()
# test throwing in an abort handler, and catching that
create_test_file('src.c', '''
#include <emscripten.h>
int main() {
EM_ASM({
try {
out('first');
abort();
} catch (e) {
out('second');
abort();
throw e;
}
});
}
''')
self.run_process([EMCC, 'src.c', '-s', 'WASM_ASYNC_COMPILATION=0'])
with open('a.out.js') as f:
js = f.read()
with open('a.out.js', 'w') as f:
f.write("var Module = { onAbort: function() { console.log('%s'); throw 're-throw'; } };\n" % expected_output)
f.write(js)
out = self.run_js('a.out.js', assert_returncode=NON_ZERO)
print(out)
self.assertContained(expected_output, out)
self.assertContained('re-throw', out)
self.assertContained('first', out)
self.assertContained('second', out)
self.assertEqual(out.count(expected_output), 2)
# test an abort during startup
self.run_process([EMCC, path_from_root('tests', 'hello_world.c')])
os.remove('a.out.wasm') # trigger onAbort by intentionally causing startup to fail
add_on_abort_and_verify()
def test_no_exit_runtime(self):
create_test_file('code.cpp', r'''
#include <stdio.h>
template<int x>
struct Waste {
Waste() {
printf("coming around %d\n", x);
}
~Waste() {
printf("going away %d\n", x);
}
};
Waste<1> w1;
Waste<2> w2;
Waste<3> w3;
Waste<4> w4;
Waste<5> w5;
int main(int argc, char **argv) {
return 0;
}
''')
for wasm in [0, 1]:
for no_exit in [1, 0]:
for opts in [[], ['-O1'], ['-O2', '-g2'], ['-O2', '-g2', '-flto']]:
print(wasm, no_exit, opts)
cmd = [EMCC] + opts + ['code.cpp', '-s', 'EXIT_RUNTIME=' + str(1 - no_exit), '-s', 'WASM=' + str(wasm)]
if wasm:
cmd += ['--profiling-funcs'] # for function names
self.run_process(cmd)
output = self.run_js('a.out.js')
src = open('a.out.js').read()
if wasm:
src += '\n' + self.get_wasm_text('a.out.wasm')
exit = 1 - no_exit
print(' exit:', exit, 'opts:', opts)
self.assertContained('coming around', output)
self.assertContainedIf('going away', output, exit)
# The wasm backend uses atexit to register destructors when
# constructors are called. There is currently no way to exclude
# these destructors from the wasm binary.
# TODO(sbc): Re-enabled these assertions once the wasm backend
# is able to eliminate these.
# assert ('atexit(' in src) == exit, 'atexit should not appear in src when EXIT_RUNTIME=0'
# assert ('_ZN5WasteILi2EED' in src) == exit, 'destructors should not appear if no exit:\n' + src
def test_no_exit_runtime_warnings_flush(self):
# check we warn if there is unflushed info
create_test_file('code.c', r'''
#include <stdio.h>
int main(int argc, char **argv) {
printf("hello\n");
printf("world"); // no newline, not flushed
#if FLUSH
printf("\n");
#endif
}
''')
create_test_file('code.cpp', r'''
#include <iostream>
int main() {
using namespace std;
cout << "hello" << std::endl;
cout << "world"; // no newline, not flushed
#if FLUSH
std::cout << std::endl;
#endif
}
''')
for src in ['code.c', 'code.cpp']:
for no_exit in [0, 1]:
for assertions in [0, 1]:
for flush in [0, 1]:
# TODO: also check FILESYSTEM=0 here. it never worked though, buffered output was not emitted at shutdown
print(src, no_exit, assertions, flush)
cmd = [EMCC, src, '-s', 'EXIT_RUNTIME=%d' % (1 - no_exit), '-s', 'ASSERTIONS=%d' % assertions]
if flush:
cmd += ['-DFLUSH']
self.run_process(cmd)
output = self.run_js('a.out.js')
exit = 1 - no_exit
self.assertContained('hello', output)
assert ('world' in output) == (exit or flush), 'unflushed content is shown only when exiting the runtime'
assert (no_exit and assertions and not flush) == ('stdio streams had content in them that was not flushed. you should set EXIT_RUNTIME to 1' in output), 'warning should be shown'
def test_fs_after_main(self):
for args in [[], ['-O1']]:
print(args)
self.run_process([EMCC, path_from_root('tests', 'fs_after_main.cpp')])
self.assertContained('Test passed.', self.run_js('a.out.js'))
def test_os_oz(self):
for opt in ['-O1', '-O2', '-Os', '-Oz', '-O3']:
print(opt)
proc = self.run_process([EMCC, '-v', path_from_root('tests', 'hello_world.cpp'), opt], stderr=PIPE)
self.assertContained(opt, proc.stderr)
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_oz_size(self):
sizes = {}
for name, args in [
('0', []),
('1', ['-O1']),
('2', ['-O2']),
('s', ['-Os']),
('z', ['-Oz']),
('3', ['-O3']),
]:
print(name, args)
self.clear()
self.run_process([EMCC, '-c', path_from_root('system', 'lib', 'dlmalloc.c')] + args)
sizes[name] = os.path.getsize('dlmalloc.o')
print(sizes)
opt_min = min(sizes['1'], sizes['2'], sizes['3'], sizes['s'], sizes['z'])
opt_max = max(sizes['1'], sizes['2'], sizes['3'], sizes['s'], sizes['z'])
# opt builds are all fairly close
self.assertLess(opt_max - opt_min, opt_max * 0.1)
# the unopt build is quite a bit larger
self.assertGreater(sizes['0'], (1.20 * opt_max))
@disabled('relies on fastcomp EXIT_RUNTIME=0 optimization not implemented/disabled')
def test_global_inits(self):
create_test_file('inc.h', r'''
#include <stdio.h>
template<int x>
struct Waste {
int state;
Waste() : state(10) {}
void test(int a) {
printf("%d\n", a + state);
}
~Waste() {
printf("going away %d\n", x);
}
};
Waste<3> *getMore();
''')
create_test_file('main.cpp', r'''
#include "inc.h"
Waste<1> mw1;
Waste<2> mw2;
int main(int argc, char **argv) {
printf("argc: %d\n", argc);
mw1.state += argc;
mw2.state += argc;
mw1.test(5);
mw2.test(6);
getMore()->test(0);
return 0;
}
''')
create_test_file('side.cpp', r'''
#include "inc.h"
Waste<3> sw3;
Waste<3> *getMore() {
return &sw3;
}
''')
for opts, has_global in [
(['-O2', '-g', '-s', 'EXIT_RUNTIME=1'], True),
# no-exit-runtime removes the atexits, and then globaldce can work
# its magic to remove the global initializer entirely
(['-O2', '-g'], False),
(['-Os', '-g', '-s', 'EXIT_RUNTIME=1'], True),
(['-Os', '-g'], False),
(['-O2', '-g', '-flto', '-s', 'EXIT_RUNTIME=1'], True),
(['-O2', '-g', '-flto'], False),
]:
print(opts, has_global)
self.run_process([EMCC, 'main.cpp', '-c'] + opts)
self.run_process([EMCC, 'side.cpp', '-c'] + opts)
self.run_process([EMCC, 'main.o', 'side.o'] + opts)
self.run_js('a.out.js')
src = open('a.out.js').read()
self.assertContained('argc: 1\n16\n17\n10\n', self.run_js('a.out.js'))
self.assertContainedIf('globalCtors', src, has_global)
def test_implicit_func(self):
create_test_file('src.c', r'''
#include <stdio.h>
int main()
{
printf("hello %d\n", strnlen("waka", 2)); // Implicit declaration, no header, for strnlen
int (*my_strnlen)(char*, ...) = strnlen;
printf("hello %d\n", my_strnlen("shaka", 2));
return 0;
}
''')
IMPLICIT_WARNING = "warning: implicit declaration of function 'strnlen' is invalid in C99"
IMPLICIT_ERROR = "error: implicit declaration of function 'strnlen' is invalid in C99"
INCOMPATIBLE_WARNINGS = ('warning: incompatible pointer types', 'warning: incompatible function pointer types')
for opts, expected, compile_expected in [
([], None, [IMPLICIT_ERROR]),
(['-Wno-error=implicit-function-declaration'], ['hello '], [IMPLICIT_WARNING]), # turn error into warning
(['-Wno-implicit-function-declaration'], ['hello '], []), # turn error into nothing at all (runtime output is incorrect)
]:
print(opts, expected)
try_delete('a.out.js')
stderr = self.run_process([EMCC, 'src.c'] + opts, stderr=PIPE, check=False).stderr
for ce in compile_expected + [INCOMPATIBLE_WARNINGS]:
self.assertContained(ce, stderr)
if expected is None:
self.assertNotExists('a.out.js')
else:
output = self.run_js('a.out.js')
for e in expected:
self.assertContained(e, output)
@disabled('upstream llvm produces invalid wasm for sillyfuncast2_noasm.ll')
def test_incorrect_static_call(self):
for wasm in [0, 1]:
for opts in [0, 1]:
for asserts in [0, 1]:
extra = []
if opts != 1 - asserts:
extra = ['-s', 'ASSERTIONS=' + str(asserts)]
cmd = [EMCC, path_from_root('tests', 'sillyfuncast2_noasm.ll'), '-O' + str(opts), '-s', 'WASM=' + str(wasm)] + extra
print(opts, asserts, wasm, cmd)
# Should not need to pipe stdout here but binaryen writes to stdout
# when it really should write to stderr.
stderr = self.run_process(cmd, stdout=PIPE, stderr=PIPE, check=False).stderr
if asserts:
self.assertContained('unexpected', stderr)
self.assertContained("to 'doit'", stderr)
else:
self.assertNotContained('unexpected', stderr)
self.assertNotContained("to 'doit'", stderr)
@requires_native_clang
def test_bad_triple(self):
# compile a minimal program, with as few dependencies as possible, as
# native building on CI may not always work well
create_test_file('minimal.cpp', 'int main() { return 0; }')
self.run_process([CLANG_CXX, 'minimal.cpp', '-target', 'x86_64-linux', '-c', '-emit-llvm', '-o', 'a.bc'] + clang_native.get_clang_native_args(), env=clang_native.get_clang_native_env())
# wasm backend will hard fail whereas fastcomp only warns
err = self.expect_fail([EMCC, 'a.bc'])
self.assertContained('machine type must be wasm32', err)
def test_valid_abspath(self):
# Test whether abspath warning appears
abs_include_path = os.path.abspath(self.get_dir())
err = self.run_process([EMCC, '-I%s' % abs_include_path, '-Wwarn-absolute-paths', path_from_root('tests', 'hello_world.c')], stderr=PIPE).stderr
warning = '-I or -L of an absolute path "-I%s" encountered. If this is to a local system header/library, it may cause problems (local system files make sense for compiling natively on your system, but not necessarily to JavaScript).' % abs_include_path
self.assertContained(warning, err)
# Passing an absolute path to a directory inside the emscripten tree is always ok and should not issue a warning.
abs_include_path = path_from_root('tests')
err = self.run_process([EMCC, '-I%s' % abs_include_path, '-Wwarn-absolute-paths', path_from_root('tests', 'hello_world.c')], stderr=PIPE).stderr
warning = '-I or -L of an absolute path "-I%s" encountered. If this is to a local system header/library, it may cause problems (local system files make sense for compiling natively on your system, but not necessarily to JavaScript).' % abs_include_path
self.assertNotContained(warning, err)
# Hide warning for this include path
err = self.run_process([EMCC, '--valid-abspath', abs_include_path, '-I%s' % abs_include_path, '-Wwarn-absolute-paths', path_from_root('tests', 'hello_world.c')], stderr=PIPE).stderr
self.assertNotContained(warning, err)
def test_valid_abspath_2(self):
if WINDOWS:
abs_include_path = 'C:\\nowhere\\at\\all'
else:
abs_include_path = '/nowhere/at/all'
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '--valid-abspath', abs_include_path, '-I%s' % abs_include_path]
print(' '.join(cmd))
self.run_process(cmd)
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_warn_dylibs(self):
shared_suffixes = ['.so', '.dylib', '.dll']
for suffix in ['.o', '.bc', '.so', '.dylib', '.js', '.html']:
print(suffix)
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'out' + suffix]
if suffix in ['.o', '.bc']:
cmd.append('-c')
if suffix in ['.dylib', '.so']:
cmd.append('-shared')
err = self.run_process(cmd, stderr=PIPE).stderr
warning = 'linking a library with `-shared` will emit a static object file'
self.assertContainedIf(warning, err, suffix in shared_suffixes)
def test_symbol_map(self):
UNMINIFIED_HEAP8 = 'var HEAP8 = new '
UNMINIFIED_MIDDLE = 'function middle'
for opts in [['-O2'], ['-O3']]:
for wasm in [0, 1, 2]:
print(opts, wasm)
self.clear()
create_test_file('src.c', r'''
#include <emscripten.h>
EM_JS(int, run_js, (), {
out(new Error().stack);
return 0;
});
EMSCRIPTEN_KEEPALIVE
void middle() {
if (run_js()) {
// fake recursion that is never reached, to avoid inlining in binaryen and LLVM
middle();
}
}
int main() {
EM_ASM({ _middle() });
}
''')
cmd = [EMCC, 'src.c', '--emit-symbol-map'] + opts
cmd += ['-s', 'WASM=%d' % wasm]
self.run_process(cmd)
# check that the map is correct
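# (each line of the emitted .symbols file has the form "<minified>:<original>", e.g. "b:middle")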
with open('a.out.js.symbols') as f:
symbols = f.read()
lines = [line.split(':') for line in symbols.strip().split('\n')]
minified_middle = None
for minified, full in lines:
# handle both fastcomp and wasm backend notation
if full == '_middle' or full == 'middle':
minified_middle = minified
break
self.assertNotEqual(minified_middle, None)
if wasm:
# stack traces are standardized enough that we can easily check that the
# minified name is actually in the output
stack_trace_reference = 'wasm-function[%s]' % minified_middle
out = self.run_js('a.out.js')
self.assertContained(stack_trace_reference, out)
# make sure there are no symbols in the wasm itself
wat = self.run_process([wasm_dis, 'a.out.wasm'], stdout=PIPE).stdout
for func_start in ('(func $middle', '(func $_middle'):
self.assertNotContained(func_start, wat)
# check we don't keep unnecessary debug info with wasm2js when emitting
# a symbol map
if wasm == 0 and '-O' in str(opts):
with open('a.out.js') as f:
js = f.read()
self.assertNotContained(UNMINIFIED_HEAP8, js)
self.assertNotContained(UNMINIFIED_MIDDLE, js)
# verify those patterns would exist with more debug info
self.run_process(cmd + ['--profiling-funcs'])
with open('a.out.js') as f:
js = f.read()
self.assertContained(UNMINIFIED_HEAP8, js)
self.assertContained(UNMINIFIED_MIDDLE, js)
def test_bc_to_bc(self):
# emcc should 'process' bitcode to bitcode. build systems can request this if
# e.g. they assume our 'executable' extension is bc, and compile an .o to a .bc
# (the user would then need to build bc to js of course, but we need to actually
# emit the bc)
self.run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c')])
self.assertExists('hello_world.o')
self.run_process([EMCC, '-r', 'hello_world.o', '-o', 'hello_world.bc'])
self.assertExists('hello_world.o')
self.assertExists('hello_world.bc')
def test_bad_function_pointer_cast(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
typedef int (*callback) (int, ...);
int impl(int foo) {
printf("Hello, world.\n");
return 0;
}
int main() {
volatile callback f = (callback) impl;
f(0); /* This fails with or without additional arguments. */
return 0;
}
''')
for opts in [0, 1, 2]:
for safe in [0, 1]:
for emulate_casts in [0, 1]:
for relocatable in [0, 1]:
for wasm in [0, 1]:
# wasm2js is not compatible with relocatable mode
if wasm == 0 and relocatable:
continue
cmd = [EMCC, 'src.cpp', '-O' + str(opts)]
if not wasm:
cmd += ['-s', 'WASM=0']
if safe:
cmd += ['-s', 'SAFE_HEAP']
if emulate_casts:
cmd += ['-s', 'EMULATE_FUNCTION_POINTER_CASTS']
if relocatable:
cmd += ['-s', 'RELOCATABLE'] # disables asm-optimized safe heap
print(cmd)
self.run_process(cmd)
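# the badly-cast call only "succeeds" when casts are emulated or when targeting JS (wasm2js),
# where signatures are not enforced; otherwise the wasm VM traps with a signature mismatch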
returncode = 0 if emulate_casts or wasm == 0 else NON_ZERO
output = self.run_js('a.out.js', assert_returncode=returncode)
if emulate_casts or wasm == 0:
# success!
self.assertContained('Hello, world.', output)
else:
# otherwise, the error depends on the mode we are in
# wasm trap raised by the vm
self.assertContained('function signature mismatch', output)
def test_bad_export(self):
for m in ['', ' ']:
self.clear()
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EXPORTED_FUNCTIONS=["' + m + '_main"]']
print(cmd)
stderr = self.run_process(cmd, stderr=PIPE, check=False).stderr
if m:
self.assertContained('undefined exported symbol: " _main"', stderr)
else:
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_no_dynamic_execution(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O1', '-s', 'DYNAMIC_EXECUTION=0'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
src = open('a.out.js').read()
self.assertNotContained('eval(', src)
self.assertNotContained('eval.', src)
self.assertNotContained('new Function', src)
try_delete('a.out.js')
# Test that --preload-file doesn't add a use of eval().
create_test_file('temp.txt', "foo\n")
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O1',
'-s', 'DYNAMIC_EXECUTION=0', '--preload-file', 'temp.txt'])
src = open('a.out.js').read()
self.assertNotContained('eval(', src)
self.assertNotContained('eval.', src)
self.assertNotContained('new Function', src)
try_delete('a.out.js')
# Test that -s DYNAMIC_EXECUTION=0 and -s RELOCATABLE=1 are not allowed together.
self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-O1',
'-s', 'DYNAMIC_EXECUTION=0', '-s', 'RELOCATABLE=1'])
try_delete('a.out.js')
create_test_file('test.c', r'''
#include <emscripten/emscripten.h>
int main() {
emscripten_run_script("console.log('hello from script');");
return 0;
}
''')
# Test that emscripten_run_script() aborts when -s DYNAMIC_EXECUTION=0
self.run_process([EMCC, 'test.c', '-O1', '-s', 'DYNAMIC_EXECUTION=0'])
self.assertContained('DYNAMIC_EXECUTION=0 was set, cannot eval', self.run_js('a.out.js', assert_returncode=NON_ZERO))
try_delete('a.out.js')
# Test that emscripten_run_script() posts a warning when -s DYNAMIC_EXECUTION=2
self.run_process([EMCC, 'test.c', '-O1', '-s', 'DYNAMIC_EXECUTION=2'])
self.assertContained('Warning: DYNAMIC_EXECUTION=2 was set, but calling eval in the following location:', self.run_js('a.out.js'))
self.assertContained('hello from script', self.run_js('a.out.js'))
try_delete('a.out.js')
def test_init_file_at_offset(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
int main() {
int data = 0x12345678;
FILE *f = fopen("test.dat", "wb");
fseek(f, 100, SEEK_CUR);
fwrite(&data, 4, 1, f);
fclose(f);
int data2;
f = fopen("test.dat", "rb");
fread(&data2, 4, 1, f); // should read 0s, not that int we wrote at an offset
printf("read: %d\n", data2);
fseek(f, 0, SEEK_END);
long size = ftell(f); // should be 104, not 4
fclose(f);
printf("file size is %ld\n", size);
}
''')
self.run_process([EMCC, 'src.cpp'])
self.assertContained('read: 0\nfile size is 104\n', self.run_js('a.out.js'))
def test_unlink(self):
self.do_other_test('test_unlink.cpp')
def test_argv0_node(self):
create_test_file('code.cpp', r'''
#include <stdio.h>
int main(int argc, char **argv) {
printf("I am %s.\n", argv[0]);
return 0;
}
''')
self.run_process([EMCC, 'code.cpp'])
self.assertContained('I am ' + os.path.realpath(self.get_dir()).replace('\\', '/') + '/a.out.js', self.run_js('a.out.js').replace('\\', '/'))
def test_returncode(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <stdlib.h>
int main() {
#if CALL_EXIT
exit(CODE);
#else
return CODE;
#endif
}
''')
for code in [0, 123]:
for no_exit in [0, 1]:
for call_exit in [0, 1]:
for async_compile in [0, 1]:
self.run_process([EMCC, 'src.cpp', '-DCODE=%d' % code, '-s', 'EXIT_RUNTIME=%d' % (1 - no_exit), '-DCALL_EXIT=%d' % call_exit, '-s', 'WASM_ASYNC_COMPILATION=%d' % async_compile])
for engine in config.JS_ENGINES:
# async compilation can't return a code in d8
if async_compile and engine == config.V8_ENGINE:
continue
print(code, no_exit, call_exit, async_compile, engine)
proc = self.run_process(engine + ['a.out.js'], stderr=PIPE, check=False)
# we always emit the right exit code, whether we exit the runtime or not
self.assertEqual(proc.returncode, code)
msg = 'but EXIT_RUNTIME is not set, so halting execution but not exiting the runtime or preventing further async execution (build with EXIT_RUNTIME=1, if you want a true shutdown)'
if no_exit and call_exit:
self.assertContained(msg, proc.stderr)
else:
self.assertNotContained(msg, proc.stderr)
def test_emscripten_force_exit_NO_EXIT_RUNTIME(self):
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
#if CALL_EXIT
emscripten_force_exit(0);
#endif
}
''')
for no_exit in [0, 1]:
for call_exit in [0, 1]:
self.run_process([EMCC, 'src.cpp', '-s', 'EXIT_RUNTIME=%d' % (1 - no_exit), '-DCALL_EXIT=%d' % call_exit])
print(no_exit, call_exit)
out = self.run_js('a.out.js')
assert ('emscripten_force_exit cannot actually shut down the runtime, as the build does not have EXIT_RUNTIME set' in out) == (no_exit and call_exit), out
def test_mkdir_silly(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <dirent.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
int main(int argc, char **argv) {
printf("\n");
for (int i = 1; i < argc; i++) {
printf("%d:\n", i);
int ok = mkdir(argv[i], S_IRWXU|S_IRWXG|S_IRWXO);
printf(" make %s: %d\n", argv[i], ok);
DIR *dir = opendir(argv[i]);
printf(" open %s: %d\n", argv[i], dir != NULL);
if (dir) {
struct dirent *entry;
while ((entry = readdir(dir))) {
printf(" %s, %d\n", entry->d_name, entry->d_type);
}
}
}
}
''')
self.run_process([EMCC, 'src.cpp'])
# cannot create /, can open
self.assertContained(r'''
1:
make /: -1
open /: 1
., 4
.., 4
tmp, 4
home, 4
dev, 4
proc, 4
''', self.run_js('a.out.js', args=['/']))
# cannot create empty name, cannot open
self.assertContained(r'''
1:
make : -1
open : 0
''', self.run_js('a.out.js', args=['']))
# can create unnormalized path, can open
self.assertContained(r'''
1:
make /a//: 0
open /a//: 1
., 4
.., 4
''', self.run_js('a.out.js', args=['/a//']))
# can create child unnormalized
self.assertContained(r'''
1:
make /a: 0
open /a: 1
., 4
.., 4
2:
make /a//b//: 0
open /a//b//: 1
., 4
.., 4
''', self.run_js('a.out.js', args=['/a', '/a//b//']))
def test_stat_silly(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <errno.h>
#include <sys/stat.h>
int main(int argc, char **argv) {
for (int i = 1; i < argc; i++) {
const char *path = argv[i];
struct stat path_stat;
if (stat(path, &path_stat) != 0) {
printf("Failed to stat path: %s; errno=%d\n", path, errno);
} else {
printf("ok on %s\n", path);
}
}
}
''')
self.run_process([EMCC, 'src.cpp'])
# cannot stat ""
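# (errno 44 should correspond to ENOENT under Emscripten's errno numbering)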
self.assertContained(r'''Failed to stat path: /a; errno=44
Failed to stat path: ; errno=44
''', self.run_js('a.out.js', args=['/a', '']))
def test_symlink_silly(self):
create_test_file('src.cpp', r'''
#include <dirent.h>
#include <errno.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
#include <stdio.h>
int main(int argc, char **argv) {
if (symlink(argv[1], argv[2]) != 0) {
printf("Failed to symlink paths: %s, %s; errno=%d\n", argv[1], argv[2], errno);
} else {
printf("ok\n");
}
}
''')
self.run_process([EMCC, 'src.cpp'])
# cannot symlink nonexistents
self.assertContained(r'Failed to symlink paths: , abc; errno=44', self.run_js('a.out.js', args=['', 'abc']))
self.assertContained(r'Failed to symlink paths: , ; errno=44', self.run_js('a.out.js', args=['', '']))
self.assertContained(r'ok', self.run_js('a.out.js', args=['123', 'abc']))
self.assertContained(r'Failed to symlink paths: abc, ; errno=44', self.run_js('a.out.js', args=['abc', '']))
def test_rename_silly(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <errno.h>
int main(int argc, char **argv) {
if (rename(argv[1], argv[2]) != 0) {
printf("Failed to rename paths: %s, %s; errno=%d\n", argv[1], argv[2], errno);
} else {
printf("ok\n");
}
}
''')
self.run_process([EMCC, 'src.cpp'])
# cannot rename nonexistent paths
self.assertContained(r'Failed to rename paths: , abc; errno=44', self.run_js('a.out.js', args=['', 'abc']))
self.assertContained(r'Failed to rename paths: , ; errno=44', self.run_js('a.out.js', args=['', '']))
self.assertContained(r'Failed to rename paths: 123, abc; errno=44', self.run_js('a.out.js', args=['123', 'abc']))
self.assertContained(r'Failed to rename paths: abc, ; errno=44', self.run_js('a.out.js', args=['abc', '']))
def test_readdir_r_silly(self):
create_test_file('src.cpp', r'''
#include <iostream>
#include <cstring>
#include <cerrno>
#include <unistd.h>
#include <fcntl.h>
#include <cstdlib>
#include <dirent.h>
#include <sys/stat.h>
#include <sys/types.h>
using std::endl;
namespace
{
void check(const bool result)
{
if(not result) {
std::cout << "Check failed!" << endl;
throw "bad";
}
}
// Do a recursive directory listing of the directory whose path is specified
// by \a name.
void ls(const std::string& name, std::size_t indent = 0)
{
::DIR *dir;
struct ::dirent *entry;
if(indent == 0) {
std::cout << name << endl;
++indent;
}
// Make sure we can open the directory. This should also catch cases where
// the empty string is passed in.
if (not (dir = ::opendir(name.c_str()))) {
const int error = errno;
std::cout
<< "Failed to open directory: " << name << "; " << error << endl;
return;
}
// Just a sanity check.
if (name.empty()) {
std::cout
<< "Managed to open a directory whose name was the empty string.."
<< endl;
check(::closedir(dir) != -1);
return;
}
// Iterate over the entries in the directory.
while ((entry = ::readdir(dir))) {
const std::string entryName(entry->d_name);
if (entryName == "." || entryName == "..") {
// Skip the dot entries.
continue;
}
const std::string indentStr(indent * 2, ' ');
if (entryName.empty()) {
std::cout
<< indentStr << "\"\": Found empty string as a "
<< (entry->d_type == DT_DIR ? "directory" : "file")
<< " entry!" << endl;
continue;
} else {
std::cout << indentStr << entryName
<< (entry->d_type == DT_DIR ? "/" : "") << endl;
}
if (entry->d_type == DT_DIR) {
// We found a subdirectory; recurse.
ls(std::string(name + (name == "/" ? "" : "/" ) + entryName),
indent + 1);
}
}
// Close our handle.
check(::closedir(dir) != -1);
}
void touch(const std::string &path)
{
const int fd = ::open(path.c_str(), O_CREAT | O_TRUNC, 0644);
check(fd != -1);
check(::close(fd) != -1);
}
}
int main()
{
check(::mkdir("dir", 0755) == 0);
touch("dir/a");
touch("dir/b");
touch("dir/c");
touch("dir/d");
touch("dir/e");
std::cout << "Before:" << endl;
ls("dir");
std::cout << endl;
// Attempt to delete entries as we walk the (single) directory.
::DIR * const dir = ::opendir("dir");
check(dir != NULL);
struct ::dirent *entry;
while((entry = ::readdir(dir)) != NULL) {
const std::string name(entry->d_name);
// Skip "." and "..".
if(name == "." || name == "..") {
continue;
}
// Unlink it.
std::cout << "Unlinking " << name << endl;
check(::unlink(("dir/" + name).c_str()) != -1);
}
check(::closedir(dir) != -1);
std::cout << "After:" << endl;
ls("dir");
std::cout << endl;
return 0;
}
''')
self.run_process([EMCC, 'src.cpp'])
# after unlinking every entry while iterating, the directory should be empty
self.assertContained(r'''Before:
dir
a
b
c
d
e
Unlinking a
Unlinking b
Unlinking c
Unlinking d
Unlinking e
After:
dir
''', self.run_js('a.out.js', args=['', 'abc']))
def test_emversion(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
int main() {
printf("major: %d\n", __EMSCRIPTEN_major__);
printf("minor: %d\n", __EMSCRIPTEN_minor__);
printf("tiny: %d\n", __EMSCRIPTEN_tiny__);
}
''')
self.run_process([EMCC, 'src.cpp'])
expected = '''\
major: %d
minor: %d
tiny: %d
''' % (shared.EMSCRIPTEN_VERSION_MAJOR, shared.EMSCRIPTEN_VERSION_MINOR, shared.EMSCRIPTEN_VERSION_TINY)
self.assertContained(expected, self.run_js('a.out.js'))
def test_libc_files_without_syscalls(self):
# a program which includes FS due to libc js library support, but has no syscalls,
# so full FS support would normally be optimized out
create_test_file('src.cpp', r'''
#include <sys/time.h>
#include <stddef.h>
int main() {
return utimes(NULL, NULL);
}''')
self.run_process([EMCC, 'src.cpp'])
def test_syscall_without_filesystem(self):
# a program which includes a non-trivial syscall, but disables the filesystem.
create_test_file('src.c', r'''
#include <sys/time.h>
#include <stddef.h>
extern int __sys_openat(int);
int main() {
return __sys_openat(0);
}''')
self.run_process([EMCC, 'src.c', '-s', 'NO_FILESYSTEM=1'])
def test_dashS(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-S'])
self.assertExists('hello_world.s')
def assertIsLLVMAsm(self, filename):
bitcode = open(filename).read()
self.assertContained('target triple = "', bitcode)
def test_dashS_ll_input(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-S', '-emit-llvm'])
self.assertIsLLVMAsm('hello_world.ll')
self.run_process([EMCC, 'hello_world.ll', '-S', '-emit-llvm', '-o', 'another.ll'])
self.assertIsLLVMAsm('another.ll')
def test_dashS_stdout(self):
stdout = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-S', '-o', '-'], stdout=PIPE).stdout
self.assertEqual(os.listdir('.'), [])
self.assertContained('hello_world.c', stdout)
def test_emit_llvm(self):
# TODO(https://github.com/emscripten-core/emscripten/issues/9016):
# We shouldn't need to copy the file here but if we don't then emcc will
# internally clobber the hello_world.ll in tests.
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-S', '-emit-llvm'])
self.assertIsLLVMAsm('hello_world.ll')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-c', '-emit-llvm'])
self.assertTrue(building.is_bitcode('hello_world.bc'))
def test_dashE(self):
create_test_file('src.cpp', r'''#include <emscripten.h>
__EMSCRIPTEN_major__ __EMSCRIPTEN_minor__ __EMSCRIPTEN_tiny__ EMSCRIPTEN_KEEPALIVE
''')
def test(args=[]):
print(args)
out = self.run_process([EMCC, 'src.cpp', '-E'] + args, stdout=PIPE).stdout
self.assertContained('%d %d %d __attribute__((used))' % (shared.EMSCRIPTEN_VERSION_MAJOR, shared.EMSCRIPTEN_VERSION_MINOR, shared.EMSCRIPTEN_VERSION_TINY), out)
test()
test(['--bind'])
def test_dashE_respect_dashO(self):
# issue #3365
with_dash_o = self.run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-E', '-o', 'ignored.js'], stdout=PIPE, stderr=PIPE).stdout
without_dash_o = self.run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-E'], stdout=PIPE, stderr=PIPE).stdout
self.assertEqual(len(with_dash_o), 0)
self.assertNotEqual(len(without_dash_o), 0)
def test_dashM(self):
out = self.run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-M'], stdout=PIPE).stdout
self.assertContained('hello_world.o:', out) # Verify output is just a dependency rule instead of bitcode or js
def test_dashM_respect_dashO(self):
# issue #3365
with_dash_o = self.run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-M', '-o', 'ignored.js'], stdout=PIPE).stdout
without_dash_o = self.run_process([EMXX, path_from_root('tests', 'hello_world.cpp'), '-M'], stdout=PIPE).stdout
self.assertEqual(len(with_dash_o), 0)
self.assertNotEqual(len(without_dash_o), 0)
def test_malloc_implicit(self):
self.do_other_test('test_malloc_implicit.cpp')
def test_switch64phi(self):
# issue 2539, fastcomp segfault on phi-i64 interaction
create_test_file('src.cpp', r'''
#include <cstdint>
#include <limits>
#include <cstdio>
//============================================================================
namespace
{
class int_adapter {
public:
typedef ::int64_t int_type;
int_adapter(int_type v = 0)
: value_(v) {}
static const int_adapter pos_infinity() {
return (::std::numeric_limits<int_type>::max)();
}
static const int_adapter neg_infinity() {
return (::std::numeric_limits<int_type>::min)();
}
static const int_adapter not_a_number() {
return (::std::numeric_limits<int_type>::max)()-1;
}
static bool is_neg_inf(int_type v) {
return (v == neg_infinity().as_number());
}
static bool is_pos_inf(int_type v) {
return (v == pos_infinity().as_number());
}
static bool is_not_a_number(int_type v) {
return (v == not_a_number().as_number());
}
bool is_infinity() const {
return (value_ == neg_infinity().as_number() ||
value_ == pos_infinity().as_number());
}
bool is_special() const {
return(is_infinity() || value_ == not_a_number().as_number());
}
bool operator<(const int_adapter& rhs) const {
if(value_ == not_a_number().as_number()
|| rhs.value_ == not_a_number().as_number()) {
return false;
}
if(value_ < rhs.value_) return true;
return false;
}
int_type as_number() const {
return value_;
}
int_adapter operator-(const int_adapter& rhs) const {
if(is_special() || rhs.is_special()) {
if (rhs.is_pos_inf(rhs.as_number())) {
return int_adapter(1);
}
if (rhs.is_neg_inf(rhs.as_number())) {
return int_adapter();
}
}
return int_adapter();
}
private:
int_type value_;
};
class time_iterator {
public:
time_iterator(int_adapter t, int_adapter d)
: current_(t),
offset_(d)
{}
time_iterator& operator--() {
current_ = int_adapter(current_ - offset_);
return *this;
}
bool operator>=(const int_adapter& t) {
return not (current_ < t);
}
private:
int_adapter current_;
int_adapter offset_;
};
void iterate_backward(const int_adapter *answers, const int_adapter& td) {
int_adapter end = answers[0];
time_iterator titr(end, td);
std::puts("");
for (; titr >= answers[0]; --titr) {
}
}
}
int main() {
const int_adapter answer1[] = {};
iterate_backward(NULL, int_adapter());
iterate_backward(answer1, int_adapter());
}
''')
self.run_process([EMCC, 'src.cpp', '-O2', '-s', 'SAFE_HEAP=1'])
@parameterized({
'none': [{'EMCC_FORCE_STDLIBS': None}, False],
# forced libs is ok, they were there anyhow
'normal': [{'EMCC_FORCE_STDLIBS': 'libc,libc++abi,libc++'}, False],
# partial list, but ok since we grab them as needed
'partial': [{'EMCC_FORCE_STDLIBS': 'libc++'}, False],
# fail! not enough stdlibs
'partial_only': [{'EMCC_FORCE_STDLIBS': 'libc++,libc,libc++abi', 'EMCC_ONLY_FORCED_STDLIBS': '1'}, True],
# force all the needed stdlibs, so this works even though we ignore the input file
'full_only': [{'EMCC_FORCE_STDLIBS': 'libc,libc++abi,libc++,libmalloc', 'EMCC_ONLY_FORCED_STDLIBS': '1'}, False],
})
def test_only_force_stdlibs(self, env, fail):
with env_modify(env):
self.run_process([EMXX, path_from_root('tests', 'hello_libcxx.cpp'), '-s', 'WARN_ON_UNDEFINED_SYMBOLS=0'])
if fail:
output = self.expect_fail(config.NODE_JS + ['a.out.js'], stdout=PIPE)
self.assertContained('missing function', output)
else:
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_only_force_stdlibs_2(self):
create_test_file('src.cpp', r'''
#include <iostream>
#include <stdexcept>
int main()
{
try {
throw std::exception();
std::cout << "got here" << std::endl;
}
catch (const std::exception& ex) {
std::cout << "Caught exception: " << ex.what() << std::endl;
}
}
''')
with env_modify({'EMCC_FORCE_STDLIBS': 'libc,libc++abi,libc++,libmalloc', 'EMCC_ONLY_FORCED_STDLIBS': '1'}):
self.run_process([EMXX, 'src.cpp', '-s', 'DISABLE_EXCEPTION_CATCHING=0'])
self.assertContained('Caught exception: std::exception', self.run_js('a.out.js'))
def test_strftime_zZ(self):
create_test_file('src.cpp', r'''
#include <cerrno>
#include <cstring>
#include <ctime>
#include <iostream>
int main()
{
// Buffer to hold the current hour of the day. Format is HH + nul
// character.
char hour[3];
// Buffer to hold our ISO 8601 formatted UTC offset for the current
// timezone. Format is [+-]hhmm + nul character.
char utcOffset[6];
// Buffer to hold the timezone name or abbreviation. Just make it
// sufficiently large to hold most timezone names.
char timezone[128];
std::tm tm;
// Get the current timestamp.
const std::time_t now = std::time(NULL);
// What time is that here?
if (::localtime_r(&now, &tm) == NULL) {
const int error = errno;
std::cout
<< "Failed to get localtime for timestamp=" << now << "; errno=" << error
<< "; " << std::strerror(error) << std::endl;
return 1;
}
size_t result = 0;
// Get the formatted hour of the day.
if ((result = std::strftime(hour, 3, "%H", &tm)) != 2) {
const int error = errno;
std::cout
<< "Failed to format hour for timestamp=" << now << "; result="
<< result << "; errno=" << error << "; " << std::strerror(error)
<< std::endl;
return 1;
}
std::cout << "The current hour of the day is: " << hour << std::endl;
// Get the formatted UTC offset in ISO 8601 format.
if ((result = std::strftime(utcOffset, 6, "%z", &tm)) != 5) {
const int error = errno;
std::cout
<< "Failed to format UTC offset for timestamp=" << now << "; result="
<< result << "; errno=" << error << "; " << std::strerror(error)
<< std::endl;
return 1;
}
std::cout << "The current timezone offset is: " << utcOffset << std::endl;
// Get the formatted timezone name or abbreviation. We don't know how long
// this will be, so just expect some data to be written to the buffer.
if ((result = std::strftime(timezone, 128, "%Z", &tm)) == 0) {
const int error = errno;
std::cout
<< "Failed to format timezone for timestamp=" << now << "; result="
<< result << "; errno=" << error << "; " << std::strerror(error)
<< std::endl;
return 1;
}
std::cout << "The current timezone is: " << timezone << std::endl;
std::cout << "ok!\n";
}
''')
self.run_process([EMCC, 'src.cpp'])
self.assertContained('ok!', self.run_js('a.out.js'))
def test_strptime_symmetry(self):
building.emcc(path_from_root('tests', 'strptime_symmetry.cpp'), output_filename='a.out.js')
self.assertContained('TEST PASSED', self.run_js('a.out.js'))
def test_truncate_from_0(self):
create_test_file('src.cpp', r'''
#include <cerrno>
#include <cstring>
#include <iostream>
#include <fcntl.h>
#include <sys/stat.h>
#include <sys/types.h>
#include <unistd.h>
using std::endl;
//============================================================================
// :: Helpers
namespace
{
// Returns the size of the regular file specified as 'path'.
::off_t getSize(const char* const path)
{
// Stat the file and make sure that it's the expected size.
struct ::stat path_stat;
if (::stat(path, &path_stat) != 0) {
const int error = errno;
std::cout
<< "Failed to lstat path: " << path << "; errno=" << error << "; "
<< std::strerror(error) << endl;
return -1;
}
std::cout
<< "Size of file is: " << path_stat.st_size << endl;
return path_stat.st_size;
}
// Causes the regular file specified in 'path' to have a size of 'length'
// bytes.
void resize(const char* const path,
const ::off_t length)
{
std::cout
<< "Truncating file=" << path << " to length=" << length << endl;
if (::truncate(path, length) == -1)
{
const int error = errno;
std::cout
<< "Failed to truncate file=" << path << "; errno=" << error
<< "; " << std::strerror(error) << endl;
}
const ::off_t size = getSize(path);
if (size != length) {
std::cout
<< "Failed to truncate file=" << path << " to length=" << length
<< "; got size=" << size << endl;
}
}
// Helper to create a file with the given content.
void createFile(const std::string& path, const std::string& content)
{
std::cout
<< "Creating file: " << path << " with content=" << content << endl;
const int fd = ::open(path.c_str(), O_CREAT | O_WRONLY, 0644);
if (fd == -1) {
const int error = errno;
std::cout
<< "Failed to open file for writing: " << path << "; errno=" << error
<< "; " << std::strerror(error) << endl;
return;
}
if (::write(fd, content.c_str(), content.size()) != content.size()) {
const int error = errno;
std::cout
<< "Failed to write content=" << content << " to file=" << path
<< "; errno=" << error << "; " << std::strerror(error) << endl;
// Fall through to close FD.
}
::close(fd);
}
}
//============================================================================
// :: Entry Point
int main()
{
const char* const file = "/tmp/file";
createFile(file, "This is some content");
getSize(file);
resize(file, 32);
resize(file, 17);
resize(file, 0);
// Grow the file back up from zero.
resize(file, 32);
return 0;
}
''')
self.run_process([EMCC, 'src.cpp'])
self.assertContained(r'''Creating file: /tmp/file with content=This is some content
Size of file is: 20
Truncating file=/tmp/file to length=32
Size of file is: 32
Truncating file=/tmp/file to length=17
Size of file is: 17
Truncating file=/tmp/file to length=0
Size of file is: 0
Truncating file=/tmp/file to length=32
Size of file is: 32
''', self.run_js('a.out.js'))
def test_create_readonly(self):
create_test_file('src.cpp', r'''
#include <cerrno>
#include <cstring>
#include <iostream>
#include <fcntl.h>
#include <unistd.h>
using std::endl;
//============================================================================
// :: Helpers
namespace
{
// Helper to create a read-only file with content.
void readOnlyFile(const std::string& path, const std::string& content)
{
std::cout
<< "Creating file: " << path << " with content of size="
<< content.size() << endl;
const int fd = ::open(path.c_str(), O_CREAT | O_WRONLY, 0400);
if (fd == -1) {
const int error = errno;
std::cout
<< "Failed to open file for writing: " << path << "; errno=" << error
<< "; " << std::strerror(error) << endl;
return;
}
// Write the content to the file.
ssize_t result = 0;
if ((result = ::write(fd, content.data(), content.size()))
!= ssize_t(content.size()))
{
const int error = errno;
std::cout
<< "Failed to write to file=" << path << "; errno=" << error
<< "; " << std::strerror(error) << endl;
// Fall through to close the file.
}
else {
std::cout
<< "Data written to file=" << path << "; successfully wrote "
<< result << " bytes" << endl;
}
::close(fd);
}
}
//============================================================================
// :: Entry Point
int main() {
const char* const file = "/tmp/file";
unlink(file);
readOnlyFile(file, "This content should get written because the file "
"does not yet exist and so, only the mode of the "
"containing directory will influence my ability to "
"create and open the file. The mode of the file only "
"applies to opening of the stream, not subsequent stream "
"operations after stream has opened.\n\n");
readOnlyFile(file, "This should not get written because the file already "
"exists and is read-only.\n\n");
}
''')
self.run_process([EMCC, 'src.cpp'])
self.assertContained(r'''Creating file: /tmp/file with content of size=292
Data written to file=/tmp/file; successfully wrote 292 bytes
Creating file: /tmp/file with content of size=79
Failed to open file for writing: /tmp/file; errno=2; Permission denied
''', self.run_js('a.out.js'))
def test_embed_file_large(self):
# If such long files are encoded on one line,
# they overflow the interpreter's limit
large_size = int(1500000)
create_test_file('large.txt', 'x' * large_size)
create_test_file('src.c', r'''
#include <stdio.h>
#include <unistd.h>
int main() {
FILE* fp = fopen("large.txt", "r");
if (fp) {
printf("ok\n");
fseek(fp, 0L, SEEK_END);
printf("%ld\n", ftell(fp));
} else {
printf("failed to open large file.txt\n");
}
return 0;
}
''')
self.run_process([EMCC, 'src.c', '--embed-file', 'large.txt'])
for engine in config.JS_ENGINES:
if engine == config.V8_ENGINE:
continue # ooms
print(engine)
self.assertContained('ok\n' + str(large_size) + '\n', self.run_js('a.out.js', engine=engine))
def test_force_exit(self):
create_test_file('src.c', r'''
#include <emscripten/emscripten.h>
EMSCRIPTEN_KEEPALIVE void callback() {
EM_ASM({ out('callback pre()') });
emscripten_force_exit(42);
EM_ASM({ out('callback post()') });
}
int main() {
EM_ASM({ setTimeout(function() { out("calling callback()"); _callback() }, 100) });
emscripten_exit_with_live_runtime();
return 123;
}
''')
self.run_process([EMCC, 'src.c'])
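# emscripten_force_exit(42) should terminate immediately with status 42, so
# 'callback pre()' is printed but 'callback post()' must never appear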
output = self.run_js('a.out.js', assert_returncode=42)
self.assertContained('callback pre()', output)
self.assertNotContained('callback post()', output)
def test_bad_locale(self):
create_test_file('src.cpp', r'''
#include <locale.h>
#include <stdio.h>
#include <wctype.h>
int main(const int argc, const char * const * const argv) {
const char * const locale = (argc > 1 ? argv[1] : "C");
const char * const actual = setlocale(LC_ALL, locale);
if(actual == NULL) {
printf("%s locale not supported\n", locale);
return 0;
}
printf("locale set to %s: %s\n", locale, actual);
}
''')
self.run_process([EMCC, 'src.cpp'])
self.assertContained('locale set to C: C;C;C;C;C;C',
self.run_js('a.out.js', args=['C']))
self.assertContained('locale set to waka: waka;waka;waka;waka;waka;waka',
self.run_js('a.out.js', args=['waka']))
def test_browser_language_detection(self):
# Test HTTP Accept-Language parsing by simulating navigator.languages #8751
self.run_process([EMCC,
path_from_root('tests', 'test_browser_language_detection.c')])
self.assertContained('C.UTF-8', self.run_js('a.out.js'))
# Accept-Language: fr,fr-FR;q=0.8,en-US;q=0.5,en;q=0.3
create_test_file('preamble.js', r'''navigator = {};
navigator.languages = [ "fr", "fr-FR", "en-US", "en" ];''')
self.run_process([EMCC, '--pre-js', 'preamble.js',
path_from_root('tests', 'test_browser_language_detection.c')])
self.assertContained('fr.UTF-8', self.run_js('a.out.js'))
# Accept-Language: fr-FR,fr;q=0.8,en-US;q=0.5,en;q=0.3
create_test_file('preamble.js', r'''navigator = {};
navigator.languages = [ "fr-FR", "fr", "en-US", "en" ];''')
self.run_process([EMCC, '--pre-js', 'preamble.js',
path_from_root('tests', 'test_browser_language_detection.c')])
self.assertContained('fr_FR.UTF-8', self.run_js('a.out.js'))
def test_js_main(self):
# try to add a main() from JS, at runtime. this is not supported (the
# compiler needs to know at compile time about main).
create_test_file('pre_main.js', r'''
var Module = {
'_main': function() {
}
};
''')
create_test_file('src.cpp', '')
self.run_process([EMCC, 'src.cpp', '--pre-js', 'pre_main.js'])
self.assertContained('compiled without a main, but one is present. if you added it from JS, use Module["onRuntimeInitialized"]',
self.run_js('a.out.js', assert_returncode=NON_ZERO))
def test_locale_wrong(self):
create_test_file('src.cpp', r'''
#include <locale>
#include <iostream>
#include <stdexcept>
int main(const int argc, const char * const * const argv) {
const char * const name = argc > 1 ? argv[1] : "C";
try {
const std::locale locale(name);
std::cout
<< "Constructed locale \"" << name << "\"\n"
<< "This locale is "
<< (locale == std::locale::global(locale) ? "" : "not ")
<< "the global locale.\n"
<< "This locale is " << (locale == std::locale::classic() ? "" : "not ")
<< "the C locale." << std::endl;
} catch(const std::runtime_error &ex) {
std::cout
<< "Can't construct locale \"" << name << "\": " << ex.what()
<< std::endl;
return 1;
} catch(...) {
std::cout
<< "FAIL: Unexpected exception constructing locale \"" << name << '\"'
<< std::endl;
return 127;
}
}
''')
self.run_process([EMCC, 'src.cpp', '-s', 'EXIT_RUNTIME=1', '-s', 'DISABLE_EXCEPTION_CATCHING=0'])
self.assertContained('Constructed locale "C"\nThis locale is the global locale.\nThis locale is the C locale.', self.run_js('a.out.js', args=['C']))
self.assertContained('''Can't construct locale "waka": collate_byname<char>::collate_byname failed to construct for waka''', self.run_js('a.out.js', args=['waka'], assert_returncode=1))
def test_cleanup_os(self):
# issue 2644
def test(args, be_clean):
print(args)
self.clear()
shutil.copyfile(path_from_root('tests', 'hello_world.c'), 'a.c')
create_test_file('b.c', ' ')
self.run_process([EMCC, 'a.c', 'b.c'] + args)
clutter = glob.glob('*.o')
if be_clean:
assert len(clutter) == 0, 'should not leave clutter ' + str(clutter)
else:
assert len(clutter) == 2, 'should leave .o files'
test(['-o', 'c.so', '-r'], True)
test(['-o', 'c.js'], True)
test(['-o', 'c.html'], True)
test(['-c'], False)
def test_dash_g_bc(self):
def test(opts):
print(opts)
self.run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c'), '-o', 'a_.bc'] + opts)
sizes = {'_': os.path.getsize('a_.bc')}
self.run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c'), '-g', '-o', 'ag.bc'] + opts)
sizes['g'] = os.path.getsize('ag.bc')
for i in range(0, 5):
self.run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c'), '-g' + str(i), '-o', 'a' + str(i) + '.bc'] + opts)
sizes[i] = os.path.getsize('a' + str(i) + '.bc')
print(' ', sizes)
assert sizes['_'] == sizes[0] == sizes[1] == sizes[2], 'no debug means no llvm debug info ' + str(sizes)
assert sizes['g'] == sizes[3] == sizes[4], '-g or -g4 means llvm debug info ' + str(sizes)
assert sizes['_'] < sizes['g'], 'llvm debug info has positive size ' + str(sizes)
test([])
test(['-O1'])
def test_no_filesystem(self):
FS_MARKER = 'var FS'
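# the presence of this marker in the output JS indicates that full filesystem
# support was linked in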
# fopen forces full filesystem support
self.run_process([EMCC, path_from_root('tests', 'hello_world_fopen.c'), '-s', 'ASSERTIONS=0'])
yes_size = os.path.getsize('a.out.js')
self.assertContained('hello, world!', self.run_js('a.out.js'))
self.assertContained(FS_MARKER, open('a.out.js').read())
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ASSERTIONS=0'])
no_size = os.path.getsize('a.out.js')
self.assertContained('hello, world!', self.run_js('a.out.js'))
self.assertNotContained(FS_MARKER, open('a.out.js').read())
print('yes fs, no fs:', yes_size, no_size)
# ~100K of FS code is removed
self.assertGreater(yes_size - no_size, 90000)
self.assertLess(no_size, 360000)
def test_no_filesystem_libcxx(self):
self.run_process([EMCC, path_from_root('tests', 'hello_libcxx.cpp'), '-s', 'FILESYSTEM=0'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
@is_slow_test
def test_no_nuthin(self):
# check FILESYSTEM is automatically set, and effective
def test(opts, absolute):
print('opts, absolute:', opts, absolute)
sizes = {}
def do(name, source, moar_opts):
self.clear()
# pad the name to a common length so that it doesn't affect the size of the
# output
padded_name = name + '_' * (20 - len(name))
self.run_process([EMCC, path_from_root('tests', source), '-o', padded_name + '.js'] + opts + moar_opts)
sizes[name] = os.path.getsize(padded_name + '.js')
if os.path.exists(padded_name + '.wasm'):
sizes[name] += os.path.getsize(padded_name + '.wasm')
self.assertContained('hello, world!', self.run_js(padded_name + '.js'))
do('normal', 'hello_world_fopen.c', [])
do('no_fs', 'hello_world.c', []) # without fopen, we should auto-detect we do not need full fs support and can do FILESYSTEM=0
do('no_fs_manual', 'hello_world.c', ['-s', 'FILESYSTEM=0'])
print(' ', sizes)
self.assertLess(sizes['no_fs'], sizes['normal'])
self.assertLess(sizes['no_fs'], absolute)
# manual can usually remove a tiny bit more
self.assertLess(sizes['no_fs_manual'], sizes['no_fs'] + 30)
test(['-s', 'ASSERTIONS=0'], 120000) # we don't care about code size with assertions
test(['-O1'], 91000)
test(['-O2'], 46000)
test(['-O3', '--closure', '1'], 17000)
# js too
test(['-O3', '--closure', '1', '-s', 'WASM=0'], 36000)
test(['-O3', '--closure', '2', '-s', 'WASM=0'], 33000) # might change now and then
def test_no_browser(self):
BROWSER_INIT = 'var Browser'
self.run_process([EMCC, path_from_root('tests', 'hello_world.c')])
self.assertNotContained(BROWSER_INIT, open('a.out.js').read())
self.run_process([EMCC, path_from_root('tests', 'browser_main_loop.c')]) # uses emscripten_set_main_loop, which needs Browser
self.assertContained(BROWSER_INIT, open('a.out.js').read())
def test_EXPORTED_RUNTIME_METHODS(self):
def test(opts, has, not_has):
print(opts, has, not_has)
self.clear()
# check without assertions, as with assertions we add stubs for the things we remove (which
# print nice error messages)
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ASSERTIONS=0'] + opts)
self.assertContained('hello, world!', self.run_js('a.out.js'))
src = open('a.out.js').read()
self.assertContained(has, src)
self.assertNotContained(not_has, src)
test([], 'Module["', 'Module["waka')
test(['-s', 'EXPORTED_RUNTIME_METHODS=[]'], '', 'Module["addRunDependency')
test(['-s', 'EXPORTED_RUNTIME_METHODS=["addRunDependency"]'], 'Module["addRunDependency', 'Module["waka')
test(['-s', 'EXPORTED_RUNTIME_METHODS=[]', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["addRunDependency"]'], 'Module["addRunDependency', 'Module["waka')
def test_stat_fail_alongtheway(self):
create_test_file('src.cpp', r'''
#include <errno.h>
#include <stdio.h>
#include <sys/types.h>
#include <sys/stat.h>
#include <unistd.h>
#include <stdlib.h>
#include <fcntl.h>
#include <string.h>
#define CHECK(expression) \
if(!(expression)) { \
error = errno; \
printf("FAIL: %s\n", #expression); fail = 1; \
} else { \
error = errno; \
printf("pass: %s\n", #expression); \
} \
int main() {
int error;
int fail = 0;
CHECK(mkdir("path", 0777) == 0);
CHECK(close(open("path/file", O_CREAT | O_WRONLY, 0644)) == 0);
{
struct stat st;
CHECK(stat("path", &st) == 0);
CHECK(st.st_mode = 0777);
}
{
struct stat st;
CHECK(stat("path/nosuchfile", &st) == -1);
printf("info: errno=%d %s\n", error, strerror(error));
CHECK(error == ENOENT);
}
{
struct stat st;
CHECK(stat("path/file", &st) == 0);
CHECK(st.st_mode = 0666);
}
{
struct stat st;
CHECK(stat("path/file/impossible", &st) == -1);
printf("info: errno=%d %s\n", error, strerror(error));
CHECK(error == ENOTDIR);
}
{
struct stat st;
CHECK(lstat("path/file/impossible", &st) == -1);
printf("info: errno=%d %s\n", error, strerror(error));
CHECK(error == ENOTDIR);
}
return fail;
}
''')
self.run_process([EMCC, 'src.cpp'])
self.assertContained(r'''pass: mkdir("path", 0777) == 0
pass: close(open("path/file", O_CREAT | O_WRONLY, 0644)) == 0
pass: stat("path", &st) == 0
pass: st.st_mode = 0777
pass: stat("path/nosuchfile", &st) == -1
info: errno=44 No such file or directory
pass: error == ENOENT
pass: stat("path/file", &st) == 0
pass: st.st_mode = 0666
pass: stat("path/file/impossible", &st) == -1
info: errno=54 Not a directory
pass: error == ENOTDIR
pass: lstat("path/file/impossible", &st) == -1
info: errno=54 Not a directory
pass: error == ENOTDIR
''', self.run_js('a.out.js'))
def test_link_with_a_static(self):
create_test_file('x.c', r'''
int init_weakref(int a, int b) {
return a + b;
}
''')
create_test_file('y.c', r'''
static int init_weakref(void) { // inlined in -O2, not in -O0 where it shows up in llvm-nm as 't'
return 150;
}
int testy(void) {
return init_weakref();
}
''')
create_test_file('z.c', r'''
extern int init_weakref(int, int);
extern int testy(void);
int main(void) {
return testy() + init_weakref(5, 6);
}
''')
self.run_process([EMCC, '-c', 'x.c', '-o', 'x.o'])
self.run_process([EMCC, '-c', 'y.c', '-o', 'y.o'])
self.run_process([EMCC, '-c', 'z.c', '-o', 'z.o'])
try_delete('libtest.a')
self.run_process([EMAR, 'rc', 'libtest.a', 'y.o'])
self.run_process([EMAR, 'rc', 'libtest.a', 'x.o'])
self.run_process([EMRANLIB, 'libtest.a'])
for args in [[], ['-O2']]:
print('args:', args)
self.run_process([EMCC, 'z.o', 'libtest.a', '-s', 'EXIT_RUNTIME=1'] + args)
self.run_js('a.out.js', assert_returncode=161)
def test_link_with_bad_o_in_a(self):
# when building a .a, we force-include all the objects inside it. but, some
# may not be valid bitcode, e.g. if it contains metadata or something else
# weird. we should just ignore those
self.run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c'), '-o', 'hello_world.o'])
create_test_file('bad.obj', 'this is not a good file, it should be ignored!')
self.run_process([LLVM_AR, 'cr', 'libfoo.a', 'hello_world.o', 'bad.obj'])
self.run_process([EMCC, 'libfoo.a'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_require(self):
inname = path_from_root('tests', 'hello_world.c')
building.emcc(inname, args=['-s', 'ASSERTIONS=0'], output_filename='a.out.js')
output = self.run_process(config.NODE_JS + ['-e', 'require("./a.out.js")'], stdout=PIPE, stderr=PIPE)
assert output.stdout == 'hello, world!\n' and output.stderr == '', 'expected no output, got\n===\nSTDOUT\n%s\n===\nSTDERR\n%s\n===\n' % (output.stdout, output.stderr)
def test_require_modularize(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE=1', '-s', 'ASSERTIONS=0'])
src = open('a.out.js').read()
self.assertContained('module.exports = Module;', src)
output = self.run_process(config.NODE_JS + ['-e', 'var m = require("./a.out.js"); m();'], stdout=PIPE, stderr=PIPE)
self.assertFalse(output.stderr)
self.assertEqual(output.stdout, 'hello, world!\n')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE=1', '-s', 'EXPORT_NAME="NotModule"', '-s', 'ASSERTIONS=0'])
src = open('a.out.js').read()
self.assertContained('module.exports = NotModule;', src)
output = self.run_process(config.NODE_JS + ['-e', 'var m = require("./a.out.js"); m();'], stdout=PIPE, stderr=PIPE)
self.assertFalse(output.stderr)
self.assertEqual(output.stdout, 'hello, world!\n')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE=1'])
# We call require() twice to ensure it returns the wrapper function each time
output = self.run_process(config.NODE_JS + ['-e', 'require("./a.out.js")();var m = require("./a.out.js"); m();'], stdout=PIPE, stderr=PIPE)
self.assertFalse(output.stderr)
self.assertEqual(output.stdout, 'hello, world!\nhello, world!\n')
def test_define_modularize(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE=1', '-s', 'ASSERTIONS=0'])
with open('a.out.js') as f:
src = 'var module = 0; ' + f.read()
create_test_file('a.out.js', src)
assert "define([], function() { return Module; });" in src
output = self.run_process(config.NODE_JS + ['-e', 'var m; (global.define = function(deps, factory) { m = factory(); }).amd = true; require("./a.out.js"); m();'], stdout=PIPE, stderr=PIPE)
assert output.stdout == 'hello, world!\n' and output.stderr == '', 'expected output, got\n===\nSTDOUT\n%s\n===\nSTDERR\n%s\n===\n' % (output.stdout, output.stderr)
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE=1', '-s', 'EXPORT_NAME="NotModule"', '-s', 'ASSERTIONS=0'])
with open('a.out.js') as f:
src = 'var module = 0; ' + f.read()
create_test_file('a.out.js', src)
assert "define([], function() { return NotModule; });" in src
output = self.run_process(config.NODE_JS + ['-e', 'var m; (global.define = function(deps, factory) { m = factory(); }).amd = true; require("./a.out.js"); m();'], stdout=PIPE, stderr=PIPE)
assert output.stdout == 'hello, world!\n' and output.stderr == '', 'expected output, got\n===\nSTDOUT\n%s\n===\nSTDERR\n%s\n===\n' % (output.stdout, output.stderr)
def test_EXPORT_NAME_with_html(self):
result = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'a.html', '-s', 'EXPORT_NAME=Other'], stdout=PIPE, check=False, stderr=STDOUT)
self.assertNotEqual(result.returncode, 0)
self.assertContained('Customizing EXPORT_NAME requires that the HTML be customized to use that name', result.stdout)
def test_modularize_sync_compilation(self):
create_test_file('post.js', r'''
console.log('before');
var result = Module();
// It should be an object.
console.log(typeof result);
// And it should have the exports that Module has, showing it is Module in fact.
console.log(typeof result._main);
// And it should not be a Promise.
console.log(typeof result.then);
console.log('after');
''')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'),
'-s', 'MODULARIZE=1',
'-s', 'WASM_ASYNC_COMPILATION=0',
'--extern-post-js', 'post.js'])
self.assertContained('''\
before
hello, world!
object
function
undefined
after
''', self.run_js('a.out.js'))
def test_export_all_3142(self):
create_test_file('src.cpp', r'''
typedef unsigned int Bit32u;
struct S_Descriptor {
Bit32u limit_0_15 :16;
Bit32u base_0_15 :16;
Bit32u base_16_23 :8;
};
class Descriptor {
public:
Descriptor() { saved.fill[0] = saved.fill[1] = 0; }
union {
S_Descriptor seg;
Bit32u fill[2];
} saved;
};
Descriptor desc;
''')
try_delete('a.out.js')
self.run_process([EMCC, 'src.cpp', '-O2', '-s', 'EXPORT_ALL'])
self.assertExists('a.out.js')
def test_emmake_emconfigure(self):
def check(what, args, fail=True, expect=''):
args = [what] + args
print(what, args, fail, expect)
output = self.run_process(args, stdout=PIPE, stderr=PIPE, check=False)
assert ('is a helper for' in output.stderr) == fail
assert ('Typical usage' in output.stderr) == fail
self.assertContained(expect, output.stdout)
check(emmake, [])
check(emconfigure, [])
check(emmake, ['--version'])
check(emconfigure, ['--version'])
check(emmake, ['make'], fail=False)
check(emconfigure, ['configure'], fail=False)
check(emconfigure, ['./configure'], fail=False)
check(emcmake, ['cmake'], fail=False)
create_test_file('test.py', '''
import os
print(os.environ.get('CROSS_COMPILE'))
''')
check(emconfigure, [PYTHON, 'test.py'], expect=path_from_root('em'), fail=False)
check(emmake, [PYTHON, 'test.py'], expect=path_from_root('em'), fail=False)
create_test_file('test.py', '''
import os
print(os.environ.get('NM'))
''')
check(emconfigure, [PYTHON, 'test.py'], expect=shared.LLVM_NM, fail=False)
def test_emmake_python(self):
# simulates a configure/make script that looks for things like CC, AR, etc., and which we should
# not confuse by setting those vars to something containing `python X` as the script checks for
# the existence of an executable.
self.run_process([emmake, PYTHON, path_from_root('tests', 'emmake', 'make.py')])
def test_sdl2_config(self):
for args, expected in [
[['--version'], '2.0.10'],
[['--cflags'], '-s USE_SDL=2'],
[['--libs'], '-s USE_SDL=2'],
[['--cflags', '--libs'], '-s USE_SDL=2'],
]:
print(args, expected)
out = self.run_process([PYTHON, path_from_root('system', 'bin', 'sdl2-config')] + args, stdout=PIPE, stderr=PIPE).stdout
assert expected in out, out
print('via emmake')
out = self.run_process([emmake, 'sdl2-config'] + args, stdout=PIPE, stderr=PIPE).stdout
assert expected in out, out
def test_module_onexit(self):
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({
Module['onExit'] = function(status) { out('exiting now, status ' + status) };
});
return 14;
}
''')
try_delete('a.out.js')
self.run_process([EMCC, 'src.cpp', '-s', 'EXIT_RUNTIME=1'])
self.assertContained('exiting now, status 14', self.run_js('a.out.js', assert_returncode=14))
def test_NO_aliasing(self):
# the NO_ prefix flips boolean options
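# e.g. -s NO_EXIT_RUNTIME=0 should behave the same as -s EXIT_RUNTIME=1, which
# is what the comparison below checks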
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EXIT_RUNTIME=1'])
exit_1 = open('a.out.js').read()
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'NO_EXIT_RUNTIME=0'])
no_exit_0 = open('a.out.js').read()
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EXIT_RUNTIME=0'])
exit_0 = open('a.out.js').read()
assert exit_1 == no_exit_0
assert exit_1 != exit_0
def test_underscore_exit(self):
create_test_file('src.cpp', r'''
#include <unistd.h>
int main() {
_exit(0); // should not end up in an infinite loop with non-underscore exit
}
''')
self.run_process([EMCC, 'src.cpp'])
self.assertContained('', self.run_js('a.out.js'))
def test_file_packager_huge(self):
MESSAGE = 'warning: file packager is creating an asset bundle of 257 MB. this is very large, and browsers might have trouble loading it'
create_test_file('huge.dat', 'a' * (1024 * 1024 * 257))
create_test_file('tiny.dat', 'a')
err = self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'tiny.dat'], stdout=PIPE, stderr=PIPE).stderr
self.assertNotContained(MESSAGE, err)
err = self.run_process([FILE_PACKAGER, 'test.data', '--preload', 'huge.dat'], stdout=PIPE, stderr=PIPE).stderr
self.assertContained(MESSAGE, err)
self.clear()
@parameterized({
'': (True,),
'wasm2js': (False,),
})
def test_massive_alloc(self, wasm):
create_test_file('main.cpp', r'''
#include <stdio.h>
#include <stdlib.h>
int main() {
volatile int x = (int)malloc(1024 * 1024 * 1400);
return x == 0; // can't alloc it, but don't fail catastrophically, expect null
}
''')
cmd = [EMCC, 'main.cpp', '-s', 'ALLOW_MEMORY_GROWTH']
if not wasm:
cmd += ['-s', 'WASM=0']
self.run_process(cmd)
# we just care about the message regarding allocating over 1GB of memory
output = self.run_js('a.out.js')
if not wasm:
self.assertContained('Warning: Enlarging memory arrays, this is not fast! 16777216,1473314816\n', output)
def test_failing_alloc(self):
for pre_fail, post_fail, opts in [
('', '', []),
('EM_ASM( Module.temp = _sbrk() );', 'EM_ASM( assert(Module.temp === _sbrk(), "must not adjust brk when an alloc fails!") );', []),
# also test non-wasm in normal mode
('', '', ['-s', 'WASM=0']),
('EM_ASM( Module.temp = _sbrk() );', 'EM_ASM( assert(Module.temp === _sbrk(), "must not adjust brk when an alloc fails!") );', ['-s', 'WASM=0']),
]:
for growth in [0, 1]:
for aborting_args in [[], ['-s', 'ABORTING_MALLOC=0'], ['-s', 'ABORTING_MALLOC=1']]:
create_test_file('main.cpp', r'''
#include <stdio.h>
#include <stdlib.h>
#include <vector>
#include <assert.h>
#include <emscripten.h>
#define CHUNK_SIZE (10 * 1024 * 1024)
int main() {
std::vector<void*> allocs;
bool has = false;
while (1) {
printf("trying an allocation\n");
%s
void* curr = malloc(CHUNK_SIZE);
if (!curr) {
%s
break;
}
has = true;
printf("allocated another chunk, %%zu so far\n", allocs.size());
allocs.push_back(curr);
}
assert(has);
printf("an allocation failed!\n");
#ifdef SPLIT
return 0;
#endif
while (1) {
assert(allocs.size() > 0);
void *curr = allocs.back();
allocs.pop_back();
free(curr);
printf("freed one\n");
if (malloc(CHUNK_SIZE)) break;
}
printf("managed another malloc!\n");
}
''' % (pre_fail, post_fail))
args = [EMCC, 'main.cpp', '-s', 'EXPORTED_FUNCTIONS=[_main,_sbrk]'] + opts + aborting_args
args += ['-s', 'TEST_MEMORY_GROWTH_FAILS=1'] # In this test, force memory growing to fail
if growth:
args += ['-s', 'ALLOW_MEMORY_GROWTH=1']
# growth disables aborting by default, but it can be overridden
aborting = 'ABORTING_MALLOC=1' in aborting_args or (not aborting_args and not growth)
print('test_failing_alloc', args, pre_fail)
self.run_process(args)
# growth also disables aborting
can_manage_another = not aborting
split = '-DSPLIT' in args
print('can manage another:', can_manage_another, 'split:', split, 'aborting:', aborting)
output = self.run_js('a.out.js', assert_returncode=0 if can_manage_another else NON_ZERO)
if can_manage_another:
self.assertContained('an allocation failed!\n', output)
if not split:
# split memory allocation may fail due to GC objects no longer being allocatable,
# and we can't expect to recover from that deterministically. So just check we
# get to the fail.
# otherwise, we should fail eventually, then free, then succeed
self.assertContained('managed another malloc!\n', output)
else:
# we should see an abort
self.assertContained('abort(Cannot enlarge memory arrays', output)
if growth:
# when growth is enabled, the default is to not abort, so just explain that
self.assertContained('If you want malloc to return NULL (0) instead of this abort, do not link with -s ABORTING_MALLOC=1', output)
else:
# when growth is not enabled, suggest 3 possible solutions (start with more memory, allow growth, or don't abort)
self.assertContained(('higher than the current value 16777216,', 'higher than the current value 33554432,'), output)
self.assertContained('compile with -s ALLOW_MEMORY_GROWTH=1 ', output)
self.assertContained('compile with -s ABORTING_MALLOC=0 ', output)
def test_failing_growth_2gb(self):
create_test_file('test.cpp', r'''
#include <stdio.h>
#include <stdlib.h>
void* out;
int main() {
while (1) {
puts("loop...");
out = malloc(1024 * 1024);
if (!out) {
puts("done");
return 0;
}
}
}
''')
self.run_process([EMCC, '-O1', 'test.cpp', '-s', 'ALLOW_MEMORY_GROWTH'])
self.assertContained('done', self.run_js('a.out.js'))
def test_libcxx_minimal(self):
create_test_file('vector.cpp', r'''
#include <vector>
int main(int argc, char** argv) {
std::vector<void*> v;
for (int i = 0 ; i < argc; i++) {
v.push_back(nullptr);
}
return v.size();
}
''')
self.run_process([EMCC, '-O2', 'vector.cpp', '-o', 'vector.js'])
self.run_process([EMCC, '-O2', path_from_root('tests', 'hello_libcxx.cpp'), '-o', 'iostream.js'])
vector = os.path.getsize('vector.js')
iostream = os.path.getsize('iostream.js')
print(vector, iostream)
self.assertGreater(vector, 1000)
# we can strip out almost all of libcxx when just using vector
self.assertLess(2.25 * vector, iostream)
@parameterized({
'': (True,),
# TODO(sbc): make dynamic linking work with wasm2js
# 'wasm2js': (False,)
})
def test_minimal_dynamic(self, wasm):
library_file = 'library.wasm' if wasm else 'library.js'
def test(main_args, library_args=[], expected='hello from main\nhello from library', assert_returncode=0):
print('testing', main_args, library_args)
self.clear()
create_test_file('library.c', r'''
#include <stdio.h>
void library_func() {
#ifdef USE_PRINTF
printf("hello from library: %p\n", &library_func);
#else
puts("hello from library");
#endif
}
''')
# -fno-builtin to prevent printf -> iprintf optimization
self.run_process([EMCC, 'library.c', '-fno-builtin', '-s', 'SIDE_MODULE=1', '-O2', '-o', library_file, '-s', 'WASM=' + str(wasm), '-s', 'EXPORT_ALL'] + library_args)
create_test_file('main.c', r'''
#include <dlfcn.h>
#include <stdio.h>
int main() {
puts("hello from main");
void *lib_handle = dlopen("%s", RTLD_NOW);
if (!lib_handle) {
puts("cannot load side module");
puts(dlerror());
return 1;
}
typedef void (*voidfunc)();
voidfunc x = (voidfunc)dlsym(lib_handle, "library_func");
if (!x) puts("cannot find side function");
else x();
}
''' % library_file)
self.run_process([EMCC, 'main.c', '--embed-file', library_file, '-O2', '-s', 'WASM=' + str(wasm)] + main_args)
self.assertContained(expected, self.run_js('a.out.js', assert_returncode=assert_returncode))
size = os.path.getsize('a.out.js')
if wasm:
size += os.path.getsize('a.out.wasm')
side_size = os.path.getsize(library_file)
print(' sizes:', size, side_size)
return (size, side_size)
def percent_diff(x, y):
small = min(x, y)
large = max(x, y)
return float(100 * large) / small - 100
full = test(main_args=['-s', 'MAIN_MODULE=1'])
# printf is not used in main, but libc was linked in, so it's there
printf = test(main_args=['-s', 'MAIN_MODULE=1'], library_args=['-DUSE_PRINTF'])
# main module tests
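# (MAIN_MODULE=1 keeps all symbols in the main module; MAIN_MODULE=2 applies
# dead code elimination there, so only explicitly exported symbols remain
# available to side modules)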
# dce in main, and it fails since puts is not exported
dce = test(main_args=['-s', 'MAIN_MODULE=2'], expected=('cannot', 'undefined'), assert_returncode=NON_ZERO)
# with exporting, it works
dce = test(main_args=['-s', 'MAIN_MODULE=2', '-s', 'EXPORTED_FUNCTIONS=["_main", "_puts"]'])
# printf is not used in main, and we dce, so we fail
dce_fail = test(main_args=['-s', 'MAIN_MODULE=2'], library_args=['-DUSE_PRINTF'], expected=('cannot', 'undefined'), assert_returncode=NON_ZERO)
# exporting printf in main keeps it alive for the library
dce_save = test(main_args=['-s', 'MAIN_MODULE=2', '-s', 'EXPORTED_FUNCTIONS=["_main", "_printf", "_puts"]'], library_args=['-DUSE_PRINTF'])
self.assertLess(percent_diff(full[0], printf[0]), 4)
self.assertLess(percent_diff(dce[0], dce_fail[0]), 4)
self.assertLess(dce[0], 0.2 * full[0]) # big effect, 80%+ is gone
self.assertGreater(dce_save[0], 1.05 * dce[0]) # save exported all of printf
# side module tests
# mode 2, so dce in side, but library_func is not exported, so it is dce'd
side_dce_fail = test(main_args=['-s', 'MAIN_MODULE=1'], library_args=['-s', 'SIDE_MODULE=2'], expected='cannot find side function')
# mode 2, so dce in side, but library_func is explicitly exported, so it is kept
side_dce_work = test(main_args=['-s', 'MAIN_MODULE=1'], library_args=['-s', 'SIDE_MODULE=2', '-s', 'EXPORTED_FUNCTIONS=["_library_func"]'], expected='hello from library')
self.assertLess(side_dce_fail[1], 0.95 * side_dce_work[1]) # removing that function saves a chunk
def test_ld_library_path(self):
create_test_file('hello1.c', r'''
#include <stdio.h>
void hello1() {
printf("Hello1\n");
return;
}
''')
create_test_file('hello2.c', r'''
#include <stdio.h>
void hello2() {
printf("Hello2\n");
return;
}
''')
create_test_file('hello3.c', r'''
#include <stdio.h>
void hello3() {
printf ("Hello3\n");
return;
}
''')
create_test_file('hello4.c', r'''
#include <stdio.h>
#include <math.h>
double hello4(double x) {
printf("Hello4\n");
return fmod(x, 2.0);
}
''')
create_test_file('pre.js', r'''
Module['preRun'].push(function (){
ENV['LD_LIBRARY_PATH']='/lib:/usr/lib';
});
''')
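# LD_LIBRARY_PATH, set via ENV in pre.js, tells dlopen where to look for the
# side modules embedded under /lib and /usr/lib below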
create_test_file('main.c', r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
int main() {
void *h;
void (*f)();
double (*f2)(double);
h = dlopen("libhello1.wasm", RTLD_NOW);
f = dlsym(h, "hello1");
f();
dlclose(h);
h = dlopen("libhello2.wasm", RTLD_NOW);
f = dlsym(h, "hello2");
f();
dlclose(h);
h = dlopen("libhello3.wasm", RTLD_NOW);
f = dlsym(h, "hello3");
f();
dlclose(h);
h = dlopen("/usr/local/lib/libhello4.wasm", RTLD_NOW);
f2 = dlsym(h, "hello4");
double result = f2(5.5);
dlclose(h);
if (result == 1.5) {
printf("Ok\n");
}
return 0;
}
''')
self.run_process([EMCC, '-o', 'libhello1.wasm', 'hello1.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
self.run_process([EMCC, '-o', 'libhello2.wasm', 'hello2.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
self.run_process([EMCC, '-o', 'libhello3.wasm', 'hello3.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
self.run_process([EMCC, '-o', 'libhello4.wasm', 'hello4.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
self.run_process([EMCC, '-o', 'main.js', 'main.c', '-s', 'MAIN_MODULE=1', '-s', 'INITIAL_MEMORY=' + str(32 * 1024 * 1024),
'--embed-file', 'libhello1.wasm@/lib/libhello1.wasm',
'--embed-file', 'libhello2.wasm@/usr/lib/libhello2.wasm',
'--embed-file', 'libhello3.wasm@/libhello3.wasm',
'--embed-file', 'libhello4.wasm@/usr/local/lib/libhello4.wasm',
'--pre-js', 'pre.js'])
out = self.run_js('main.js')
self.assertContained('Hello1', out)
self.assertContained('Hello2', out)
self.assertContained('Hello3', out)
self.assertContained('Hello4', out)
self.assertContained('Ok', out)
def test_dlopen_bad_flags(self):
create_test_file('main.c', r'''
#include <dlfcn.h>
#include <stdio.h>
int main() {
void* h = dlopen("lib.so", 0);
if (h) {
printf("expected dlopen to fail\n");
return 1;
}
printf("%s\n", dlerror());
return 0;
}
''')
self.run_process([EMCC, 'main.c', '-s', 'MAIN_MODULE=2'])
out = self.run_js('a.out.js')
self.assertContained('invalid mode for dlopen(): Either RTLD_LAZY or RTLD_NOW is required', out)
def test_dlopen_rtld_global(self):
# This test checks RTLD_GLOBAL where a module is loaded
# before the module providing a global it needs is loaded. In asm.js we
# use JS to create a redirection function. In wasm we just have wasm, so
# we need to introspect the wasm module. Browsers may add that
# eventually, or we could ship a little library that does it.
create_test_file('hello1.c', r'''
#include <stdio.h>
extern int hello1_val;
int hello1_val = 3;
void hello1(int i) {
printf("hello1_val by hello1:%d\n",hello1_val);
printf("Hello%d\n",i);
}
''')
create_test_file('hello2.c', r'''
#include <stdio.h>
extern int hello1_val;
extern void hello1(int);
void hello2(int i) {
void (*f) (int);
printf("hello1_val by hello2:%d\n",hello1_val);
f = hello1;
f(i);
}
''')
create_test_file('main.c', r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
int main(int argc,char** argv) {
void *h;
void *h2;
void (*f) (int);
h = dlopen("libhello1.wasm", RTLD_NOW|RTLD_GLOBAL);
h2 = dlopen("libhello2.wasm", RTLD_NOW|RTLD_GLOBAL);
f = dlsym(h, "hello1");
f(1);
f = dlsym(h2, "hello2");
f(2);
dlclose(h);
dlclose(h2);
return 0;
}
''')
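# libhello1 is dlopen'd with RTLD_GLOBAL before libhello2, so hello1 and
# hello1_val are already visible when libhello2's references are resolved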
self.run_process([EMCC, '-o', 'libhello1.wasm', 'hello1.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
self.run_process([EMCC, '-o', 'libhello2.wasm', 'hello2.c', '-s', 'SIDE_MODULE=1', '-s', 'EXPORT_ALL=1'])
self.run_process([EMCC, '-o', 'main.js', 'main.c', '-s', 'MAIN_MODULE=1',
'--embed-file', 'libhello1.wasm',
'--embed-file', 'libhello2.wasm'])
out = self.run_js('main.js')
self.assertContained('Hello1', out)
self.assertContained('Hello2', out)
self.assertContained('hello1_val by hello1:3', out)
self.assertContained('hello1_val by hello2:3', out)
def test_dlsym_rtld_default(self):
create_test_file('main.c', r'''
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
#include <emscripten.h>
EMSCRIPTEN_KEEPALIVE int foo() {
return 42;
}
EMSCRIPTEN_KEEPALIVE int64_t foo64() {
return 64;
}
int main(int argc, char** argv) {
int (*f)();
f = dlsym(RTLD_DEFAULT, "foo");
if (!f) {
printf("dlsym failed: %s\n", dlerror());
return 1;
}
printf("foo -> %d\n", f());
int64_t (*f64)();
f64 = dlsym(RTLD_DEFAULT, "foo64");
if (!f64) {
printf("dlsym failed: %s\n", dlerror());
return 1;
}
printf("foo64 -> %lld\n", f64());
f = dlsym(RTLD_DEFAULT, "bar");
printf("bar -> %p\n", f);
return 0;
}
''')
self.run_process([EMCC, 'main.c', '-s', 'MAIN_MODULE=2'])
out = self.run_js('a.out.js')
self.assertContained('foo -> 42', out)
self.assertContained('foo64 -> 64', out)
self.assertContained('bar -> 0', out)
def test_dlsym_rtld_default_js_symbol(self):
create_test_file('lib.js', '''
mergeInto(LibraryManager.library, {
foo__sig: 'ii',
foo: function(f) { return f + 10; },
bar: function(f) { return f + 10; },
});
''')
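# 'foo' declares a __sig, so dlsym can wrap it via addFunction; 'bar' does not,
# which is expected to fail below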
create_test_file('main.c', r'''
#include <stdio.h>
#include <utime.h>
#include <sys/types.h>
#include <dlfcn.h>
typedef int (*func_type_t)(int arg);
int main(int argc, char** argv) {
func_type_t fp = (func_type_t)dlsym(RTLD_DEFAULT, argv[1]);
if (!fp) {
printf("dlsym failed: %s\n", dlerror());
return 1;
}
printf("%s -> %d\n", argv[1], fp(10));
return 0;
}
''')
self.run_process([EMCC, 'main.c',
'--js-library=lib.js',
'-sMAIN_MODULE=2',
'-sDEFAULT_LIBRARY_FUNCS_TO_INCLUDE=[foo,bar]',
'-sEXPORTED_FUNCTIONS=[_main,_foo,_bar]'])
# First test the successful use of a JS function with dlsym
out = self.run_js('a.out.js', args=['foo'])
self.assertContained('foo -> 20', out)
# Now test the failure case for when __sig is not present
out = self.run_js('a.out.js', args=['bar'], assert_returncode=NON_ZERO)
self.assertContained('Missing signature argument to addFunction: function _bar', out)
def test_main_module_without_exceptions_message(self):
# A side module that needs exceptions needs a main module with that
# support enabled; show a clear message in that case.
create_test_file('side.cpp', r'''
#include <exception>
#include <stdio.h>
extern "C" void test_throw() {
try {
throw 42;
} catch(int x) {
printf("catch %d.\n", x);
return;
}
puts("bad location");
}
''')
create_test_file('main.cpp', r'''
#include <assert.h>
#include <stdio.h>
#include <stdlib.h>
#include <string.h>
#include <dlfcn.h>
typedef void (*voidf)();
int main() {
void* h = dlopen("libside.wasm", RTLD_NOW);
assert(h);
voidf f = (voidf)dlsym(h, "test_throw");
assert(f);
f();
return 0;
}
''')
self.run_process([EMCC, '-o', 'libside.wasm', 'side.cpp', '-s', 'SIDE_MODULE=1', '-fexceptions'])
def build_main(args):
print(args)
with env_modify({'EMCC_FORCE_STDLIBS': 'libc++abi'}):
self.run_process([EMCC, 'main.cpp', '-s', 'MAIN_MODULE=1',
'--embed-file', 'libside.wasm'] + args)
build_main([])
out = self.run_js('a.out.js', assert_returncode=NON_ZERO)
self.assertContained('Exception catching is disabled, this exception cannot be caught.', out)
self.assertContained('note: in dynamic linking, if a side module wants exceptions, the main module must be built with that support', out)
build_main(['-fexceptions'])
out = self.run_js('a.out.js')
self.assertContained('catch 42', out)
def test_debug_asmLastOpts(self):
create_test_file('src.c', r'''
#include <stdio.h>
struct Dtlink_t { struct Dtlink_t* right; /* right child */
union
{ unsigned int _hash; /* hash value */
struct Dtlink_t* _left; /* left child */
} hl;
};
int treecount(register struct Dtlink_t* e) {
return e ? treecount(e->hl._left) + treecount(e->right) + 1 : 0;
}
int main() {
printf("hello, world!\n");
}
''')
self.run_process([EMCC, 'src.c', '-s', 'EXPORTED_FUNCTIONS=["_main", "_treecount"]', '--minify', '0', '-g4', '-Oz'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_emscripten_print_double(self):
create_test_file('src.c', r'''
#include <stdio.h>
#include <assert.h>
#include <emscripten.h>
void test(double d) {
char buffer[100], buffer2[100];
unsigned len, len2, len3;
len = emscripten_print_double(d, NULL, -1);
len2 = emscripten_print_double(d, buffer, len+1);
assert(len == len2);
buffer[len] = 0;
len3 = snprintf(buffer2, 100, "%g", d);
printf("|%g : %u : %s : %s : %d|\n", d, len, buffer, buffer2, len3);
}
int main() {
printf("\n");
test(0);
test(1);
test(-1);
test(1.234);
test(-1.234);
test(1.1234E20);
test(-1.1234E20);
test(1.1234E-20);
test(-1.1234E-20);
test(1.0/0.0);
test(-1.0/0.0);
}
''')
self.run_process([EMCC, 'src.c'])
out = self.run_js('a.out.js')
self.assertContained('''
|0 : 1 : 0 : 0 : 1|
|1 : 1 : 1 : 1 : 1|
|-1 : 2 : -1 : -1 : 2|
|1.234 : 5 : 1.234 : 1.234 : 5|
|-1.234 : 6 : -1.234 : -1.234 : 6|
|1.1234e+20 : 21 : 112340000000000000000 : 1.1234e+20 : 10|
|-1.1234e+20 : 22 : -112340000000000000000 : -1.1234e+20 : 11|
|1.1234e-20 : 10 : 1.1234e-20 : 1.1234e-20 : 10|
|-1.1234e-20 : 11 : -1.1234e-20 : -1.1234e-20 : 11|
|inf : 8 : Infinity : inf : 3|
|-inf : 9 : -Infinity : -inf : 4|
''', out)
def test_emscripten_scan_stack(self):
create_test_file('src.cpp', r'''
#include <set>
#include <emscripten.h>
#include <stdio.h>
#include <assert.h>
std::set<int> seenInts;
void scan(void* x, void* y) {
printf("scan\n");
int* p = (int*)x;
int* q = (int*)y;
// The callback sends us the [low, high) range.
assert(p < q);
// The range is of a reasonable size - not all of memory.
assert(q - p < 100);
while (p < q) {
seenInts.insert(*p);
p++;
}
}
int main() {
int x;
int* y = &x;
*y = 12345678;
emscripten_scan_stack(scan);
assert(seenInts.count(12345678));
puts("ok");
}
''')
self.run_process([EMCC, 'src.cpp'])
self.assertContained('ok', self.run_js('a.out.js'))
def test_no_warn_exported_jslibfunc(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'),
'-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=["alGetError"]',
'-s', 'EXPORTED_FUNCTIONS=["_main", "_alGetError"]'])
# Same again but with `_alGet` which does not exist. This is a regression
# test for a bug we had where any prefix of a valid function was accepted.
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'),
'-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=["alGetError"]',
'-s', 'EXPORTED_FUNCTIONS=["_main", "_alGet"]'])
self.assertContained('undefined exported symbol: "_alGet"', err)
def test_musl_syscalls(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c')])
src = open('a.out.js').read()
# there should be no musl syscalls in hello world output
self.assertNotContained('__syscall', src)
def test_emcc_dev_null(self):
out = self.run_process([EMCC, '-dM', '-E', '-x', 'c', os.devnull], stdout=PIPE).stdout
self.assertContained('#define __EMSCRIPTEN__ 1', out) # all our defines should show up
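# umask(0) should work and not interfere with normal program output.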
def test_umask_0(self):
create_test_file('src.c', r'''\
#include <sys/stat.h>
#include <stdio.h>
int main() {
umask(0);
printf("hello, world!\n");
}
''')
self.run_process([EMCC, 'src.c'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_no_missing_symbols(self): # simple hello world should not show any missing symbols
self.run_process([EMCC, path_from_root('tests', 'hello_world.c')])
# main() is implemented in C, and even if requested from JS, we should not warn
create_test_file('library_foo.js', '''
mergeInto(LibraryManager.library, {
my_js__deps: ['main'],
my_js: (function() {
return function() {
console.log("hello " + _nonexistingvariable);
};
}()),
});
''')
create_test_file('test.cpp', '''\
#include <stdio.h>
#include <stdlib.h>
extern "C" {
extern void my_js();
}
int main() {
my_js();
return EXIT_SUCCESS;
}
''')
self.run_process([EMCC, 'test.cpp', '--js-library', 'library_foo.js'])
# but we do error on a missing js var
create_test_file('library_foo_missing.js', '''
mergeInto(LibraryManager.library, {
my_js__deps: ['main', 'nonexistingvariable'],
my_js: (function() {
return function() {
console.log("hello " + _nonexistingvariable);
};
}()),
});
''')
err = self.expect_fail([EMCC, 'test.cpp', '--js-library', 'library_foo_missing.js'])
self.assertContained('undefined symbol: nonexistingvariable', err)
# and also for missing C code, of course (without the --js-library, it's just a missing C method)
err = self.expect_fail([EMCC, 'test.cpp'])
self.assertContained('undefined symbol: my_js', err)
def test_js_lib_to_system_lib(self):
# memset is in compiled code, so a js library __deps can't access it. it
# would need to be in deps_info.json or EXPORTED_FUNCTIONS
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
depper__deps: ['memset'],
depper: function(ptr) {
_memset(ptr, 'd'.charCodeAt(0), 10);
},
});
''')
create_test_file('test.cpp', r'''
#include <string.h>
#include <stdio.h>
extern "C" {
extern void depper(char*);
}
int main(int argc, char** argv) {
char buffer[11];
buffer[10] = '\0';
// call by a pointer, to force linking of memset, no llvm intrinsic here
volatile auto ptr = memset;
(*ptr)(buffer, 'a', 10);
depper(buffer);
puts(buffer);
}
''')
err = self.expect_fail([EMCC, 'test.cpp', '--js-library', 'lib.js'])
self.assertContained('_memset may need to be added to EXPORTED_FUNCTIONS if it arrives from a system library', err)
# without the dep, and with EXPORTED_FUNCTIONS, it works ok
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
depper: function(ptr) {
_memset(ptr, 'd'.charCodeAt(0), 10);
},
});
''')
self.run_process([EMCC, 'test.cpp', '--js-library', 'lib.js', '-s', 'EXPORTED_FUNCTIONS=[_main,_memset]'])
self.assertContained('dddddddddd', self.run_js('a.out.js'))
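# realpath() should resolve the absolute path of an embedded file.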
def test_realpath(self):
create_test_file('src.c', r'''
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
int main(int argc, char **argv) {
char *t_realpath_buf = realpath("/boot/README.txt", NULL);
if (!t_realpath_buf) {
perror("Resolve failed");
return 1;
}
printf("Resolved: %s\n", t_realpath_buf);
free(t_realpath_buf);
return 0;
}
''')
ensure_dir('boot')
create_test_file(os.path.join('boot', 'README.txt'), ' ')
self.run_process([EMCC, 'src.c', '-s', 'SAFE_HEAP', '--embed-file', 'boot'])
self.assertContained('Resolved: /boot/README.txt', self.run_js('a.out.js'))
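# realpath() should also work for files on a NODEFS mount.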
def test_realpath_nodefs(self):
create_test_file('src.c', r'''
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
#include <emscripten.h>
#define TEST_PATH "/working/TEST_NODEFS.txt"
int main(int argc, char **argv) {
errno = 0;
EM_ASM({
FS.mkdir('/working');
FS.mount(NODEFS, { root: '.' }, '/working');
});
char *t_realpath_buf = realpath(TEST_PATH, NULL);
if (NULL == t_realpath_buf) {
perror("Resolve failed");
return 1;
} else {
printf("Resolved: %s\n", t_realpath_buf);
free(t_realpath_buf);
return 0;
}
}
''')
create_test_file('TEST_NODEFS.txt', ' ')
self.run_process([EMCC, 'src.c', '-lnodefs.js'])
self.assertContained('Resolved: /working/TEST_NODEFS.txt', self.run_js('a.out.js'))
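# realpath() should handle files, folders, the root and relative paths, and report failure for nonexistent or empty paths.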
def test_realpath_2(self):
ensure_dir('Folder')
create_test_file('src.c', r'''
#include <stdlib.h>
#include <stdio.h>
#include <errno.h>
int testrealpath(const char* path) {
errno = 0;
char *t_realpath_buf = realpath(path, NULL);
if (NULL == t_realpath_buf) {
printf("Resolve failed: \"%s\"\n",path);fflush(stdout);
return 1;
} else {
printf("Resolved: \"%s\" => \"%s\"\n", path, t_realpath_buf);fflush(stdout);
free(t_realpath_buf);
return 0;
}
}
int main(int argc, char **argv)
{
// files:
testrealpath("testfile.txt");
testrealpath("Folder/testfile.txt");
testrealpath("testnonexistentfile.txt");
// folders
testrealpath("Folder");
testrealpath("/Folder");
testrealpath("./");
testrealpath("");
testrealpath("/");
return 0;
}
''')
create_test_file('testfile.txt', '')
create_test_file(os.path.join('Folder', 'testfile.txt'), '')
self.run_process([EMCC, 'src.c', '--embed-file', 'testfile.txt', '--embed-file', 'Folder'])
self.assertContained('''Resolved: "testfile.txt" => "/testfile.txt"
Resolved: "Folder/testfile.txt" => "/Folder/testfile.txt"
Resolve failed: "testnonexistentfile.txt"
Resolved: "Folder" => "/Folder"
Resolved: "/Folder" => "/Folder"
Resolved: "./" => "/"
Resolve failed: ""
Resolved: "/" => "/"
''', self.run_js('a.out.js'))
def test_no_warnings(self):
# build once before to make sure system libs etc. exist
self.run_process([EMCC, path_from_root('tests', 'hello_libcxx.cpp')])
# check that there is nothing in stderr for a regular compile
err = self.run_process([EMCC, path_from_root('tests', 'hello_libcxx.cpp')], stderr=PIPE).stderr
self.assertEqual(err, '')
def test_dlmalloc_modes(self):
create_test_file('src.cpp', r'''
#include <stdlib.h>
#include <stdio.h>
int main() {
void* c = malloc(1024);
free(c);
free(c);
printf("double-freed\n");
}
''')
self.run_process([EMCC, 'src.cpp'])
self.assertContained('double-freed', self.run_js('a.out.js'))
# in debug mode, the double-free is caught
self.run_process([EMCC, 'src.cpp', '-s', 'ASSERTIONS=2'])
seen_error = False
out = '?'
try:
out = self.run_js('a.out.js')
except Exception:
seen_error = True
self.assertTrue(seen_error, out)
def test_mallocs(self):
def run(opts):
print(opts)
sizes = {}
for malloc, name in (
('dlmalloc', 'dlmalloc'),
(None, 'default'),
('emmalloc', 'emmalloc')
):
print(malloc, name)
cmd = [EMCC, path_from_root('tests', 'hello_libcxx.cpp'), '-o', 'a.out.js'] + opts
if malloc:
cmd += ['-s', 'MALLOC="%s"' % malloc]
print(cmd)
self.run_process(cmd)
sizes[name] = os.path.getsize('a.out.wasm')
print(sizes)
# dlmalloc is the default
self.assertEqual(sizes['dlmalloc'], sizes['default'])
# emmalloc is much smaller
self.assertLess(sizes['emmalloc'], sizes['dlmalloc'] - 5000)
run([])
run(['-O2'])
def test_emmalloc_2GB(self):
def test(args, text=None):
if text:
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MALLOC=emmalloc'] + args)
self.assertContained(text, stderr)
else:
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MALLOC=emmalloc'] + args)
test(['-s', 'INITIAL_MEMORY=2GB'], 'INITIAL_MEMORY must be less than 2GB due to current spec limitations')
# emmalloc allows growth by default (as the max size is fine), but not if
# a too-high max is set
test(['-s', 'ALLOW_MEMORY_GROWTH'])
test(['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MAXIMUM_MEMORY=1GB'])
test(['-s', 'ALLOW_MEMORY_GROWTH', '-s', 'MAXIMUM_MEMORY=3GB'], 'emmalloc only works on <2GB of memory. Use the default allocator, or decrease MAXIMUM_MEMORY')
def test_2GB_plus(self):
# when the heap size can be over 2GB, we rewrite pointers to be unsigned
def test(page_diff):
args = [EMCC, path_from_root('tests', 'hello_world.c'), '-O2', '-s', 'ALLOW_MEMORY_GROWTH']
if page_diff is not None:
args += ['-s', 'MAXIMUM_MEMORY=%d' % (2**31 + page_diff * 64 * 1024)]
print(args)
self.run_process(args)
return os.path.getsize('a.out.js')
less = test(-1)
equal = test(0)
more = test(1)
none = test(None)
# exactly 2GB still doesn't require unsigned pointers, as we can't address
# the 2GB location in memory
self.assertEqual(less, equal)
self.assertLess(equal, more)
# not specifying maximum memory does not result in unsigned pointers, as the
# default maximum memory is 2GB.
self.assertEqual(less, none)
@parameterized({
'normal': (['-s', 'WASM_BIGINT=0'], 'testbind.js'),
'bigint': (['-s', 'WASM_BIGINT=1'], 'testbind_bigint.js'),
})
def test_sixtyfour_bit_return_value(self, args, bind_js):
# This test checks that the most significant 32 bits of a 64 bit long are correctly made available
# to native JavaScript applications that wish to interact with compiled code returning 64 bit longs.
# The MS 32 bits should be available in Runtime.getTempRet0() even when compiled with -O2 --closure 1
# Compile test.c and wrap it in a native JavaScript binding so we can call our compiled function from JS.
self.run_process([EMCC, path_from_root('tests', 'return64bit', 'test.c'),
'--pre-js', path_from_root('tests', 'return64bit', 'testbindstart.js'),
'--pre-js', path_from_root('tests', 'return64bit', bind_js),
'--post-js', path_from_root('tests', 'return64bit', 'testbindend.js'),
'-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=[$dynCall]',
'-s', 'EXPORTED_FUNCTIONS=[_test_return64]', '-o', 'test.js', '-O2',
'--closure', '1', '-g1', '-s', 'WASM_ASYNC_COMPILATION=0'] + args)
# Simple test program to load the test.js binding library and call the binding to the
# C function returning the 64 bit long.
create_test_file('testrun.js', '''
var test = require("./test.js");
test.runtest();
''')
# Run the test and confirm the output is as expected.
out = self.run_js('testrun.js', engine=config.NODE_JS + ['--experimental-wasm-bigint'])
self.assertContained('''\
input = 0xaabbccdd11223344
low = 5678
high = 1234
input = 0xabcdef1912345678
low = 5678
high = 1234
''', out)
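# -l and -I flags passed as separate arguments (e.g. '-l m') should be accepted.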
def test_lib_include_flags(self):
self.run_process([EMCC] + '-l m -l c -I'.split() + [path_from_root('tests', 'include_test'), path_from_root('tests', 'lib_include_flags.c')])
def test_dash_s_link_flag(self):
# -s is also a valid link flag. We try to distinguish between this case and when
# it's used to set a setting, based on looking at the argument that follows.
# Test the case when -s is the last flag
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
# Test the case when the following flag is all uppercase but starts with a `-`
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', '-DFOO'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
# Test the case when the following flag is not all uppercase
self.run_process([EMCC, '-s', path_from_root('tests', 'hello_world.cpp')])
self.assertContained('hello, world!', self.run_js('a.out.js'))
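# -s settings should accept @file response files for string and list values, and report a clear error if the file is missing.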
def test_dash_s_response_file_string(self):
create_test_file('response_file', '"MyModule"\n')
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'EXPORT_NAME=@response_file'])
def test_dash_s_response_file_list(self):
create_test_file('response_file', '["_main", "_malloc"]\n')
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'EXPORTED_FUNCTIONS=@response_file'])
def test_dash_s_response_file_misssing(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'EXPORTED_FUNCTIONS=@foo'])
self.assertContained('error: foo: file not found parsing argument: EXPORTED_FUNCTIONS=@foo', err)
def test_dash_s_unclosed_quote(self):
# Unclosed quote
err = self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), "-s", "TEST_KEY='MISSING_QUOTE"], stderr=PIPE, check=False).stderr
self.assertNotContained('AssertionError', err) # Do not mention that it is an assertion error
self.assertContained('unclosed opened quoted string. expected final character to be "\'"', err)
def test_dash_s_single_quote(self):
# Only one quote
err = self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), "-s", "TEST_KEY='"], stderr=PIPE, check=False).stderr
self.assertNotContained('AssertionError', err) # Do not mention that it is an assertion error
self.assertContained('unclosed opened quoted string.', err)
def test_dash_s_unclosed_list(self):
# Unclosed list
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), "-s", "TEST_KEY=[Value1, Value2"])
self.assertNotContained('AssertionError', err) # Do not mention that it is an assertion error
self.assertContained('unclosed opened string list. expected final character to be "]"', err)
def test_dash_s_valid_list(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), "-s", "TEST_KEY=[Value1, \"Value2\"]"])
self.assertNotContained('a problem occurred in evaluating the content after a "-s", specifically', err)
def test_dash_s_wrong_type(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'EXPORTED_FUNCTIONS=foo'])
self.assertContained("error: setting `EXPORTED_FUNCTIONS` expects `<class 'list'>` but got `<class 'str'>`", err)
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'EXIT_RUNTIME=[foo,bar]'])
self.assertContained("error: setting `EXIT_RUNTIME` expects `<class 'int'>` but got `<class 'list'>`", err)
def test_dash_s_typo(self):
# with suggestions
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'DISABLE_EXCEPTION_CATCH=1'])
self.assertContained("Attempt to set a non-existent setting: 'DISABLE_EXCEPTION_CATCH'", stderr)
self.assertContained('did you mean one of DISABLE_EXCEPTION_CATCHING', stderr)
# no suggestions
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'CHEEZ=1'])
self.assertContained("perhaps a typo in emcc\'s -s X=Y notation?", stderr)
self.assertContained('(see src/settings.js for valid values)', stderr)
# suggestions do not include renamed legacy settings
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ZBINARYEN_ASYNC_COMPILATION'])
self.assertContained("Attempt to set a non-existent setting: 'ZBINARYEN_ASYNC_COMPILATION'", stderr)
self.assertNotContained(' BINARYEN_ASYNC_COMPILATION', stderr)
def test_dash_s_no_space(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-sEXPORT_ALL=1'])
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-sEXPORTED_FUNCTIONS=["foo"]'])
self.assertContained('error: undefined exported symbol: "foo"', err)
def test_zeroinit(self):
create_test_file('src.c', r'''
#include <stdio.h>
int buf[1048576];
int main() {
printf("hello, world! %d\n", buf[123456]);
return 0;
}
''')
self.run_process([EMCC, 'src.c', '-O2', '-g'])
size = os.path.getsize('a.out.wasm')
# size should be much smaller than the size of that zero-initialized buffer
self.assertLess(size, 123456 / 2)
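# A specific NaN bit pattern stored through a union should be preserved: output shows 'nan' and the original 0x7fc01234 bits.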
def test_canonicalize_nan_warning(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
union U {
int x;
float y;
} a;
int main() {
a.x = 0x7FC01234;
printf("%f\n", a.y);
printf("0x%x\n", a.x);
return 0;
}
''')
self.run_process([EMCC, 'src.cpp', '-O1'])
out = self.run_js('a.out.js')
self.assertContained('nan\n', out)
self.assertContained('0x7fc01234\n', out)
def test_memory_growth_noasm(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O2', '-s', 'ALLOW_MEMORY_GROWTH=1'])
src = open('a.out.js').read()
assert 'use asm' not in src
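# Passing a 64-bit integer argument directly to EM_ASM is unsupported and should fail to build.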
def test_EM_ASM_i64(self):
create_test_file('src.cpp', '''
#include <stdint.h>
#include <emscripten.h>
int main() {
EM_ASM({
out('inputs: ' + $0 + ', ' + $1 + '.');
}, int64_t(0x12345678ABCDEF1FLL));
}
''')
self.expect_fail([EMCC, 'src.cpp', '-Oz'])
def test_eval_ctors_non_terminating(self):
for wasm in (1, 0):
print('wasm', wasm)
src = r'''
struct C {
C() {
volatile int y = 0;
while (y == 0) {}
}
};
C always;
int main() {}
'''
create_test_file('src.cpp', src)
self.run_process([EMCC, 'src.cpp', '-O2', '-s', 'EVAL_CTORS=1', '-profiling-funcs', '-s', 'WASM=%d' % wasm])
@disabled('EVAL_CTORS is currently disabled')
def test_eval_ctors(self):
for wasm in (1, 0):
print('wasm', wasm)
print('check no ctors is ok')
# on by default in -Oz, but user-overridable
def get_size(args):
print('get_size', args)
self.run_process([EMCC, path_from_root('tests', 'hello_libcxx.cpp'), '-s', 'WASM=%d' % wasm] + args)
self.assertContained('hello, world!', self.run_js('a.out.js'))
if wasm:
codesize = self.count_wasm_contents('a.out.wasm', 'funcs')
memsize = self.count_wasm_contents('a.out.wasm', 'memory-data')
else:
codesize = os.path.getsize('a.out.js')
memsize = os.path.getsize('a.out.js.mem')
return (codesize, memsize)
def check_size(left, right):
# can't measure just the mem out of the wasm, so ignore [1] for wasm
if left[0] == right[0] and left[1] == right[1]:
return 0
if left[0] < right[0] and left[1] > right[1]:
return -1 # smaller code, bigger mem
if left[0] > right[0] and left[1] < right[1]:
return 1
assert False, [left, right]
o2_size = get_size(['-O2'])
assert check_size(get_size(['-O2']), o2_size) == 0, 'deterministic'
assert check_size(get_size(['-O2', '-s', 'EVAL_CTORS=1']), o2_size) < 0, 'eval_ctors works if user asks for it'
oz_size = get_size(['-Oz'])
assert check_size(get_size(['-Oz']), oz_size) == 0, 'deterministic'
assert check_size(get_size(['-Oz', '-s', 'EVAL_CTORS=1']), oz_size) == 0, 'eval_ctors is on by default in oz'
assert check_size(get_size(['-Oz', '-s', 'EVAL_CTORS=0']), oz_size) == 1, 'eval_ctors can be turned off'
linkable_size = get_size(['-Oz', '-s', 'EVAL_CTORS=1', '-s', 'LINKABLE=1'])
assert check_size(get_size(['-Oz', '-s', 'EVAL_CTORS=0', '-s', 'LINKABLE=1']), linkable_size) == 1, 'noticeable difference in linkable too'
def test_eval_ctor_ordering(self):
# ensure order of execution remains correct, even with a bad ctor
def test(p1, p2, p3, last, expected):
src = r'''
#include <stdio.h>
#include <stdlib.h>
volatile int total = 0;
struct C {
C(int x) {
volatile int y = x;
y++;
y--;
if (y == 0xf) {
printf("you can't eval me ahead of time\n"); // bad ctor
}
total <<= 4;
total += int(y);
}
};
C __attribute__((init_priority(%d))) c1(0x5);
C __attribute__((init_priority(%d))) c2(0x8);
C __attribute__((init_priority(%d))) c3(%d);
int main() {
printf("total is 0x%%x.\n", total);
}
''' % (p1, p2, p3, last)
create_test_file('src.cpp', src)
self.run_process([EMCC, 'src.cpp', '-O2', '-s', 'EVAL_CTORS=1', '-profiling-funcs', '-s', 'WASM=%d' % wasm])
self.assertContained('total is %s.' % hex(expected), self.run_js('a.out.js'))
shutil.copyfile('a.out.js', 'x' + hex(expected) + '.js')
if wasm:
shutil.copyfile('a.out.wasm', 'x' + hex(expected) + '.wasm')
return self.count_wasm_contents('a.out.wasm', 'funcs')
else:
return open('a.out.js').read().count('function _')
print('no bad ctor')
first = test(1000, 2000, 3000, 0xe, 0x58e) # noqa
second = test(3000, 1000, 2000, 0xe, 0x8e5) # noqa
third = test(2000, 3000, 1000, 0xe, 0xe58) # noqa
print(first, second, third)
assert first == second and second == third
print('with bad ctor')
first = test(1000, 2000, 3000, 0xf, 0x58f) # noqa; 2 will succeed
second = test(3000, 1000, 2000, 0xf, 0x8f5) # noqa; 1 will succeed
third = test(2000, 3000, 1000, 0xf, 0xf58) # noqa; 0 will succeed
print(first, second, third)
assert first < second and second < third, [first, second, third]
@uses_canonical_tmp
@with_env_modify({'EMCC_DEBUG': '1'})
def test_eval_ctors_debug_output(self):
for wasm in (1, 0):
print('wasm', wasm)
create_test_file('lib.js', r'''
mergeInto(LibraryManager.library, {
external_thing: function() {}
});
''')
create_test_file('src.cpp', r'''
extern "C" void external_thing();
struct C {
C() { external_thing(); } // don't remove this!
};
C c;
int main() {}
''')
err = self.run_process([EMCC, 'src.cpp', '--js-library', 'lib.js', '-Oz', '-s', 'WASM=%d' % wasm], stderr=PIPE).stderr
# disabled in the wasm backend
self.assertContained('Ctor evalling in the wasm backend is disabled', err)
self.assertNotContained('ctor_evaller: not successful', err) # with logging
# TODO(sbc): Re-enable once ctor evaluation is working with the llvm backend.
# self.assertContained('external_thing', err) # the failing call should be mentioned
def test_override_js_execution_environment(self):
create_test_file('main.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({
out('environment is WEB? ' + ENVIRONMENT_IS_WEB);
out('environment is WORKER? ' + ENVIRONMENT_IS_WORKER);
out('environment is NODE? ' + ENVIRONMENT_IS_NODE);
out('environment is SHELL? ' + ENVIRONMENT_IS_SHELL);
});
}
''')
# use SINGLE_FILE since we don't want to depend on loading a side .wasm file on the environment in this test;
# with the wrong env we have very odd failures
self.run_process([EMCC, 'main.cpp', '-s', 'SINGLE_FILE=1'])
src = open('a.out.js').read()
envs = ['web', 'worker', 'node', 'shell']
for env in envs:
for engine in config.JS_ENGINES:
if engine == config.V8_ENGINE:
continue # ban v8, weird failures
actual = 'NODE' if engine == config.NODE_JS else 'SHELL'
print(env, actual, engine)
module = {'ENVIRONMENT': env}
if env != actual:
# avoid problems with arguments detection, which may cause very odd failures with the wrong environment code
module['arguments'] = []
curr = 'var Module = %s;\n' % str(module)
print(' ' + curr)
create_test_file('test.js', curr + src)
seen = self.run_js('test.js', engine=engine, assert_returncode=NON_ZERO)
self.assertContained('Module.ENVIRONMENT has been deprecated. To force the environment, use the ENVIRONMENT compile-time option (for example, -s ENVIRONMENT=web or -s ENVIRONMENT=node', seen)
def test_override_c_environ(self):
create_test_file('pre.js', r'''
var Module = {
preRun: [function() { ENV.hello = 'world' }]
};
''')
create_test_file('src.cpp', r'''
#include <stdlib.h>
#include <stdio.h>
int main() {
printf("|%s|\n", getenv("hello"));
}
''')
self.run_process([EMCC, 'src.cpp', '--pre-js', 'pre.js'])
self.assertContained('|world|', self.run_js('a.out.js'))
create_test_file('pre.js', r'''
var Module = {
preRun: [function(module) { module.ENV.hello = 'world' }]
};
''')
self.run_process([EMCC, 'src.cpp', '--pre-js', 'pre.js', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["ENV"]'])
self.assertContained('|world|', self.run_js('a.out.js'))
self.run_process([EMCC, 'src.cpp', '--pre-js', 'pre.js', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["ENV"]', '-s', 'MODULARIZE=1'])
output = self.run_process(config.NODE_JS + ['-e', 'require("./a.out.js")();'], stdout=PIPE, stderr=PIPE)
self.assertContained('|world|', output.stdout)
def test_warn_no_filesystem(self):
error = 'Filesystem support (FS) was not included. The problem is that you are using files from JS, but files were not used from C/C++, so filesystem support was not auto-included. You can force-include filesystem support with -s FORCE_FILESYSTEM=1'
self.run_process([EMCC, path_from_root('tests', 'hello_world.c')])
seen = self.run_js('a.out.js')
self.assertNotContained(error, seen)
def test(contents):
create_test_file('src.cpp', r'''
#include <stdio.h>
#include <emscripten.h>
int main() {
EM_ASM({ %s });
printf("hello, world!\n");
return 0;
}
''' % contents)
self.run_process([EMCC, 'src.cpp'])
self.assertContained(error, self.run_js('a.out.js', assert_returncode=NON_ZERO))
# might appear in handwritten code
test("FS.init()")
test("FS.createPreloadedFile('waka waka, just warning check')")
test("FS.createDataFile('waka waka, just warning check')")
test("FS.analyzePath('waka waka, just warning check')")
test("FS.loadFilesFromDB('waka waka, just warning check')")
# might appear in filesystem code from a separate script tag
test("Module['FS_createDataFile']('waka waka, just warning check')")
test("Module['FS_createPreloadedFile']('waka waka, just warning check')")
# text is in the source when needed, but when forcing FS, it isn't there
self.run_process([EMCC, 'src.cpp'])
self.assertContained(error, open('a.out.js').read())
self.run_process([EMCC, 'src.cpp', '-s', 'FORCE_FILESYSTEM=1']) # forcing FS means no need
self.assertNotContained(error, open('a.out.js').read())
self.run_process([EMCC, 'src.cpp', '-s', 'ASSERTIONS=0']) # no assertions, no need
self.assertNotContained(error, open('a.out.js').read())
self.run_process([EMCC, 'src.cpp', '-O2']) # optimized, so no assertions
self.assertNotContained(error, open('a.out.js').read())
def test_warn_module_print_err(self):
error = 'was not exported. add it to EXTRA_EXPORTED_RUNTIME_METHODS (see the FAQ)'
def test(contents, expected, args=[], assert_returncode=0):
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({ %s });
return 0;
}
''' % contents)
self.run_process([EMCC, 'src.cpp'] + args)
self.assertContained(expected, self.run_js('a.out.js', assert_returncode=assert_returncode))
# error shown (when assertions are on)
test("Module.print('x')", error, assert_returncode=NON_ZERO)
test("Module['print']('x')", error, assert_returncode=NON_ZERO)
test("Module.printErr('x')", error, assert_returncode=NON_ZERO)
test("Module['printErr']('x')", error, assert_returncode=NON_ZERO)
# when exported, all good
test("Module['print']('print'); Module['printErr']('err'); ", 'print\nerr', ['-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=["print", "printErr"]'])
def test_warn_unexported_main(self):
WARNING = 'main() is in the input files, but "_main" is not in EXPORTED_FUNCTIONS, which means it may be eliminated as dead code. Export it if you want main() to run.'
proc = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EXPORTED_FUNCTIONS=[]'], stderr=PIPE)
self.assertContained(WARNING, proc.stderr)
def test_source_file_with_fixed_language_mode(self):
create_test_file('src_tmp_fixed_lang', '''
#include <string>
#include <iostream>
int main() {
std::cout << "Test_source_fixed_lang_hello" << std::endl;
return 0;
}
''')
self.run_process([EMCC, '-Wall', '-x', 'c++', 'src_tmp_fixed_lang'])
self.assertContained('Test_source_fixed_lang_hello', self.run_js('a.out.js'))
stderr = self.expect_fail([EMCC, '-Wall', 'src_tmp_fixed_lang'])
self.assertContained('unknown file type: src_tmp_fixed_lang', stderr)
def test_disable_inlining(self):
create_test_file('test.c', r'''
#include <stdio.h>
void foo() {
printf("foo\n");
}
int main() {
foo();
return 0;
}
''')
# Without the 'INLINING_LIMIT=1', -O2 inlines foo()
cmd = [EMCC, '-c', 'test.c', '-O2', '-o', 'test.bc', '-s', 'INLINING_LIMIT=1', '-flto']
self.run_process(cmd)
# If foo() had been wrongly inlined above, internalizing foo and running
# global DCE makes foo DCE'd
opts = ['-internalize', '-internalize-public-api-list=main', '-globaldce']
self.run_process([shared.LLVM_OPT] + opts + ['test.bc', '-o', 'test2.bc'])
# For this test to be successful, foo() shouldn't have been inlined above and
# foo() should be in the function list
syms = building.llvm_nm('test2.bc', include_internal=True)
assert 'foo' in syms.defs, 'foo() should not be inlined'
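# --output_eol should control whether emitted .js/.html files use Windows (CRLF) or Linux (LF) line endings.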
def test_output_eol(self):
for params in [[], ['--proxy-to-worker'], ['--proxy-to-worker', '-s', 'WASM=0']]:
for output_suffix in ['html', 'js']:
for eol in ['windows', 'linux']:
files = ['a.js']
if output_suffix == 'html':
files += ['a.html']
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'a.' + output_suffix, '--output_eol', eol] + params
self.run_process(cmd)
for f in files:
print(str(cmd) + ' ' + str(params) + ' ' + eol + ' ' + f)
self.assertExists(f)
if eol == 'linux':
expected_ending = '\n'
else:
expected_ending = '\r\n'
ret = line_endings.check_line_endings(f, expect_only=expected_ending)
assert ret == 0
for f in files:
try_delete(f)
def test_binaryen_names(self):
sizes = {}
for args, expect_names in [
([], False),
(['-g'], True),
(['-O1'], False),
(['-O2'], False),
(['-O2', '-g'], True),
(['-O2', '-g1'], False),
(['-O2', '-g2'], True),
(['-O2', '--profiling'], True),
(['-O2', '--profiling-funcs'], True),
]:
print(args, expect_names)
try_delete('a.out.js')
# we use dlmalloc here, as emmalloc has a bunch of asserts that contain the text "malloc" in them, which makes counting harder
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp')] + args + ['-s', 'MALLOC="dlmalloc"', '-s', 'EXPORTED_FUNCTIONS=[_main,_malloc]'])
code = open('a.out.wasm', 'rb').read()
if expect_names:
# name section adds the name of malloc (there is also another one for the export)
self.assertEqual(code.count(b'malloc'), 2)
else:
# should be just malloc for the export
self.assertEqual(code.count(b'malloc'), 1)
sizes[str(args)] = os.path.getsize('a.out.wasm')
print(sizes)
self.assertLess(sizes["['-O2']"], sizes["['-O2', '--profiling-funcs']"], 'when -profiling-funcs, the size increases due to function names')
def test_binaryen_warn_mem(self):
# if the user changes INITIAL_MEMORY at runtime, the wasm module may not accept the memory import if
# it is too big/small
create_test_file('pre.js', 'var Module = { INITIAL_MEMORY: 50 * 1024 * 1024 };\n')
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'INITIAL_MEMORY=' + str(16 * 1024 * 1024), '--pre-js', 'pre.js', '-s', 'WASM_ASYNC_COMPILATION=0', '-s', 'IMPORTED_MEMORY'])
out = self.run_js('a.out.js', assert_returncode=NON_ZERO)
self.assertContained('LinkError', out)
self.assertContained('Memory size incompatibility issues may be due to changing INITIAL_MEMORY at runtime to something too large. Use ALLOW_MEMORY_GROWTH to allow any size memory (and also make sure not to set INITIAL_MEMORY at runtime to something smaller than it was at compile time).', out)
self.assertNotContained('hello, world!', out)
# and with memory growth, all should be good
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'INITIAL_MEMORY=' + str(16 * 1024 * 1024), '--pre-js', 'pre.js', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'WASM_ASYNC_COMPILATION=0', '-s', 'IMPORTED_MEMORY'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_binaryen_mem(self):
for args, expect_initial, expect_max in [
(['-s', 'INITIAL_MEMORY=20971520'], 320, 320),
(['-s', 'INITIAL_MEMORY=20971520', '-s', 'ALLOW_MEMORY_GROWTH=1'], 320, None),
(['-s', 'INITIAL_MEMORY=20971520', '-s', 'MAXIMUM_MEMORY=41943040'], 320, 640),
(['-s', 'INITIAL_MEMORY=20971520', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'MAXIMUM_MEMORY=41943040'], 320, 640),
]:
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-O2'] + args
print(' '.join(cmd))
self.run_process(cmd)
wat = self.run_process([wasm_dis, 'a.out.wasm'], stdout=PIPE).stdout
for line in wat.splitlines():  # iterate over lines of the disassembly, not characters
if '(import "env" "memory" (memory ' in line:
parts = line.strip().replace('(', '').replace(')', '').split(' ')
print(parts)
self.assertEqual(parts[5], str(expect_initial))
if not expect_max:
self.assertEqual(len(parts), 6)
else:
self.assertEqual(parts[6], str(expect_max))
def test_invalid_mem(self):
# A large amount is fine, multiple of 16MB or not
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=33MB'])
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=32MB'])
# A tiny amount is fine in wasm
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=65536', '-s', 'TOTAL_STACK=1024'])
# And the program works!
self.assertContained('hello, world!', self.run_js('a.out.js'))
# Must be a multiple of 64KB
ret = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=33554433']) # 32MB + 1 byte
self.assertContained('INITIAL_MEMORY must be a multiple of 64KB', ret)
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MAXIMUM_MEMORY=33MB'])
ret = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MAXIMUM_MEMORY=34603009']) # 33MB + 1 byte
self.assertContained('MAXIMUM_MEMORY must be a multiple of 64KB', ret)
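# Emitting -o into a directory that does not exist should give a clear error.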
def test_invalid_output_dir(self):
ret = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-o', os.path.join('NONEXISTING_DIRECTORY', 'out.js')])
self.assertContained('specified output file (NONEXISTING_DIRECTORY%sout.js) is in a directory that does not exist' % os.path.sep, ret)
def test_binaryen_ctors(self):
# ctor order must be identical to js builds, deterministically
create_test_file('src.cpp', r'''
#include <stdio.h>
struct A {
A() { puts("constructing A!"); }
};
A a;
struct B {
B() { puts("constructing B!"); }
};
B b;
int main() {}
''')
self.run_process([EMCC, 'src.cpp'])
correct = self.run_js('a.out.js')
for args in [[], ['-s', 'RELOCATABLE=1']]:
print(args)
self.run_process([EMCC, 'src.cpp', '-o', 'b.out.js'] + args)
seen = self.run_js('b.out.js')
assert correct == seen, correct + '\n vs \n' + seen
# test debug info and debuggability of JS output
def test_binaryen_debug(self):
for args, expect_dash_g, expect_emit_text, expect_clean_js, expect_whitespace_js, expect_closured in [
(['-O0'], False, False, False, True, False),
(['-O0', '-g1'], False, False, False, True, False),
(['-O0', '-g2'], True, False, False, True, False), # in -g2+, we emit -g to asm2wasm so function names are saved
(['-O0', '-g'], True, True, False, True, False),
(['-O0', '--profiling-funcs'], True, False, False, True, False),
(['-O1'], False, False, False, True, False),
(['-O2'], False, False, True, False, False),
(['-O2', '-g1'], False, False, True, True, False),
(['-O2', '-g'], True, True, False, True, False),
(['-O2', '--closure', '1'], False, False, True, False, True),
(['-O2', '--closure', '1', '-g1'], False, False, True, True, True),
]:
print(args, expect_dash_g, expect_emit_text)
try_delete('a.out.wat')
cmd = [EMCC, path_from_root('tests', 'hello_world.cpp')] + args
print(' '.join(cmd))
self.run_process(cmd)
js = open('a.out.js').read()
assert expect_clean_js == ('// ' not in js), 'cleaned-up js must not have comments'
assert expect_whitespace_js == ('{\n ' in js), 'whitespace-minified js must not have excess spacing'
assert expect_closured == ('var a;' in js or 'var a,' in js or 'var a=' in js or 'var a ' in js), 'closured js must have tiny variable names'
@uses_canonical_tmp
def test_binaryen_ignore_implicit_traps(self):
sizes = []
with env_modify({'EMCC_DEBUG': '1'}):
for args, expect in [
([], False),
(['-s', 'BINARYEN_IGNORE_IMPLICIT_TRAPS=1'], True),
]:
print(args, expect)
cmd = [EMCC, path_from_root('tests', 'hello_libcxx.cpp'), '-O3'] + args
print(' '.join(cmd))
err = self.run_process(cmd, stdout=PIPE, stderr=PIPE).stderr
self.assertContainedIf('--ignore-implicit-traps ', err, expect)
sizes.append(os.path.getsize('a.out.wasm'))
print('sizes:', sizes)
# sizes must be different, as the flag has an impact
self.assertEqual(len(set(sizes)), 2)
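# BINARYEN_EXTRA_PASSES should run the requested passes (--metrics output appears) without changing the wasm size.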
def test_binaryen_passes_extra(self):
def build(args=[]):
return self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-O3'] + args, stdout=PIPE).stdout
build()
base_size = os.path.getsize('a.out.wasm')
out = build(['-s', 'BINARYEN_EXTRA_PASSES="--metrics"'])
# and --metrics output appears
self.assertContained('[funcs]', out)
# adding --metrics should not affect code size
self.assertEqual(base_size, os.path.getsize('a.out.wasm'))
def assertFileContents(self, filename, contents):
contents = contents.replace('\r', '')
if os.environ.get('EMTEST_REBASELINE'):
with open(filename, 'w') as f:
f.write(contents)
return
if not os.path.exists(filename):
self.fail('Test expectation file not found: ' + filename + '.\n' +
'Run with EMTEST_REBASELINE to generate.')
expected_content = open(filename).read()
message = "Run with EMTEST_REBASELINE=1 to automatically update expectations"
self.assertTextDataIdentical(expected_content, contents, message,
filename, filename + '.new')
def run_metadce_test(self, filename, args, expected_exists, expected_not_exists, expected_size,
check_sent=True, check_imports=True, check_exports=True, check_funcs=True):
size_slack = 0.05
# in -Os, -Oz, we remove imports wasm doesn't need
print('Running metadce test: %s:' % filename, args, expected_exists,
expected_not_exists, expected_size, check_sent, check_imports, check_exports, check_funcs)
filename = path_from_root('tests', 'other', 'metadce', filename)
def clean_arg(arg):
return arg.replace('-', '')
def args_to_filename(args):
result = ''
for a in args:
if a == '-s':
continue
a = a.replace('-s', '')
a = a.replace('-', '')
a = a.replace('=1', '')
a = a.replace('=[]', '_NONE')
a = a.replace('=', '_')
if a:
result += '_' + a
return result
expected_basename = os.path.splitext(filename)[0]
expected_basename += args_to_filename(args)
self.run_process([EMCC, filename, '-g2'] + args)
# find the imports we send from JS
js = open('a.out.js').read()
start = js.find('asmLibraryArg = ')
end = js.find('}', start) + 1
start = js.find('{', start)
relevant = js[start + 2:end - 2]
relevant = relevant.replace(' ', '').replace('"', '').replace("'", '').split(',')
sent = [x.split(':')[0].strip() for x in relevant]
sent = [x for x in sent if x]
sent.sort()
for exists in expected_exists:
self.assertIn(exists, sent)
for not_exists in expected_not_exists:
self.assertNotIn(not_exists, sent)
if expected_size is not None:
# measure the wasm size without the name section
self.run_process([wasm_opt, 'a.out.wasm', '--strip-debug', '--all-features', '-o', 'a.out.nodebug.wasm'])
wasm_size = os.path.getsize('a.out.nodebug.wasm')
ratio = abs(wasm_size - expected_size) / float(expected_size)
print(' seen wasm size: %d (expected: %d), ratio to expected: %f' % (wasm_size, expected_size, ratio))
self.assertLess(ratio, size_slack)
imports, exports, funcs = parse_wasm('a.out.wasm')
imports.sort()
exports.sort()
funcs.sort()
# filter out _NNN suffixes that can be the result of bitcode linking when
# internal symbol names collide.
def strip_numeric_suffixes(funcname):
parts = funcname.split('_')
while parts:
if parts[-1].isdigit():
parts.pop()
else:
break
return '_'.join(parts)
funcs = [strip_numeric_suffixes(f) for f in funcs]
if check_sent:
sent_file = expected_basename + '.sent'
sent_data = '\n'.join(sent) + '\n'
self.assertFileContents(sent_file, sent_data)
if check_imports:
filename = expected_basename + '.imports'
data = '\n'.join(imports) + '\n'
self.assertFileContents(filename, data)
if check_exports:
filename = expected_basename + '.exports'
data = '\n'.join(exports) + '\n'
self.assertFileContents(filename, data)
if check_funcs:
filename = expected_basename + '.funcs'
data = '\n'.join(funcs) + '\n'
self.assertFileContents(filename, data)
@parameterized({
'O0': ([], [], ['waka'], 789), # noqa
'O1': (['-O1'], [], ['waka'], 264), # noqa
'O2': (['-O2'], [], ['waka'], 263), # noqa
# in -O3, -Os and -Oz we metadce, and they shrink it down to the minimal output we want
'O3': (['-O3'], [], [], 74), # noqa
'Os': (['-Os'], [], [], 74), # noqa
'Oz': (['-Oz'], [], [], 74), # noqa
'Os_mr': (['-Os', '-s', 'MINIMAL_RUNTIME'], [], [], 74), # noqa
})
def test_metadce_minimal(self, *args):
self.run_metadce_test('minimal.c', *args)
@node_pthreads
def test_metadce_minimal_pthreads(self):
self.run_metadce_test('minimal.c', ['-Oz', '-sUSE_PTHREADS', '-sPROXY_TO_PTHREAD'], [], [], 16135)
@parameterized({
'noexcept': (['-O2'], [], ['waka'], 127740), # noqa
# exceptions increases code size significantly
'except': (['-O2', '-fexceptions'], [], ['waka'], 170231), # noqa
# exceptions does not pull in demangling by default, which increases code size
'mangle': (['-O2', '-fexceptions',
'-s', 'DEMANGLE_SUPPORT'], [], ['waka'], 230258), # noqa
})
def test_metadce_cxx(self, *args):
# do not check functions in this test as there are a lot of libc++ functions
# pulled in here, and small LLVM backend changes can affect their size and
# lead to different inlining decisions which add or remove a function
self.run_metadce_test('hello_libcxx.cpp', *args, check_funcs=False)
@parameterized({
'O0': ([], [], ['waka'], 11689), # noqa
'O1': (['-O1'], [], ['waka'], 2422), # noqa
'O2': (['-O2'], [], ['waka'], 2060), # noqa
'O3': (['-O3'], [], [], 1792), # noqa; in -O3, -Os and -Oz we metadce
'Os': (['-Os'], [], [], 1781), # noqa
'Oz': (['-Oz'], [], [], 1777), # noqa
# finally, check what happens when we export nothing. wasm should be almost empty
'export_nothing':
(['-Os', '-s', 'EXPORTED_FUNCTIONS=[]'], [], [], 55), # noqa
# we don't metadce with linkable code! other modules may want stuff
# TODO(sbc): Investigate why the number of exports is an order of magnitude
# larger for wasm backend.
'main_module_2': (['-O3', '-s', 'MAIN_MODULE=2'], [], [], 10297), # noqa
})
def test_metadce_hello(self, *args):
self.run_metadce_test('hello_world.cpp', *args)
@parameterized({
'O3': ('mem.c', ['-O3'],
[], [], 6100), # noqa
# argc/argv support code etc. is in the wasm
'O3_standalone': ('mem.c', ['-O3', '-s', 'STANDALONE_WASM'],
[], [], 6309), # noqa
# without argc/argv, no support code for them is emitted
'O3_standalone_narg': ('mem_no_argv.c', ['-O3', '-s', 'STANDALONE_WASM'],
[], [], 6309), # noqa
# without main, no support code for argc/argv is emitted either
'O3_standalone_lib': ('mem_no_main.c', ['-O3', '-s', 'STANDALONE_WASM', '--no-entry'],
[], [], 6309), # noqa
# Growth support code is in JS, no significant change in the wasm
'O3_grow': ('mem.c', ['-O3', '-s', 'ALLOW_MEMORY_GROWTH'],
[], [], 6098), # noqa
# Growth support code is in the wasm
'O3_grow_standalone': ('mem.c', ['-O3', '-s', 'ALLOW_MEMORY_GROWTH', '-s', 'STANDALONE_WASM'],
[], [], 6449), # noqa
# without argc/argv, no support code for them is emitted, even with lto
'O3_standalone_narg_flto':
('mem_no_argv.c', ['-O3', '-s', 'STANDALONE_WASM', '-flto'],
[], [], 4971), # noqa
})
def test_metadce_mem(self, filename, *args):
self.run_metadce_test(filename, *args)
@parameterized({
'O3': ('libcxxabi_message.cpp', ['-O3'],
[], [], 99), # noqa
# argc/argv support code etc. is in the wasm
'O3_standalone': ('libcxxabi_message.cpp', ['-O3', '-s', 'STANDALONE_WASM'],
[], [], 178), # noqa
})
def test_metadce_libcxxabi_message(self, filename, *args):
self.run_metadce_test(filename, *args)
# ensures runtime exports work, even with metadce
def test_extra_runtime_exports(self):
exports = ['stackSave', 'stackRestore', 'stackAlloc', 'FS']
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Os', '-s', 'EXTRA_EXPORTED_RUNTIME_METHODS=%s' % str(exports)])
js = open('a.out.js').read()
for export in exports:
assert ('Module["%s"]' % export) in js, export
def test_legalize_js_ffi(self):
# test disabling of JS FFI legalization
for (args, js_ffi) in [
(['-s', 'LEGALIZE_JS_FFI=1', '-s', 'SIDE_MODULE=1', '-O1', '-s', 'EXPORT_ALL=1'], True),
(['-s', 'LEGALIZE_JS_FFI=0', '-s', 'SIDE_MODULE=1', '-O1', '-s', 'EXPORT_ALL=1'], False),
(['-s', 'LEGALIZE_JS_FFI=0', '-s', 'SIDE_MODULE=1', '-O0', '-s', 'EXPORT_ALL=1'], False),
(['-s', 'LEGALIZE_JS_FFI=0', '-s', 'WARN_ON_UNDEFINED_SYMBOLS=0', '-O0'], False),
]:
if 'SIDE_MODULE=1' in args:
continue
print(args)
try_delete('a.out.wasm')
try_delete('a.out.wat')
cmd = [EMCC, path_from_root('tests', 'other', 'ffi.c'), '-g', '-o', 'a.out.wasm'] + args
print(' '.join(cmd))
self.run_process(cmd)
self.run_process([wasm_dis, 'a.out.wasm', '-o', 'a.out.wat'])
text = open('a.out.wat').read()
# remove internal comments and extra whitespace
text = re.sub(r'\(;[^;]+;\)', '', text)
text = re.sub(r'\$var\$*.', '', text)
text = re.sub(r'param \$\d+', 'param ', text)
text = re.sub(r' +', ' ', text)
# TODO: remove the unnecessary ".*" in e_* regexes after binaryen #2510 lands
e_add_f32 = re.search(r'func \$_?add_f .*\(param f32\) \(param f32\) \(result f32\)', text)
i_i64_i32 = re.search(r'import .*"_?import_ll" .*\(param i32 i32\) \(result i32\)', text)
i_f32_f64 = re.search(r'import .*"_?import_f" .*\(param f64\) \(result f64\)', text)
i_i64_i64 = re.search(r'import .*"_?import_ll" .*\(param i64\) \(result i64\)', text)
i_f32_f32 = re.search(r'import .*"_?import_f" .*\(param f32\) \(result f32\)', text)
e_i64_i32 = re.search(r'func \$_?add_ll .*\(param i32\) \(param i32\) \(param i32\) \(param i32\) \(result i32\)', text)
e_f32_f64 = re.search(r'func \$legalstub\$_?add_f .*\(param f64\) \(param f64\) \(result f64\)', text)
e_i64_i64 = re.search(r'func \$_?add_ll .*\(param i64\) \(param i64\) \(result i64\)', text)
assert e_add_f32, 'add_f export missing'
if js_ffi:
assert i_i64_i32, 'i64 not converted to i32 in imports'
assert i_f32_f64, 'f32 not converted to f64 in imports'
assert not i_i64_i64, 'i64 not converted to i32 in imports'
assert not i_f32_f32, 'f32 not converted to f64 in imports'
assert e_i64_i32, 'i64 not converted to i32 in exports'
assert not e_f32_f64, 'f32 not converted to f64 in exports'
assert not e_i64_i64, 'i64 not converted to i64 in exports'
else:
assert not i_i64_i32, 'i64 converted to i32 in imports'
assert not i_f32_f64, 'f32 converted to f64 in imports'
assert i_i64_i64, 'i64 converted to i32 in imports'
assert i_f32_f32, 'f32 converted to f64 in imports'
assert not e_i64_i32, 'i64 converted to i32 in exports'
assert not e_f32_f64, 'f32 converted to f64 in exports'
assert e_i64_i64, 'i64 converted to i64 in exports'
def test_no_legalize_js_ffi(self):
# test minimal JS FFI legalization for invoke and dyncalls
for (args, js_ffi) in [
(['-s', 'LEGALIZE_JS_FFI=0', '-s', 'MAIN_MODULE=2', '-O3', '-s', 'DISABLE_EXCEPTION_CATCHING=0'], False),
]:
print(args)
try_delete('a.out.wasm')
try_delete('a.out.wat')
with env_modify({'EMCC_FORCE_STDLIBS': 'libc++'}):
cmd = [EMCC, path_from_root('tests', 'other', 'noffi.cpp'), '-g', '-o', 'a.out.js'] + args
print(' '.join(cmd))
self.run_process(cmd)
self.run_process([wasm_dis, 'a.out.wasm', '-o', 'a.out.wat'])
text = open('a.out.wat').read()
# remove internal comments and extra whitespace
text = re.sub(r'\(;[^;]+;\)', '', text)
text = re.sub(r'\$var\$*.', '', text)
text = re.sub(r'param \$\d+', 'param ', text)
text = re.sub(r' +', ' ', text)
# print("text: %s" % text)
i_legalimport_i64 = re.search(r'\(import.*\$legalimport\$invoke_j.*', text)
e_legalstub_i32 = re.search(r'\(func.*\$legalstub\$dyn.*\(result i32\)', text)
assert i_legalimport_i64, 'legal import not generated for invoke call'
assert e_legalstub_i32, 'legal stub not generated for dyncall'
def test_export_aliasee(self):
# build side module
args = ['-s', 'SIDE_MODULE=1']
cmd = [EMCC, path_from_root('tests', 'other', 'alias', 'side.c'), '-g', '-o', 'side.wasm'] + args
print(' '.join(cmd))
self.run_process(cmd)
# build main module
args = ['-s', 'EXPORTED_FUNCTIONS=["_main", "_foo"]', '-s', 'MAIN_MODULE=2', '-s', 'EXIT_RUNTIME=1', '-lnodefs.js']
cmd = [EMCC, path_from_root('tests', 'other', 'alias', 'main.c'), '-o', 'main.js'] + args
print(' '.join(cmd))
self.run_process(cmd)
# run the program
self.assertContained('success', self.run_js('main.js'))
def test_sysconf_phys_pages(self):
def run(args, expected):
cmd = [EMCC, path_from_root('tests', 'unistd', 'sysconf_phys_pages.c')] + args
print(str(cmd))
self.run_process(cmd)
result = self.run_js('a.out.js').strip()
self.assertEqual(result, str(expected) + ', errno: 0')
run([], 1024)
run(['-s', 'INITIAL_MEMORY=32MB'], 2048)
run(['-s', 'INITIAL_MEMORY=32MB', '-s', 'ALLOW_MEMORY_GROWTH=1'], (2 * 1024 * 1024 * 1024) // 16384)
run(['-s', 'INITIAL_MEMORY=32MB', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'WASM=0'], (2 * 1024 * 1024 * 1024) // 16384)
def test_wasm_target_and_STANDALONE_WASM(self):
# STANDALONE_WASM means we never minify imports and exports.
for opts, potentially_expect_minified_exports_and_imports in (
([], False),
(['-s', 'STANDALONE_WASM'], False),
(['-O2'], False),
(['-O3'], True),
(['-O3', '-s', 'STANDALONE_WASM'], False),
(['-Os'], True),
):
# targeting .wasm (without .js) means we enable STANDALONE_WASM automatically, and don't minify imports/exports
for target in ('out.js', 'out.wasm'):
expect_minified_exports_and_imports = potentially_expect_minified_exports_and_imports and target.endswith('.js')
standalone = target.endswith('.wasm') or 'STANDALONE_WASM' in opts
print(opts, potentially_expect_minified_exports_and_imports, target, ' => ', expect_minified_exports_and_imports, standalone)
self.clear()
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-o', target] + opts)
self.assertExists('out.wasm')
if target.endswith('.wasm'):
# only wasm requested
self.assertNotExists('out.js')
wat = self.run_process([wasm_dis, 'out.wasm'], stdout=PIPE).stdout
wat_lines = wat.split('\n')
exports = [line.strip().split(' ')[1].replace('"', '') for line in wat_lines if "(export " in line]
imports = [line.strip().split(' ')[2].replace('"', '') for line in wat_lines if "(import " in line]
exports_and_imports = exports + imports
print(' exports', exports)
print(' imports', imports)
if expect_minified_exports_and_imports:
assert 'a' in exports_and_imports
else:
assert 'a' not in exports_and_imports
if standalone:
assert 'fd_write' in exports_and_imports, 'standalone mode preserves import names for WASI APIs'
# verify the wasm runs with the JS
if target.endswith('.js'):
self.assertContained('hello, world!', self.run_js('out.js'))
# verify a standalone wasm
if standalone:
for engine in config.WASM_ENGINES:
print(engine)
self.assertContained('hello, world!', self.run_js('out.wasm', engine=engine))
def test_wasm_targets_side_module(self):
# side modules do allow a wasm target
for opts, target in [([], 'a.out.wasm'),
(['-o', 'lib.wasm'], 'lib.wasm'),
(['-o', 'lib.so'], 'lib.so'),
(['-o', 'foo.bar'], 'foo.bar')]:
# specified target
print('building: ' + target)
self.clear()
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'SIDE_MODULE=1', '-Werror'] + opts)
for x in os.listdir('.'):
self.assertFalse(x.endswith('.js'))
self.assertTrue(building.is_wasm(target))
wasm_data = open(target, 'rb').read()
self.assertEqual(wasm_data.count(b'dylink'), 1)
@is_slow_test
def test_wasm_backend_lto(self):
# test building of non-wasm-object-files libraries, building with them, and running them
src = path_from_root('tests', 'hello_libcxx.cpp')
# test codegen in lto mode, and compare to normal (wasm object) mode
for args in [[], ['-O1'], ['-O2'], ['-O3'], ['-Os'], ['-Oz']]:
print(args)
print('wasm in object')
self.run_process([EMXX, src] + args + ['-c', '-o', 'hello_obj.o'])
self.assertTrue(building.is_wasm('hello_obj.o'))
self.assertFalse(building.is_bitcode('hello_obj.o'))
print('bitcode in object')
self.run_process([EMXX, src] + args + ['-c', '-o', 'hello_bitcode.o', '-flto'])
self.assertFalse(building.is_wasm('hello_bitcode.o'))
self.assertTrue(building.is_bitcode('hello_bitcode.o'))
print('use bitcode object (LTO)')
self.run_process([EMXX, 'hello_bitcode.o'] + args + ['-flto'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
print('use bitcode object (non-LTO)')
self.run_process([EMXX, 'hello_bitcode.o'] + args)
self.assertContained('hello, world!', self.run_js('a.out.js'))
print('use native object (LTO)')
self.run_process([EMXX, 'hello_obj.o'] + args + ['-flto'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
print('use native object (non-LTO)')
self.run_process([EMXX, 'hello_obj.o'] + args)
self.assertContained('hello, world!', self.run_js('a.out.js'))
@parameterized({
'except': [],
'noexcept': ['-s', 'DISABLE_EXCEPTION_CATCHING=0']
})
def test_wasm_backend_lto_libcxx(self, *args):
self.run_process([EMXX, path_from_root('tests', 'hello_libcxx.cpp'), '-flto'] + list(args))
def test_lto_flags(self):
for flags, expect_bitcode in [
([], False),
(['-flto'], True),
(['-flto=thin'], True),
(['-s', 'WASM_OBJECT_FILES=0'], True),
(['-s', 'WASM_OBJECT_FILES=1'], False),
]:
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp')] + flags + ['-c', '-o', 'a.o'])
seen_bitcode = building.is_bitcode('a.o')
self.assertEqual(expect_bitcode, seen_bitcode, 'must emit LTO-capable bitcode when flags indicate so (%s)' % str(flags))
def test_wasm_nope(self):
for opts in [[], ['-O2']]:
print(opts)
# check we show a good error message if there is no wasm support
create_test_file('pre.js', 'WebAssembly = undefined;\n')
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '--pre-js', 'pre.js'] + opts)
out = self.run_js('a.out.js', assert_returncode=NON_ZERO)
self.assertContained('no native wasm support detected', out)
def test_jsrun(self):
print(config.NODE_JS)
jsrun.WORKING_ENGINES = {}
# Test that engine check passes
self.assertTrue(jsrun.check_engine(config.NODE_JS))
# Run it a second time (cache hit)
self.assertTrue(jsrun.check_engine(config.NODE_JS))
# Test that engine check fails
bogus_engine = ['/fake/inline4']
self.assertFalse(jsrun.check_engine(bogus_engine))
self.assertFalse(jsrun.check_engine(bogus_engine))
# Test the other possible way (list vs string) to express an engine
if type(config.NODE_JS) is list:
engine2 = config.NODE_JS[0]
else:
engine2 = [config.NODE_JS]
self.assertTrue(jsrun.check_engine(engine2))
# Test that self.run_js requires the engine
self.run_js(path_from_root('tests', 'hello_world.js'), config.NODE_JS)
caught_exit = 0
try:
self.run_js(path_from_root('tests', 'hello_world.js'), bogus_engine)
except SystemExit as e:
caught_exit = e.code
self.assertEqual(1, caught_exit, 'Did not catch SystemExit with bogus JS engine')
def test_error_on_missing_libraries(self):
# -lsomenonexistingfile is an error by default
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-lsomenonexistingfile'])
self.assertContained('wasm-ld: error: unable to find library -lsomenonexistingfile', err)
# Tests that if the user accidentally attempts to link native object code, we show an error
def test_native_link_error_message(self):
self.run_process([CLANG_CC, '-c', path_from_root('tests', 'hello_123.c'), '-o', 'hello_123.o'])
err = self.expect_fail([EMCC, 'hello_123.o', '-o', 'hello_123.js'])
self.assertContained('unknown file type: hello_123.o', err)
# Tests that we should give a clear error on INITIAL_MEMORY not being enough for static initialization + stack
def test_clear_error_on_massive_static_data(self):
with open('src.cpp', 'w') as f:
f.write('''
char muchData[128 * 1024];
int main() {
return (int)&muchData;
}
''')
err = self.expect_fail([EMCC, 'src.cpp', '-s', 'TOTAL_STACK=1KB', '-s', 'INITIAL_MEMORY=64KB'])
self.assertContained('wasm-ld: error: initial memory too small', err)
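# Optimization levels above -O3 should be clamped to -O3 with a warning.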
def test_o_level_clamp(self):
for level in [3, 4, 20]:
err = self.run_process([EMCC, '-O' + str(level), path_from_root('tests', 'hello_world.c')], stderr=PIPE).stderr
self.assertContainedIf("optimization level '-O" + str(level) + "' is not supported; using '-O3' instead", err, level > 3)
# Tests that if the user specifies multiple -o output directives, then the last one will take precedence
def test_multiple_o_files(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'a.js', '-o', 'b.js'])
assert os.path.isfile('b.js')
assert not os.path.isfile('a.js')
# Tests that Emscripten-provided header files can be cleanly included in C code
@is_slow_test
def test_include_system_header_in_c(self):
for std in [[], ['-std=c89']]: # Test oldest C standard, and the default C standard
for directory, headers in [
('emscripten', ['dom_pk_codes.h', 'em_asm.h', 'emscripten.h', 'fetch.h', 'html5.h', 'key_codes.h', 'threading.h', 'trace.h', 'vr.h']), # This directory has also bind.h, val.h and wire.h, which require C++11
('AL', ['al.h', 'alc.h']),
('EGL', ['egl.h', 'eglplatform.h']),
('GL', ['freeglut_std.h', 'gl.h', 'glew.h', 'glfw.h', 'glu.h', 'glut.h']),
('GLES', ['gl.h', 'glplatform.h']),
('GLES2', ['gl2.h', 'gl2platform.h']),
('GLES3', ['gl3.h', 'gl3platform.h', 'gl31.h', 'gl32.h']),
('GLFW', ['glfw3.h']),
('KHR', ['khrplatform.h'])]:
for h in headers:
inc = '#include <' + directory + '/' + h + '>'
print(inc)
create_test_file('a.c', inc)
create_test_file('b.c', inc)
self.run_process([EMCC] + std + ['a.c', 'b.c'])
@is_slow_test
def test_single_file(self):
for (single_file_enabled,
meminit1_enabled,
debug_enabled,
closure_enabled,
wasm_enabled) in itertools.product([True, False], repeat=5):
# skip unhelpful option combinations
if wasm_enabled and meminit1_enabled:
continue
if closure_enabled and debug_enabled:
continue
expect_wasm = wasm_enabled
expect_meminit = meminit1_enabled and not wasm_enabled
cmd = [EMCC, path_from_root('tests', 'hello_world.c')]
if single_file_enabled:
expect_meminit = False
expect_wasm = False
cmd += ['-s', 'SINGLE_FILE=1']
if meminit1_enabled:
cmd += ['--memory-init-file', '1']
if debug_enabled:
cmd += ['-g']
if closure_enabled:
cmd += ['--closure', '1']
if not wasm_enabled:
cmd += ['-s', 'WASM=0']
self.clear()
def do_test(cmd):
print(' '.join(cmd))
self.run_process(cmd)
print(os.listdir('.'))
assert expect_meminit == (os.path.exists('a.out.mem') or os.path.exists('a.out.js.mem'))
assert expect_wasm == os.path.exists('a.out.wasm')
assert not os.path.exists('a.out.wat')
self.assertContained('hello, world!', self.run_js('a.out.js'))
do_test(cmd)
# additional combinations that are not part of the big product()
if debug_enabled:
separate_dwarf_cmd = cmd + ['-gseparate-dwarf']
if wasm_enabled:
do_test(separate_dwarf_cmd)
self.assertExists('a.out.wasm.debug.wasm')
else:
self.expect_fail(separate_dwarf_cmd)
def test_emar_M(self):
create_test_file('file1', ' ')
create_test_file('file2', ' ')
self.run_process([EMAR, 'cr', 'file1.a', 'file1'])
self.run_process([EMAR, 'cr', 'file2.a', 'file2'])
self.run_process([EMAR, '-M'], input='''create combined.a
addlib file1.a
addlib file2.a
save
end
''')
result = self.run_process([EMAR, 't', 'combined.a'], stdout=PIPE).stdout
self.assertContained('file1', result)
self.assertContained('file2', result)
def test_emar_duplicate_inputs(self):
# Verify that we can supply the same input multiple times without
# confusing emar.py:
# See https://github.com/emscripten-core/emscripten/issues/9733
create_test_file('file1', ' ')
self.run_process([EMAR, 'cr', 'file1.a', 'file1', 'file1'])
# Temporarily disabled to allow this llvm change to roll
# https://reviews.llvm.org/D69665
@no_windows('Temporarily disabled under windows')
def test_emar_response_file(self):
# Test that special characters such as single quotes in filenames survive being
# sent via response file
create_test_file("file'1", ' ')
create_test_file("file'2", ' ')
building.emar('cr', 'libfoo.a', ("file'1", "file'2"))
def test_archive_empty(self):
# This test was added because we had an issue with AUTO_ARCHIVE_INDEXES failing on empty
# archives (which inherently don't have indexes).
self.run_process([EMAR, 'crS', 'libfoo.a'])
self.run_process([EMCC, '-Werror', 'libfoo.a', path_from_root('tests', 'hello_world.c')])
def test_archive_no_index(self):
create_test_file('foo.c', 'int foo = 1;')
self.run_process([EMCC, '-c', 'foo.c'])
self.run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c')])
# The `S` flag means don't add an archive index
self.run_process([EMAR, 'crS', 'libfoo.a', 'foo.o'])
# The llvm backend (like GNU ld and lld) doesn't support linking archives with no index.
# However we have logic that will automatically add indexes (unless running with
# NO_AUTO_ARCHIVE_INDEXES).
stderr = self.expect_fail([EMCC, '-s', 'NO_AUTO_ARCHIVE_INDEXES', 'libfoo.a', 'hello_world.o'])
self.assertContained('libfoo.a: archive has no index; run ranlib to add one', stderr)
# The default behavior is to add archive indexes automatically.
self.run_process([EMCC, 'libfoo.a', 'hello_world.o'])
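# (Illustrative note, not executed by the test: with NO_AUTO_ARCHIVE_INDEXES the
# missing index could instead be added manually by running ranlib on the archive
# before linking, e.g. something along the lines of
#   emranlib libfoo.a
# after which the NO_AUTO_ARCHIVE_INDEXES link above would be expected to succeed.
# The exact invocation is a sketch and assumes the emranlib wrapper shipped with
# emscripten is on PATH.)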
def test_archive_non_objects(self):
create_test_file('file.txt', 'test file')
self.run_process([EMCC, '-c', path_from_root('tests', 'hello_world.c')])
# No index added.
# --format=darwin (the default on OSX) has a strange issue where it adds extra
# newlines to files: https://bugs.llvm.org/show_bug.cgi?id=42562
self.run_process([EMAR, 'crS', '--format=gnu', 'libfoo.a', 'file.txt', 'hello_world.o'])
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), 'libfoo.a'])
def test_flag_aliases(self):
def assert_aliases_match(flag1, flag2, flagarg, extra_args=[]):
results = {}
for f in (flag1, flag2):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', f + '=' + flagarg] + extra_args)
with open('a.out.js') as out:
results[f + '.js'] = out.read()
with open('a.out.wasm', 'rb') as out:
results[f + '.wasm'] = out.read()
self.assertEqual(results[flag1 + '.js'], results[flag2 + '.js'], 'js results should be identical')
self.assertEqual(results[flag1 + '.wasm'], results[flag2 + '.wasm'], 'wasm results should be identical')
assert_aliases_match('INITIAL_MEMORY', 'TOTAL_MEMORY', '16777216')
assert_aliases_match('INITIAL_MEMORY', 'TOTAL_MEMORY', '64MB')
assert_aliases_match('MAXIMUM_MEMORY', 'WASM_MEM_MAX', '16777216', ['-s', 'ALLOW_MEMORY_GROWTH'])
assert_aliases_match('MAXIMUM_MEMORY', 'BINARYEN_MEM_MAX', '16777216', ['-s', 'ALLOW_MEMORY_GROWTH'])
def test_IGNORE_CLOSURE_COMPILER_ERRORS(self):
create_test_file('pre.js', r'''
// make closure compiler very very angry
var dupe = 1;
var dupe = 2;
function Node() {
throw 'Node is a DOM thing too, and use the ' + dupe;
}
function Node() {
throw '(duplicate) Node is a DOM thing too, and also use the ' + dupe;
}
''')
def test(check, extra=[]):
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-O2', '--closure', '1', '--pre-js', 'pre.js'] + extra
proc = self.run_process(cmd, check=check, stderr=PIPE)
if not check:
self.assertNotEqual(proc.returncode, 0)
return proc
WARNING = 'Variable dupe declared more than once'
proc = test(check=False)
self.assertContained(WARNING, proc.stderr)
proc = test(check=True, extra=['-s', 'IGNORE_CLOSURE_COMPILER_ERRORS=1'])
self.assertNotContained(WARNING, proc.stderr)
def test_closure_full_js_library(self):
# test for closure errors in the entire JS library
# We must ignore various types of errors that are expected in this situation, as we
# are including a lot of JS without corresponding compiled code for it. This still
# lets us catch all other errors.
with env_modify({'EMCC_CLOSURE_ARGS': '--jscomp_off undefinedVars'}):
# USE_WEBGPU is specified here to make sure that it's closure-safe.
# It can be removed if USE_WEBGPU is later included in INCLUDE_FULL_LIBRARY.
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O1', '--closure', '1', '-g1', '-s', 'INCLUDE_FULL_LIBRARY=1', '-s', 'USE_WEBGPU=1', '-s', 'ERROR_ON_UNDEFINED_SYMBOLS=0'])
# Tests --closure-args command line flag
def test_closure_externs(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--closure', '1', '--pre-js', path_from_root('tests', 'test_closure_externs_pre_js.js'), '--closure-args', '--externs "' + path_from_root('tests', 'test_closure_externs.js') + '"'])
def test_toolchain_profiler(self):
environ = os.environ.copy()
environ['EM_PROFILE_TOOLCHAIN'] = '1'
# replaced subprocess functions should not cause errors
self.run_process([EMCC, path_from_root('tests', 'hello_world.c')], env=environ)
def test_noderawfs(self):
fopen_write = open(path_from_root('tests', 'asmfs', 'fopen_write.cpp')).read()
create_test_file('main.cpp', fopen_write)
self.run_process([EMCC, 'main.cpp', '-s', 'NODERAWFS=1'])
self.assertContained("read 11 bytes. Result: Hello data!", self.run_js('a.out.js'))
# NODERAWFS should write directly to the OS file system
self.assertEqual("Hello data!", open('hello_file.txt').read())
def test_noderawfs_disables_embedding(self):
expected = '--preload-file and --embed-file cannot be used with NODERAWFS which disables virtual filesystem'
base = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'NODERAWFS=1']
create_test_file('somefile', 'foo')
err = self.expect_fail(base + ['--preload-file', 'somefile'])
self.assertContained(expected, err)
err = self.expect_fail(base + ['--embed-file', 'somefile'])
self.assertContained(expected, err)
def test_node_code_caching(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'),
'-s', 'NODE_CODE_CACHING',
'-s', 'WASM_ASYNC_COMPILATION=0'])
def get_cached():
cached = glob.glob('a.out.wasm.*.cached')
if not cached:
return None
self.assertEqual(len(cached), 1)
return cached[0]
# running the program makes it cache the code
self.assertFalse(get_cached())
self.assertEqual('hello, world!', self.run_js('a.out.js').strip())
self.assertTrue(get_cached(), 'should be a cache file')
# it is hard to test that it actually uses the cache to speed itself up, but we
# can at least test that it tries to deserialize it
with open(get_cached(), 'w') as f:
f.write('waka waka')
ERROR = 'NODE_CODE_CACHING: failed to deserialize, bad cache file?'
self.assertContained(ERROR, self.run_js('a.out.js'))
# we cached proper code after showing that error
with open(get_cached(), 'rb') as f:
self.assertEqual(f.read().count(b'waka'), 0)
self.assertNotContained(ERROR, self.run_js('a.out.js'))
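# (Descriptive note on the behaviour exercised above: NODE_CODE_CACHING makes the
# generated JS write a code cache file named a.out.wasm.<hash>.cached next to the
# wasm. When that file is corrupt, the runtime reports the deserialization error
# and falls back to normal compilation, then rewrites a valid cache, which is why
# the second run no longer prints the error.)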
def test_autotools_shared_check(self):
env = os.environ.copy()
env['LC_ALL'] = 'C'
expected = ': supported targets:.* elf'
out = self.run_process([EMCC, '--help'], stdout=PIPE, env=env).stdout
assert re.search(expected, out)
def test_ioctl_window_size(self):
self.do_other_test('test_ioctl_window_size.cpp')
def test_fd_closed(self):
self.do_other_test('test_fd_closed.cpp')
def test_fflush(self):
# fflush without the full filesystem won't quite work
self.do_other_test('test_fflush.cpp')
def test_fflush_fs(self):
# fflush with the full filesystem will flush from libc, but not the JS logging, which awaits a newline
self.do_other_test('test_fflush_fs.cpp', emcc_args=['-s', 'FORCE_FILESYSTEM=1'])
def test_fflush_fs_exit(self):
# on exit, we can send out a newline as no more code will run
self.do_other_test('test_fflush_fs_exit.cpp', emcc_args=['-s', 'FORCE_FILESYSTEM=1', '-s', 'EXIT_RUNTIME=1'])
def test_extern_weak(self):
self.do_other_test('test_extern_weak.c')
@disabled('https://github.com/emscripten-core/emscripten/issues/12819')
def test_extern_weak_dynamic(self):
self.do_other_test('test_extern_weak.c', emcc_args=['-s', 'MAIN_MODULE=2'])
def test_main_module_without_main(self):
create_test_file('pre.js', r'''
var Module = {
onRuntimeInitialized: function() {
Module._foo();
}
};
''')
create_test_file('src.c', r'''
#include <emscripten.h>
EMSCRIPTEN_KEEPALIVE void foo() {
EM_ASM({ console.log("bar") });
}
''')
self.run_process([EMCC, 'src.c', '--pre-js', 'pre.js', '-s', 'MAIN_MODULE=2'])
self.assertContained('bar', self.run_js('a.out.js'))
def test_js_optimizer_parse_error(self):
# check we show a proper understandable error for JS parse problems
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({
var x = !<->5.; // wtf
});
}
''')
stderr = self.expect_fail([EMCC, 'src.cpp', '-O2'])
# wasm backend output doesn't have spaces in the EM_ASM function bodies
self.assertContained(('''
var ASM_CONSTS = [function() { var x = !<->5.; }];
^
''', '''
1024: function() {var x = !<->5.;}
^
'''), stderr)
def test_js_optimizer_chunk_size_determinism(self):
def build():
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O3', '-s', 'WASM=0'])
with open('a.out.js') as f:
# FIXME: newline differences can exist, ignore for now
return f.read().replace('\n', '')
normal = build()
with env_modify({
'EMCC_JSOPT_MIN_CHUNK_SIZE': '1',
'EMCC_JSOPT_MAX_CHUNK_SIZE': '1'
}):
tiny = build()
with env_modify({
'EMCC_JSOPT_MIN_CHUNK_SIZE': '4294967296',
'EMCC_JSOPT_MAX_CHUNK_SIZE': '4294967296'
}):
huge = build()
self.assertIdentical(normal, tiny)
self.assertIdentical(normal, huge)
@parameterized({
'': ([],), # noqa
'O3': (['-O3'],), # noqa
'closure': (['--closure', '1'],), # noqa
'closure_O3': (['--closure', '1', '-O3'],), # noqa
})
def test_EM_ASM_ES6(self, args):
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({
let x = (a, b) => 5; // valid ES6
async function y() {} // valid ES2017
out('hello!');
return x;
});
}
''')
self.run_process([EMCC, 'src.cpp'] + args)
self.assertContained('hello!', self.run_js('a.out.js'))
def test_check_sourcemapurl(self):
if not self.is_wasm():
self.skipTest('only supported with wasm')
self.run_process([EMCC, path_from_root('tests', 'hello_123.c'), '-g4', '-o', 'a.js', '--source-map-base', 'dir/'])
output = open('a.wasm', 'rb').read()
# has sourceMappingURL section content and points to 'dir/a.wasm.map' file
source_mapping_url_content = webassembly.toLEB(len('sourceMappingURL')) + b'sourceMappingURL' + webassembly.toLEB(len('dir/a.wasm.map')) + b'dir/a.wasm.map'
self.assertEqual(output.count(source_mapping_url_content), 1)
# make sure no DWARF debug info sections remain - they would just waste space
self.assertNotIn(b'.debug_', output)
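# (For reference, the bytes asserted above follow the wasm custom-section name
# convention: a LEB128-encoded length of the name "sourceMappingURL", the name
# itself, then a LEB128-encoded length of the URL followed by the URL bytes.
# A minimal sketch of building the same payload, mirroring the helper used above:
#   name, url = b'sourceMappingURL', b'dir/a.wasm.map'
#   payload = webassembly.toLEB(len(name)) + name + webassembly.toLEB(len(url)) + url
# )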
def test_check_source_map_args(self):
# -g4 is needed for source maps; -g is not enough
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-g'])
self.assertNotExists('a.out.wasm.map')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-g4'])
self.assertExists('a.out.wasm.map')
@parameterized({
'normal': [],
'profiling': ['--profiling'] # -g4 --profiling should still emit a source map; see #8584
})
def test_check_sourcemapurl_default(self, *args):
print(args)
if not self.is_wasm():
self.skipTest('only supported with wasm')
try_delete('a.wasm.map')
self.run_process([EMCC, path_from_root('tests', 'hello_123.c'), '-g4', '-o', 'a.js'] + list(args))
output = open('a.wasm', 'rb').read()
# has sourceMappingURL section content and points to 'a.wasm.map' file
source_mapping_url_content = webassembly.toLEB(len('sourceMappingURL')) + b'sourceMappingURL' + webassembly.toLEB(len('a.wasm.map')) + b'a.wasm.map'
self.assertIn(source_mapping_url_content, output)
def test_wasm_sourcemap(self):
# The no_main.c will be read (from a relative location) due to the specified "--sources" option
shutil.copyfile(path_from_root('tests', 'other', 'wasm_sourcemap', 'no_main.c'), 'no_main.c')
wasm_map_cmd = [PYTHON, path_from_root('tools', 'wasm-sourcemap.py'),
'--sources', '--prefix', '=wasm-src://',
'--load-prefix', '/emscripten/tests/other/wasm_sourcemap=.',
'--dwarfdump-output',
path_from_root('tests', 'other', 'wasm_sourcemap', 'foo.wasm.dump'),
'-o', 'a.out.wasm.map',
path_from_root('tests', 'other', 'wasm_sourcemap', 'foo.wasm'),
'--basepath=' + os.getcwd()]
self.run_process(wasm_map_cmd)
output = open('a.out.wasm.map').read()
# has "sources" entry with file (includes also `--prefix =wasm-src:///` replacement)
self.assertIn('wasm-src:///emscripten/tests/other/wasm_sourcemap/no_main.c', output)
# has "sourcesContent" entry with source code (included with `-s` option)
self.assertIn('int foo()', output)
# has some entries
self.assertRegexpMatches(output, r'"mappings":\s*"[A-Za-z0-9+/]')
def test_wasm_sourcemap_dead(self):
wasm_map_cmd = [PYTHON, path_from_root('tools', 'wasm-sourcemap.py'),
'--dwarfdump-output',
path_from_root('tests', 'other', 'wasm_sourcemap_dead', 't.wasm.dump'),
'-o', 'a.out.wasm.map',
path_from_root('tests', 'other', 'wasm_sourcemap_dead', 't.wasm'),
'--basepath=' + os.getcwd()]
self.run_process(wasm_map_cmd, stdout=PIPE, stderr=PIPE)
output = open('a.out.wasm.map').read()
# has only two entries
self.assertRegexpMatches(output, r'"mappings":\s*"[A-Za-z0-9+/]+,[A-Za-z0-9+/]+"')
def test_wasm_sourcemap_relative_paths(self):
def test(infile, source_map_added_dir=''):
expected_source_map_path = 'a.cpp'
if source_map_added_dir:
expected_source_map_path = source_map_added_dir + '/' + expected_source_map_path
print(infile, expected_source_map_path)
shutil.copyfile(path_from_root('tests', 'hello_123.c'), infile)
infiles = [
infile,
os.path.abspath(infile),
'./' + infile
]
for curr in infiles:
print(' ', curr)
self.run_process([EMCC, curr, '-g4'])
with open('a.out.wasm.map', 'r') as f:
self.assertIn('"%s"' % expected_source_map_path, str(f.read()))
test('a.cpp')
ensure_dir('inner')
test('inner/a.cpp', 'inner')
def test_separate_dwarf(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-g'])
self.assertExists('a.out.wasm')
self.assertNotExists('a.out.wasm.debug.wasm')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-gseparate-dwarf'])
self.assertExists('a.out.wasm')
self.assertExists('a.out.wasm.debug.wasm')
self.assertLess(os.path.getsize('a.out.wasm'), os.path.getsize('a.out.wasm.debug.wasm'))
# the special section should also exist, that refers to the side debug file
with open('a.out.wasm', 'rb') as f:
wasm = f.read()
self.assertIn(b'external_debug_info', wasm)
self.assertIn(b'a.out.wasm.debug.wasm', wasm)
# building to a subdirectory should still leave a relative path, which
# assumes the debug file is alongside the main one
os.mkdir('subdir')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'),
'-gseparate-dwarf',
'-o', os.path.join('subdir', 'output.js')])
with open(os.path.join('subdir', 'output.wasm'), 'rb') as f:
wasm = f.read()
self.assertIn(b'output.wasm.debug.wasm', wasm)
# check both unix-style slashes and the system's slashes, so that we don't
# assume the encoding of the section in this test
self.assertNotIn(b'subdir/output.wasm.debug.wasm', wasm)
self.assertNotIn(bytes(os.path.join('subdir', 'output.wasm.debug.wasm'), 'ascii'), wasm)
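# (Descriptive note: 'external_debug_info' is the wasm custom section that holds
# the relative URL/filename of the side file containing the DWARF data, which is
# why the assertions above look for both the section name and the side file name
# inside the main wasm, and check that the stored path stays relative rather than
# containing the 'subdir' prefix.)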
def test_separate_dwarf_with_filename(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-gseparate-dwarf=with_dwarf.wasm'])
self.assertNotExists('a.out.wasm.debug.wasm')
self.assertExists('with_dwarf.wasm')
# the correct notation is to have exactly one '=' and in the right place
for invalid in ('-gseparate-dwarf=x=', '-gseparate-dwarfy=', '-gseparate-dwarf-hmm'):
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), invalid])
self.assertContained('invalid -gseparate-dwarf=FILENAME notation', stderr)
# building to a subdirectory, but with the debug file in another place,
# should leave a relative path to the debug wasm
os.mkdir('subdir')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'),
'-o', os.path.join('subdir', 'output.js'),
'-gseparate-dwarf=with_dwarf2.wasm'])
self.assertExists('with_dwarf2.wasm')
with open(os.path.join('subdir', 'output.wasm'), 'rb') as f:
wasm = f.read()
self.assertIn(b'../with_dwarf2.wasm', wasm)
def test_separate_dwarf_with_filename_and_path(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-gseparate-dwarf=with_dwarf.wasm'])
with open('a.out.wasm', 'rb') as f:
self.assertIn(b'with_dwarf.wasm', f.read())
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-gseparate-dwarf=with_dwarf.wasm',
'-s', 'SEPARATE_DWARF_URL=http://somewhere.com/hosted.wasm'])
with open('a.out.wasm', 'rb') as f:
self.assertIn(b'somewhere.com/hosted.wasm', f.read())
@parameterized({
'O0': (['-O0'],),
'O1': (['-O1'],),
'O2': (['-O2'],),
})
def test_wasm_producers_section(self, args):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c')] + args)
with open('a.out.wasm', 'rb') as f:
data = f.read()
# verify that there is no producers section by default, and
# see that the flag works to add it.
self.assertNotIn('clang', str(data))
size = os.path.getsize('a.out.wasm')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'EMIT_PRODUCERS_SECTION=1'] + args)
with open('a.out.wasm', 'rb') as f:
self.assertIn('clang', str(f.read()))
size_with_section = os.path.getsize('a.out.wasm')
self.assertLess(size, size_with_section)
def test_html_preprocess(self):
test_file = path_from_root('tests', 'module', 'test_stdin.c')
output_file = 'test_stdin.html'
shell_file = path_from_root('tests', 'module', 'test_html_preprocess.html')
self.run_process([EMCC, '-o', output_file, test_file, '--shell-file', shell_file, '-s', 'ASSERTIONS=0'], stdout=PIPE, stderr=PIPE)
output = open(output_file).read()
self.assertContained("""<style>
/* Disable preprocessing inside style block as syntax is ambiguous with CSS */
#include {background-color: black;}
#if { background-color: red;}
#else {background-color: blue;}
#endif {background-color: green;}
#xxx {background-color: purple;}
</style>
T1:(else) ASSERTIONS != 1
T2:ASSERTIONS != 1
T3:ASSERTIONS < 2
T4:(else) ASSERTIONS <= 1
T5:(else) ASSERTIONS
T6:!ASSERTIONS""", output)
self.run_process([EMCC, '-o', output_file, test_file, '--shell-file', shell_file, '-s', 'ASSERTIONS=1'], stdout=PIPE, stderr=PIPE)
output = open(output_file).read()
self.assertContained("""<style>
/* Disable preprocessing inside style block as syntax is ambiguous with CSS */
#include {background-color: black;}
#if { background-color: red;}
#else {background-color: blue;}
#endif {background-color: green;}
#xxx {background-color: purple;}
</style>
T1:ASSERTIONS == 1
T2:(else) ASSERTIONS == 1
T3:ASSERTIONS < 2
T4:(else) ASSERTIONS <= 1
T5:ASSERTIONS
T6:(else) !ASSERTIONS""", output)
self.run_process([EMCC, '-o', output_file, test_file, '--shell-file', shell_file, '-s', 'ASSERTIONS=2'], stdout=PIPE, stderr=PIPE)
output = open(output_file).read()
self.assertContained("""<style>
/* Disable preprocessing inside style block as syntax is ambiguous with CSS */
#include {background-color: black;}
#if { background-color: red;}
#else {background-color: blue;}
#endif {background-color: green;}
#xxx {background-color: purple;}
</style>
T1:(else) ASSERTIONS != 1
T2:ASSERTIONS != 1
T3:(else) ASSERTIONS >= 2
T4:ASSERTIONS > 1
T5:ASSERTIONS
T6:(else) !ASSERTIONS""", output)
# Tests that Emscripten-compiled applications can be run by passing node a relative path to a script that lives in a different directory than the current working directory.
def test_node_js_run_from_different_directory(self):
ensure_dir('subdir')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', os.path.join('subdir', 'a.js'), '-O3'])
ret = self.run_process(config.NODE_JS + [os.path.join('subdir', 'a.js')], stdout=PIPE).stdout
self.assertContained('hello, world!', ret)
# Tests that a pthreads + modularize build can be run in node js
def test_node_js_pthread_module(self):
# create module loader script
moduleLoader = 'moduleLoader.js'
moduleLoaderContents = '''
const test_module = require("./module");
test_module().then((test_module_instance) => {
test_module_instance._main();
process.exit(0);
});
'''
ensure_dir('subdir')
create_test_file(os.path.join('subdir', moduleLoader), moduleLoaderContents)
# build hello_world.c
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', os.path.join('subdir', 'module.js'), '-s', 'USE_PTHREADS=1', '-s', 'PTHREAD_POOL_SIZE=2', '-s', 'MODULARIZE=1', '-s', 'EXPORT_NAME=test_module', '-s', 'ENVIRONMENT=worker,node'])
# run the module
ret = self.run_process(config.NODE_JS + ['--experimental-wasm-threads'] + [os.path.join('subdir', moduleLoader)], stdout=PIPE).stdout
self.assertContained('hello, world!', ret)
@no_windows('node system() does not seem to work, see https://github.com/emscripten-core/emscripten/pull/10547')
def test_node_js_system(self):
self.run_process([EMCC, '-DENV_NODE', path_from_root('tests', 'system.c'), '-o', 'a.js', '-O3'])
ret = self.run_process(config.NODE_JS + ['a.js'], stdout=PIPE).stdout
self.assertContained('OK', ret)
def test_is_bitcode(self):
fname = 'tmp.o'
with open(fname, 'wb') as f:
f.write(b'foo')
self.assertFalse(building.is_bitcode(fname))
with open(fname, 'wb') as f:
f.write(b'\xDE\xC0\x17\x0B')
f.write(16 * b'\x00')
f.write(b'BC')
self.assertTrue(building.is_bitcode(fname))
with open(fname, 'wb') as f:
f.write(b'BC')
self.assertTrue(building.is_bitcode(fname))
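# (Note on the magic numbers used above: b'\xDE\xC0\x17\x0B' is the little-endian
# encoding of the LLVM bitcode wrapper magic 0x0B17C0DE, followed here by 16 bytes
# of placeholder wrapper header fields, while a raw bitcode file begins with the
# ASCII bytes 'BC'. A hedged sketch of the kind of check building.is_bitcode() is
# expected to perform - the real implementation lives in tools/building.py:
#   def looks_like_bitcode(data):
#       return data.startswith(b'BC') or data[:4] == b'\xDE\xC0\x17\x0B'
# )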
def test_is_ar(self):
fname = 'tmp.a'
with open(fname, 'wb') as f:
f.write(b'foo')
self.assertFalse(building.is_ar(fname))
with open(fname, 'wb') as f:
f.write(b'!<arch>\n')
self.assertTrue(building.is_ar(fname))
def test_emcc_parsing(self):
create_test_file('src.c', r'''
#include <stdio.h>
void a() { printf("a\n"); }
void b() { printf("b\n"); }
void c() { printf("c\n"); }
void d() { printf("d\n"); }
''')
create_test_file('response', r'''[
"_a",
"_b",
"_c",
"_d"
]
''')
for export_arg, expected in [
# extra space at end - should be ignored
("EXPORTED_FUNCTIONS=['_a', '_b', '_c', '_d' ]", ''),
# extra newline in response file - should be ignored
("EXPORTED_FUNCTIONS=@response", ''),
# stray slash
("EXPORTED_FUNCTIONS=['_a', '_b', \\'_c', '_d']", '''undefined exported symbol: "\\\\'_c'"'''),
# stray slash
("EXPORTED_FUNCTIONS=['_a', '_b',\\ '_c', '_d']", '''undefined exported symbol: "\\\\ '_c'"'''),
# stray slash
('EXPORTED_FUNCTIONS=["_a", "_b", \\"_c", "_d"]', 'undefined exported symbol: "\\\\"_c""'),
# stray slash
('EXPORTED_FUNCTIONS=["_a", "_b",\\ "_c", "_d"]', 'undefined exported symbol: "\\\\ "_c"'),
# missing comma
('EXPORTED_FUNCTIONS=["_a", "_b" "_c", "_d"]', 'undefined exported symbol: "_b" "_c"'),
]:
print(export_arg)
proc = self.run_process([EMCC, 'src.c', '-s', export_arg], stdout=PIPE, stderr=PIPE, check=not expected)
print(proc.stderr)
if not expected:
self.assertFalse(proc.stderr)
else:
self.assertNotEqual(proc.returncode, 0)
self.assertContained(expected, proc.stderr)
def test_asyncify_escaping(self):
proc = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ASYNCIFY=1', '-s', "ASYNCIFY_ONLY=[DOS_ReadFile(unsigned short, unsigned char*, unsigned short*, bool)]"], stdout=PIPE, stderr=PIPE)
self.assertContained('emcc: ASYNCIFY list contains an item without balanced parentheses', proc.stderr)
self.assertContained(' DOS_ReadFile(unsigned short', proc.stderr)
self.assertContained('Try to quote the entire argument', proc.stderr)
def test_asyncify_response_file(self):
return self.skipTest(' TODO remove the support for multiple binaryen versions warning output ("function name" vs "pattern" etc).')
create_test_file('a.txt', r'''[
"DOS_ReadFile(unsigned short, unsigned char*, unsigned short*, bool)"
]
''')
proc = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ASYNCIFY=1', '-s', "[email protected]"], stdout=PIPE, stderr=PIPE)
# we should parse the response file properly, and then issue a proper warning for the missing function
self.assertContained(
'Asyncify onlylist contained a non-matching pattern: DOS_ReadFile(unsigned short, unsigned char*, unsigned short*, bool)',
proc.stderr)
def test_asyncify_advise(self):
src = path_from_root('tests', 'other', 'asyncify_advise.c')
self.set_setting('ASYNCIFY', 1)
self.set_setting('ASYNCIFY_ADVISE', 1)
self.set_setting('ASYNCIFY_IMPORTS', ['async_func'])
out = self.run_process([EMCC, src, '-o', 'asyncify_advise.js'] + self.get_emcc_args(), stdout=PIPE).stdout
self.assertContained('[asyncify] main can', out)
self.assertContained('[asyncify] a can', out)
self.assertContained('[asyncify] c can', out)
self.assertContained('[asyncify] e can', out)
self.assertContained('[asyncify] g can', out)
self.assertContained('[asyncify] i can', out)
self.set_setting('ASYNCIFY_REMOVE', ['e'])
out = self.run_process([EMCC, src, '-o', 'asyncify_advise.js'] + self.get_emcc_args(), stdout=PIPE).stdout
self.assertContained('[asyncify] main can', out)
self.assertNotContained('[asyncify] a can', out)
self.assertNotContained('[asyncify] c can', out)
self.assertNotContained('[asyncify] e can', out)
self.assertContained('[asyncify] g can', out)
self.assertContained('[asyncify] i can', out)
# Sockets and networking
def test_inet(self):
self.do_runf(path_from_root('tests', 'sha1.c'), 'SHA1=15dd99a1991e0b3826fede3deffc1feba42278e6')
src = r'''
#include <stdio.h>
#include <arpa/inet.h>
int main() {
printf("*%x,%x,%x,%x,%x,%x*\n", htonl(0xa1b2c3d4), htonl(0xfe3572e0), htonl(0x07abcdf0), htons(0xabcd), ntohl(0x43211234), ntohs(0xbeaf));
in_addr_t i = inet_addr("190.180.10.78");
printf("%x\n", i);
return 0;
}
'''
self.do_run(src, '*d4c3b2a1,e07235fe,f0cdab07,cdab,34122143,afbe*\n4e0ab4be\n')
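# (The expected output above reflects that wasm is little-endian: htonl()/htons()
# byte-swap their arguments, so e.g. 0xa1b2c3d4 prints as d4c3b2a1 via %x, and
# inet_addr("190.180.10.78") returns the address in network byte order, i.e. the
# bytes 0xbe 0xb4 0x0a 0x4e, which read back as the 32-bit value 0x4e0ab4be.)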
def test_inet2(self):
src = r'''
#include <stdio.h>
#include <arpa/inet.h>
int main() {
struct in_addr x, x2;
int *y = (int*)&x;
*y = 0x12345678;
printf("%s\n", inet_ntoa(x));
int r = inet_aton(inet_ntoa(x), &x2);
printf("%s\n", inet_ntoa(x2));
return 0;
}
'''
self.do_run(src, '120.86.52.18\n120.86.52.18\n')
def test_inet3(self):
src = r'''
#include <stdio.h>
#include <arpa/inet.h>
#include <sys/socket.h>
int main() {
char dst[64];
struct in_addr x, x2;
int *y = (int*)&x;
*y = 0x12345678;
printf("%s\n", inet_ntop(AF_INET,&x,dst,sizeof dst));
int r = inet_aton(inet_ntoa(x), &x2);
printf("%s\n", inet_ntop(AF_INET,&x2,dst,sizeof dst));
return 0;
}
'''
self.do_run(src, '120.86.52.18\n120.86.52.18\n')
def test_inet4(self):
src = r'''
#include <stdio.h>
#include <arpa/inet.h>
#include <sys/socket.h>
void test(const char *test_addr, bool first=true){
char str[40];
struct in6_addr addr;
unsigned char *p = (unsigned char*)&addr;
int ret;
ret = inet_pton(AF_INET6,test_addr,&addr);
if(ret == -1) return;
if(ret == 0) return;
if(inet_ntop(AF_INET6,&addr,str,sizeof(str)) == NULL ) return;
printf("%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x:%02x%02x - %s\n",
p[0],p[1],p[2],p[3],p[4],p[5],p[6],p[7],p[8],p[9],p[10],p[11],p[12],p[13],p[14],p[15],str);
if (first) test(str, false); // check again, on our output
}
int main(){
test("::");
test("::1");
test("::1.2.3.4");
test("::17.18.19.20");
test("::ffff:1.2.3.4");
test("1::ffff");
test("::255.255.255.255");
test("0:ff00:1::");
test("0:ff::");
test("abcd::");
test("ffff::a");
test("ffff::a:b");
test("ffff::a:b:c");
test("ffff::a:b:c:d");
test("ffff::a:b:c:d:e");
test("::1:2:0:0:0");
test("0:0:1:2:3::");
test("ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff");
test("1::255.255.255.255");
//below should fail and not produce results..
test("1.2.3.4");
test("");
test("-");
printf("ok.\n");
}
'''
self.do_run(src, r'''0000:0000:0000:0000:0000:0000:0000:0000 - ::
0000:0000:0000:0000:0000:0000:0000:0000 - ::
0000:0000:0000:0000:0000:0000:0000:0001 - ::1
0000:0000:0000:0000:0000:0000:0000:0001 - ::1
0000:0000:0000:0000:0000:0000:0102:0304 - ::102:304
0000:0000:0000:0000:0000:0000:0102:0304 - ::102:304
0000:0000:0000:0000:0000:0000:1112:1314 - ::1112:1314
0000:0000:0000:0000:0000:0000:1112:1314 - ::1112:1314
0000:0000:0000:0000:0000:ffff:0102:0304 - ::ffff:1.2.3.4
0000:0000:0000:0000:0000:ffff:0102:0304 - ::ffff:1.2.3.4
0001:0000:0000:0000:0000:0000:0000:ffff - 1::ffff
0001:0000:0000:0000:0000:0000:0000:ffff - 1::ffff
0000:0000:0000:0000:0000:0000:ffff:ffff - ::ffff:ffff
0000:0000:0000:0000:0000:0000:ffff:ffff - ::ffff:ffff
0000:ff00:0001:0000:0000:0000:0000:0000 - 0:ff00:1::
0000:ff00:0001:0000:0000:0000:0000:0000 - 0:ff00:1::
0000:00ff:0000:0000:0000:0000:0000:0000 - 0:ff::
0000:00ff:0000:0000:0000:0000:0000:0000 - 0:ff::
abcd:0000:0000:0000:0000:0000:0000:0000 - abcd::
abcd:0000:0000:0000:0000:0000:0000:0000 - abcd::
ffff:0000:0000:0000:0000:0000:0000:000a - ffff::a
ffff:0000:0000:0000:0000:0000:0000:000a - ffff::a
ffff:0000:0000:0000:0000:0000:000a:000b - ffff::a:b
ffff:0000:0000:0000:0000:0000:000a:000b - ffff::a:b
ffff:0000:0000:0000:0000:000a:000b:000c - ffff::a:b:c
ffff:0000:0000:0000:0000:000a:000b:000c - ffff::a:b:c
ffff:0000:0000:0000:000a:000b:000c:000d - ffff::a:b:c:d
ffff:0000:0000:0000:000a:000b:000c:000d - ffff::a:b:c:d
ffff:0000:0000:000a:000b:000c:000d:000e - ffff::a:b:c:d:e
ffff:0000:0000:000a:000b:000c:000d:000e - ffff::a:b:c:d:e
0000:0000:0000:0001:0002:0000:0000:0000 - ::1:2:0:0:0
0000:0000:0000:0001:0002:0000:0000:0000 - ::1:2:0:0:0
0000:0000:0001:0002:0003:0000:0000:0000 - 0:0:1:2:3::
0000:0000:0001:0002:0003:0000:0000:0000 - 0:0:1:2:3::
ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff - ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff - ffff:ffff:ffff:ffff:ffff:ffff:ffff:ffff
0001:0000:0000:0000:0000:0000:ffff:ffff - 1::ffff:ffff
0001:0000:0000:0000:0000:0000:ffff:ffff - 1::ffff:ffff
ok.
''')
def test_getsockname_unconnected_socket(self):
self.do_run(r'''
#include <sys/socket.h>
#include <stdio.h>
#include <assert.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <string.h>
int main() {
int fd;
int z;
fd = socket(PF_INET, SOCK_STREAM, IPPROTO_TCP);
struct sockaddr_in adr_inet;
socklen_t len_inet = sizeof adr_inet;
z = getsockname(fd, (struct sockaddr *)&adr_inet, &len_inet);
if (z != 0) {
perror("getsockname error");
return 1;
}
char buffer[1000];
sprintf(buffer, "%s:%u", inet_ntoa(adr_inet.sin_addr), (unsigned)ntohs(adr_inet.sin_port));
const char *correct = "0.0.0.0:0";
printf("got (expected) socket: %s (%s), size %lu (%lu)\n", buffer, correct, strlen(buffer), strlen(correct));
assert(strlen(buffer) == strlen(correct));
assert(strcmp(buffer, correct) == 0);
puts("success.");
}
''', 'success.')
def test_getpeername_unconnected_socket(self):
self.do_run(r'''
#include <sys/socket.h>
#include <stdio.h>
#include <assert.h>
#include <sys/socket.h>
#include <netinet/in.h>
#include <arpa/inet.h>
#include <string.h>
int main() {
int fd;
int z;
fd = socket(PF_INET, SOCK_STREAM, IPPROTO_TCP);
struct sockaddr_in adr_inet;
socklen_t len_inet = sizeof adr_inet;
z = getpeername(fd, (struct sockaddr *)&adr_inet, &len_inet);
if (z != 0) {
perror("getpeername error");
return 1;
}
puts("unexpected success.");
}
''', 'getpeername error: Socket not connected', assert_returncode=NON_ZERO)
def test_getsockname_addrlen(self):
self.do_runf(path_from_root('tests', 'sockets', 'test_getsockname_addrlen.c'), 'success')
def test_getaddrinfo(self):
self.do_runf(path_from_root('tests', 'sockets', 'test_getaddrinfo.c'), 'success')
def test_getnameinfo(self):
self.do_runf(path_from_root('tests', 'sockets', 'test_getnameinfo.c'), 'success')
def test_gethostbyname(self):
self.do_runf(path_from_root('tests', 'sockets', 'test_gethostbyname.c'), 'success')
def test_getprotobyname(self):
self.do_runf(path_from_root('tests', 'sockets', 'test_getprotobyname.c'), 'success')
def test_socketpair(self):
self.do_run(r'''
#include <sys/socket.h>
#include <stdio.h>
int main() {
int fd[2];
int err;
err = socketpair(AF_INET, SOCK_STREAM, 0, fd);
if (err != 0) {
perror("socketpair error");
return 1;
}
puts("unexpected success.");
}
''', 'socketpair error: Function not implemented', assert_returncode=NON_ZERO)
def test_link(self):
self.do_run(r'''
#include <netdb.h>
#include <sys/types.h>
#include <sys/socket.h>
int main () {
void* thing = gethostbyname("bing.com");
ssize_t rval = recv (0, thing, 0, 0);
rval = send (0, thing, 0, 0);
return 0;
}''', '', force_c=True)
def test_linking_recv(self):
self.do_run(r'''
#include <sys/types.h>
#include <sys/socket.h>
int main(void) {
recv(0, 0, 0, 0);
return 0;
}
''', '', force_c=True)
def test_linking_send(self):
self.do_run(r'''
#include <sys/types.h>
#include <sys/socket.h>
int main(void) {
send(0, 0, 0, 0);
return 0;
}
''', '', force_c=True)
# This test verifies that function names embedded into the build with --js-library (JS functions imported to asm.js/wasm)
# are minified when -O3 is used
def test_js_function_names_are_minified(self):
def check_size(f, expected_size):
if not os.path.isfile(f):
return # Nonexistent file passes in this check
obtained_size = os.path.getsize(f)
print('size of generated ' + f + ': ' + str(obtained_size))
try_delete(f)
self.assertLess(obtained_size, expected_size)
self.run_process([PYTHON, path_from_root('tests', 'gen_many_js_functions.py'), 'library_long.js', 'main_long.c'])
for wasm in [[], ['-s', 'WASM=0']]:
# Currently we rely on Closure for full minification of every appearance of JS function names.
# TODO: Add minification also for non-Closure users and add [] to this list to test minification without Closure.
for closure in [['--closure', '1']]:
args = [EMCC, '-O3', '--js-library', 'library_long.js', 'main_long.c', '-o', 'a.html'] + wasm + closure
print(' '.join(args))
self.run_process(args)
ret = self.run_process(config.NODE_JS + ['a.js'], stdout=PIPE).stdout
self.assertTextDataIdentical('Sum of numbers from 1 to 1000: 500500 (expected 500500)', ret.strip())
check_size('a.js', 150000)
check_size('a.wasm', 80000)
# Checks that the invoke_*() wrappers used for managing C++ exceptions will not be generated if exception catching is disabled
def test_no_invoke_functions_are_generated_if_exception_catching_is_disabled(self):
self.skipTest('Skipping other.test_no_invoke_functions_are_generated_if_exception_catching_is_disabled: Enable after new version of fastcomp has been tagged')
for args in [[], ['-s', 'WASM=0']]:
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'DISABLE_EXCEPTION_CATCHING=1', '-o', 'a.html'] + args)
output = open('a.js').read()
self.assertContained('_main', output) # Smoke test that we actually compiled
self.assertNotContained('invoke_', output)
# Verifies that only the minimal needed set of invoke_*() functions will be generated when C++ exceptions are enabled
def test_no_excessive_invoke_functions_are_generated_when_exceptions_are_enabled(self):
self.skipTest('Skipping other.test_no_excessive_invoke_functions_are_generated_when_exceptions_are_enabled: Enable after new version of fastcomp has been tagged')
for args in [[], ['-s', 'WASM=0']]:
self.run_process([EMCC, path_from_root('tests', 'invoke_i.cpp'), '-s', 'DISABLE_EXCEPTION_CATCHING=0', '-o', 'a.html'] + args)
output = open('a.js').read()
self.assertContained('invoke_i', output)
self.assertNotContained('invoke_ii', output)
self.assertNotContained('invoke_v', output)
def test_emscripten_metadata(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c')])
self.assertNotIn(b'emscripten_metadata', open('a.out.wasm', 'rb').read())
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'),
'-s', 'EMIT_EMSCRIPTEN_METADATA'])
self.assertIn(b'emscripten_metadata', open('a.out.wasm', 'rb').read())
# Test in standalone mode too.
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'out.wasm',
'-s', 'EMIT_EMSCRIPTEN_METADATA'])
self.assertIn(b'emscripten_metadata', open('out.wasm', 'rb').read())
# make sure wasm executes correctly
ret = self.run_process(config.NODE_JS + ['a.out.js'], stdout=PIPE).stdout
self.assertContained('hello, world!\n', ret)
@parameterized({
'O0': (False, ['-O0']), # noqa
'O0_emit': (True, ['-O0', '-s', 'EMIT_EMSCRIPTEN_LICENSE']), # noqa
'O2': (False, ['-O2']), # noqa
'O2_emit': (True, ['-O2', '-s', 'EMIT_EMSCRIPTEN_LICENSE']), # noqa
'O2_js_emit': (True, ['-O2', '-s', 'EMIT_EMSCRIPTEN_LICENSE', '-s', 'WASM=0']), # noqa
'O2_closure': (False, ['-O2', '--closure', '1']), # noqa
'O2_closure_emit': (True, ['-O2', '-s', 'EMIT_EMSCRIPTEN_LICENSE', '--closure', '1']), # noqa
'O2_closure_js_emit': (True, ['-O2', '-s', 'EMIT_EMSCRIPTEN_LICENSE', '--closure', '1', '-s', 'WASM=0']), # noqa
})
def test_emscripten_license(self, expect_license, args):
# fastcomp does not support the new license flag
self.run_process([EMCC, path_from_root('tests', 'hello_world.c')] + args)
with open('a.out.js') as f:
js = f.read()
licenses_found = len(re.findall('Copyright [0-9]* The Emscripten Authors', js))
if expect_license:
self.assertNotEqual(licenses_found, 0, 'Unable to find license block in output file!')
self.assertEqual(licenses_found, 1, 'Found too many license blocks in the output file!')
else:
self.assertEqual(licenses_found, 0, 'Found a license block in the output file, but it should not have been there!')
# This test verifies that the generated exports from the asm.js/wasm module reference the
# unminified exported name exactly once. (The output needs to contain the export name once for
# unminified access from calling code, but should not contain the unminified name more than
# once, since that would be wasteful for size.)
def test_function_exports_are_small(self):
def test(args, closure, opt):
extra_args = args + opt + closure
print(extra_args)
args = [EMCC, path_from_root('tests', 'long_function_name_in_export.c'), '-o', 'a.html', '-s', 'ENVIRONMENT=web', '-s', 'DECLARE_ASM_MODULE_EXPORTS=0', '-Werror'] + extra_args
self.run_process(args)
output = open('a.js', 'r').read()
try_delete('a.js')
self.assertNotContained('asm["_thisIsAFunctionExportedFromAsmJsOrWasmWithVeryLongFunction"]', output)
# TODO: Add stricter testing when the Wasm side is also optimized (currently Wasm still needs
# to reference exports multiple times)
if 'WASM=0' in args:
num_times_export_is_referenced = output.count('thisIsAFunctionExportedFromAsmJsOrWasmWithVeryLongFunction')
self.assertEqual(num_times_export_is_referenced, 1)
for closure in [[], ['--closure', '1']]:
for opt in [['-O2'], ['-O3'], ['-Os']]:
test(['-s', 'WASM=0'], closure, opt)
test(['-s', 'WASM_ASYNC_COMPILATION=0'], closure, opt)
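# (Illustrative sketch of the property being tested, with a hypothetical minified
# name "a": unoptimized output may access the export as
#   asm["_thisIsAFunctionExportedFromAsmJsOrWasmWithVeryLongFunction"]
# whereas the size-optimized output is expected to keep only a single unminified
# occurrence, e.g. something like
#   Module["_thisIsAFunctionExportedFromAsmJsOrWasmWithVeryLongFunction"] = asm["a"]
# The exact shape depends on the minifier; only the single-occurrence count is
# asserted above, for the WASM=0 case.)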
# TODO: Debug why the code size is different on Windows and Mac. Also, for
# some unknown reason (at time of writing), this test is not skipped on the
# Windows and Mac autorollers, despite the bot being correctly configured to
# skip this test in all three platforms (Linux, Mac, and Windows).
# The no_windows/no_mac decorators also solve that problem.
@no_windows("Code size is slightly different on Windows")
@no_mac("Code size is slightly different on Mac")
@parameterized({
'hello_world_wasm': ('hello_world', False, True),
'hello_world_wasm2js': ('hello_world', True, True),
'random_printf_wasm': ('random_printf', False),
'random_printf_wasm2js': ('random_printf', True),
'hello_webgl_wasm': ('hello_webgl', False),
'hello_webgl_wasm2js': ('hello_webgl', True),
'hello_webgl2_wasm': ('hello_webgl2', False),
'hello_webgl2_wasm2js': ('hello_webgl2', True),
})
def test_minimal_runtime_code_size(self, test_name, js, compare_js_output=False):
smallest_code_size_args = ['-s', 'MINIMAL_RUNTIME=2',
'-s', 'ENVIRONMENT=web',
'-s', 'TEXTDECODER=2',
'-s', 'ABORTING_MALLOC=0',
'-s', 'ALLOW_MEMORY_GROWTH=0',
'-s', 'SUPPORT_ERRNO=0',
'-s', 'DECLARE_ASM_MODULE_EXPORTS=1',
'-s', 'MALLOC=emmalloc',
'-s', 'GL_EMULATE_GLES_VERSION_STRING_FORMAT=0',
'-s', 'GL_EXTENSIONS_IN_PREFIXED_FORMAT=0',
'-s', 'GL_SUPPORT_AUTOMATIC_ENABLE_EXTENSIONS=0',
'-s', 'GL_SUPPORT_SIMPLE_ENABLE_EXTENSIONS=0',
'-s', 'GL_TRACK_ERRORS=0',
'-s', 'GL_SUPPORT_EXPLICIT_SWAP_CONTROL=0',
'-s', 'GL_POOL_TEMP_BUFFERS=0',
'-s', 'MIN_CHROME_VERSION=58',
'-s', 'NO_FILESYSTEM=1',
'--output_eol', 'linux',
'-Oz',
'--closure', '1',
'-DNDEBUG',
'-ffast-math']
wasm2js = ['-s', 'WASM=0', '--memory-init-file', '1']
hello_world_sources = [path_from_root('tests', 'small_hello_world.c'),
'-s', 'RUNTIME_FUNCS_TO_IMPORT=[]',
'-s', 'USES_DYNAMIC_ALLOC=0']
random_printf_sources = [path_from_root('tests', 'hello_random_printf.c'),
'-s', 'RUNTIME_FUNCS_TO_IMPORT=[]',
'-s', 'USES_DYNAMIC_ALLOC=0',
'-s', 'SINGLE_FILE=1']
hello_webgl_sources = [path_from_root('tests', 'minimal_webgl', 'main.cpp'),
path_from_root('tests', 'minimal_webgl', 'webgl.c'),
'--js-library', path_from_root('tests', 'minimal_webgl', 'library_js.js'),
'-s', 'RUNTIME_FUNCS_TO_IMPORT=[]',
'-s', 'USES_DYNAMIC_ALLOC=1', '-lwebgl.js',
'-s', 'MODULARIZE=1']
hello_webgl2_sources = hello_webgl_sources + ['-s', 'MAX_WEBGL_VERSION=2']
sources = {
'hello_world': hello_world_sources,
'random_printf': random_printf_sources,
'hello_webgl': hello_webgl_sources,
'hello_webgl2': hello_webgl2_sources}[test_name]
def print_percent(actual, expected):
if actual == expected:
return ''
return ' ({:+.2f}%)'.format((actual - expected) * 100.0 / expected)
outputs = ['a.html', 'a.js']
args = smallest_code_size_args[:]
if js:
outputs += ['a.mem']
args += wasm2js
test_name += '_wasm2js'
else:
outputs += ['a.wasm']
test_name += '_wasm'
if 'SINGLE_FILE=1' in sources:
outputs = ['a.html']
results_file = path_from_root('tests', 'code_size', test_name + '.json')
expected_results = {}
try:
expected_results = json.loads(open(results_file, 'r').read())
except Exception:
if not os.environ.get('EMTEST_REBASELINE'):
raise
args = [EMCC, '-o', 'a.html'] + args + sources
print(shared.shlex_join(args))
self.run_process(args)
def get_file_gzipped_size(f):
f_gz = f + '.gz'
with gzip.open(f_gz, 'wb') as gzf:
gzf.write(open(f, 'rb').read())
size = os.path.getsize(f_gz)
try_delete(f_gz)
return size
# For certain tests, don't just check the output size but check
# the full JS output matches the expectations. That means that
# any change that touches those core lines of output will need
# to rebaseline this test. However:
# a) such changes deserve extra scrutiny
# b) such changes should be few and far between
# c) rebaselining is trivial (just run with EMTEST_REBASELINE=1)
# Note that we do not compare the full wasm output since that is
# even more fragile and can change with LLVM updates.
if compare_js_output:
js_out = path_from_root('tests', 'code_size', test_name + '.js')
terser = shared.get_npm_cmd('terser')
self.run_process(terser + ['-b', 'beautify=true', 'a.js', '-o', 'pretty.js'])
self.assertFileContents(js_out, open('pretty.js').read())
obtained_results = {}
total_output_size = 0
total_expected_size = 0
total_output_size_gz = 0
total_expected_size_gz = 0
for f in outputs:
f_gz = f + '.gz'
expected_size = expected_results[f] if f in expected_results else float('inf')
expected_size_gz = expected_results[f_gz] if f_gz in expected_results else float('inf')
size = os.path.getsize(f)
size_gz = get_file_gzipped_size(f)
obtained_results[f] = size
obtained_results[f_gz] = size_gz
if size != expected_size and (f.endswith('.js') or f.endswith('.html')):
print('Contents of ' + f + ': ')
print(open(f, 'r').read())
print('size of ' + f + ' == ' + str(size) + ', expected ' + str(expected_size) + ', delta=' + str(size - expected_size) + print_percent(size, expected_size))
print('size of ' + f_gz + ' == ' + str(size_gz) + ', expected ' + str(expected_size_gz) + ', delta=' + str(size_gz - expected_size_gz) + print_percent(size_gz, expected_size_gz))
# Hack: Generated .mem initializer files have different sizes on different
# platforms (Windows gives x, CircleCI Linux gives x-17 bytes, my home
# Linux gives x+2 bytes..). Likewise asm.js files seem to be affected by
# the LLVM IR text names, which lead to asm.js names, which leads to
# different code sizes, which lead to different relooper choices,
# as a result leading to slightly different total code sizes.
# Also as of July 16, 2020, wasm2js files have different sizes on
# different platforms (Windows and MacOS improved to give a slightly
# better thing than Linux does, which didn't change; this just
# started to happen on CI, not in response to a code update, so it
# may have been present all along but just noticed now; it only
# happens in wasm2js, so it may be platform-nondeterminism in closure
# compiler).
# TODO: identify what is causing this. meanwhile allow some amount of slop
if not os.environ.get('EMTEST_REBASELINE'):
if js:
slop = 30
else:
slop = 20
if size <= expected_size + slop and size >= expected_size - slop:
size = expected_size
# N.B. even though the test code above prints out gzip compressed sizes, regression testing is done against uncompressed sizes.
# This is because optimizing for compressed sizes can be unpredictable and sometimes counterproductive.
total_output_size += size
total_expected_size += expected_size
total_output_size_gz += size_gz
total_expected_size_gz += expected_size_gz
obtained_results['total'] = total_output_size
obtained_results['total_gz'] = total_output_size_gz
print('Total output size=' + str(total_output_size) + ' bytes, expected total size=' + str(total_expected_size) + ', delta=' + str(total_output_size - total_expected_size) + print_percent(total_output_size, total_expected_size))
print('Total output size gzipped=' + str(total_output_size_gz) + ' bytes, expected total size gzipped=' + str(total_expected_size_gz) + ', delta=' + str(total_output_size_gz - total_expected_size_gz) + print_percent(total_output_size_gz, total_expected_size_gz))
if os.environ.get('EMTEST_REBASELINE'):
open(results_file, 'w').write(json.dumps(obtained_results, indent=2) + '\n')
else:
if total_output_size > total_expected_size:
print('Oops, overall generated code size regressed by ' + str(total_output_size - total_expected_size) + ' bytes!')
if total_output_size < total_expected_size:
print('Hey amazing, overall generated code size was improved by ' + str(total_expected_size - total_output_size) + ' bytes! Rerun test with other.test_minimal_runtime_code_size with EMTEST_REBASELINE=1 to update the expected sizes!')
self.assertEqual(total_output_size, total_expected_size)
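# (Usage note: a rebaseline run would typically be invoked along the lines of
#   EMTEST_REBASELINE=1 tests/runner.py other.test_minimal_runtime_code_size_hello_world_wasm
# which rewrites the expected-size JSON under tests/code_size/. The exact runner
# entry point is assumed here and may differ between emscripten checkouts.)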
# Test that legacy settings that have been fixed to a specific value can no longer be changed.
def test_legacy_settings_forbidden_to_change(self):
stderr = self.expect_fail([EMCC, '-s', 'MEMFS_APPEND_TO_TYPED_ARRAYS=0', path_from_root('tests', 'hello_world.c')])
self.assertContained('MEMFS_APPEND_TO_TYPED_ARRAYS=0 is no longer supported', stderr)
self.run_process([EMCC, '-s', 'MEMFS_APPEND_TO_TYPED_ARRAYS=1', path_from_root('tests', 'hello_world.c')])
self.run_process([EMCC, '-s', 'PRECISE_I64_MATH=2', path_from_root('tests', 'hello_world.c')])
def test_jsmath(self):
self.run_process([EMCC, path_from_root('tests', 'other', 'jsmath.cpp'), '-Os', '-o', 'normal.js', '--closure', '0'])
normal_js_size = os.path.getsize('normal.js')
normal_wasm_size = os.path.getsize('normal.wasm')
self.run_process([EMCC, path_from_root('tests', 'other', 'jsmath.cpp'), '-Os', '-o', 'jsmath.js', '-s', 'JS_MATH', '--closure', '0'])
jsmath_js_size = os.path.getsize('jsmath.js')
jsmath_wasm_size = os.path.getsize('jsmath.wasm')
# js math increases JS size, but decreases wasm, and wins overall
# (it would win more with closure, but there is no point in making the test slower)
self.assertLess(normal_js_size, jsmath_js_size)
self.assertLess(jsmath_wasm_size, normal_wasm_size)
self.assertLess(jsmath_js_size + jsmath_wasm_size, 0.90 * (normal_js_size + normal_wasm_size))
# js math has almost identical output, but misses some corner cases, 4 out of 34
normal = self.run_js('normal.js').splitlines()
jsmath = self.run_js('jsmath.js').splitlines()
assert len(normal) == len(jsmath)
diff = 0
for i in range(len(normal)):
if normal[i] != jsmath[i]:
diff += 1
self.assertEqual(diff, 4)
def test_strict_mode_hello_world(self):
# Verify that strict mode can be used for simple hello world program both
# via the environment EMCC_STRICT=1 and from the command line `-s STRICT`
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'STRICT=1']
self.run_process(cmd)
with env_modify({'EMCC_STRICT': '1'}):
self.do_runf(path_from_root('tests', 'hello_world.c'), 'hello, world!')
def test_legacy_settings(self):
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'SPLIT_MEMORY=0']
# By default warnings are not shown
stderr = self.run_process(cmd, stderr=PIPE).stderr
self.assertNotContained('warning', stderr)
# Adding -Wlegacy-settings enables the warning
stderr = self.run_process(cmd + ['-Wlegacy-settings'], stderr=PIPE).stderr
self.assertContained('warning: use of legacy setting: SPLIT_MEMORY', stderr)
self.assertContained('[-Wlegacy-settings]', stderr)
def test_strict_mode_legacy_settings(self):
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'SPLIT_MEMORY=0']
self.run_process(cmd)
stderr = self.expect_fail(cmd + ['-s', 'STRICT=1'])
self.assertContained('legacy setting used in strict mode: SPLIT_MEMORY', stderr)
with env_modify({'EMCC_STRICT': '1'}):
stderr = self.expect_fail(cmd)
self.assertContained('legacy setting used in strict mode: SPLIT_MEMORY', stderr)
def test_strict_mode_legacy_settings_runtime(self):
# Verify that legacy settings are not accessible at runtime under strict
# mode.
self.set_setting('RETAIN_COMPILER_SETTINGS', 1)
src = r'''\
#include <stdio.h>
#include <emscripten.h>
int main() {
printf("BINARYEN_METHOD: %s\n", (char*)emscripten_get_compiler_setting("BINARYEN_METHOD"));
return 0;
}
'''
self.do_run(src, 'BINARYEN_METHOD: native-wasm')
with env_modify({'EMCC_STRICT': '1'}):
self.do_run(src, 'invalid compiler setting: BINARYEN_METHOD')
self.set_setting('STRICT', 1)
self.do_run(src, 'invalid compiler setting: BINARYEN_METHOD')
def test_renamed_setting(self):
# Verify that renamed settings are available by either name (when not in
# strict mode).
self.set_setting('RETAIN_COMPILER_SETTINGS', 1)
src = r'''\
#include <stdio.h>
#include <emscripten.h>
int main() {
printf("%d %d\n",
emscripten_get_compiler_setting("BINARYEN_ASYNC_COMPILATION"),
emscripten_get_compiler_setting("WASM_ASYNC_COMPILATION"));
return 0;
}
'''
# Setting the new name should set both
self.set_setting('WASM_ASYNC_COMPILATION', 0)
self.do_run(src, '0 0')
self.set_setting('WASM_ASYNC_COMPILATION', 1)
self.do_run(src, '1 1')
self.clear_setting('WASM_ASYNC_COMPILATION')
# Setting the old name should set both
self.set_setting('BINARYEN_ASYNC_COMPILATION', 0)
self.do_run(src, '0 0')
self.set_setting('BINARYEN_ASYNC_COMPILATION', 1)
self.do_run(src, '1 1')
def test_strict_mode_legacy_settings_library(self):
create_test_file('lib.js', r'''
#if SPLIT_MEMORY
#endif
''')
cmd = [EMCC, path_from_root('tests', 'hello_world.c'), '-o', 'out.js', '--js-library', 'lib.js']
self.run_process(cmd)
self.assertContained('ReferenceError: SPLIT_MEMORY is not defined', self.expect_fail(cmd + ['-s', 'STRICT=1']))
with env_modify({'EMCC_STRICT': '1'}):
self.assertContained('ReferenceError: SPLIT_MEMORY is not defined', self.expect_fail(cmd))
def test_strict_mode_link_cxx(self):
# In strict mode C++ programs fail to link unless run with `em++`.
self.run_process([EMXX, '-sSTRICT', path_from_root('tests', 'hello_libcxx.cpp')])
err = self.expect_fail([EMCC, '-sSTRICT', path_from_root('tests', 'hello_libcxx.cpp')])
self.assertContained('error: undefined symbol:', err)
def test_safe_heap_log(self):
self.set_setting('SAFE_HEAP')
self.set_setting('SAFE_HEAP_LOG')
self.set_setting('EXIT_RUNTIME')
src = open(path_from_root('tests', 'hello_world.c')).read()
self.do_run(src, 'SAFE_HEAP load: ')
self.set_setting('WASM', 0)
self.do_run(src, 'SAFE_HEAP load: ')
def test_mini_printfs(self):
def test(code):
with open('src.c', 'w') as f:
f.write('''
#include <stdio.h>
void* unknown_value;
int main() {
%s
}
''' % code)
self.run_process([EMCC, 'src.c', '-O1'])
return os.path.getsize('a.out.wasm')
i = test('printf("%d", *(int*)unknown_value);')
f = test('printf("%f", *(double*)unknown_value);')
lf = test('printf("%Lf", *(long double*)unknown_value);')
both = test('printf("%d", *(int*)unknown_value); printf("%Lf", *(long double*)unknown_value);')
print(i, f, lf, both)
# iprintf is much smaller than printf with float support
self.assertGreater(i, f - 3400)
self.assertLess(i, f - 3000)
# __small_printf is somewhat smaller than printf with long double support
self.assertGreater(f, lf - 900)
self.assertLess(f, lf - 500)
# both is a little bigger still
self.assertGreater(lf, both - 100)
self.assertLess(lf, both - 50)
@parameterized({
'normal': ([], '''\
0.000051 => -5.123719529365189373493194580078e-05
0.000051 => -5.123719300544352718866300544498e-05
0.000051 => -5.123719300544352718866300544498e-05
'''),
'full_long_double': (['-s', 'PRINTF_LONG_DOUBLE'], '''\
0.000051 => -5.123719529365189373493194580078e-05
0.000051 => -5.123719300544352718866300544498e-05
0.000051 => -5.123719300544352710023893104250e-05
'''),
})
def test_long_double_printing(self, args, expected):
create_test_file('src.cpp', r'''
#include <stdio.h>
int main(void) {
float f = 5.123456789e-5;
double d = 5.123456789e-5;
long double ld = 5.123456789e-5;
printf("%f => %.30e\n", f, f / (f - 1));
printf("%f => %.30e\n", d, d / (d - 1));
printf("%Lf => %.30Le\n", ld, ld / (ld - 1));
}
''')
self.run_process([EMCC, 'src.cpp'] + args)
self.assertContained(expected, self.run_js('a.out.js'))
# Tests that passing -s MALLOC=none will not include system malloc() to the build.
def test_malloc_none(self):
stderr = self.expect_fail([EMCC, path_from_root('tests', 'malloc_none.c'), '-s', 'MALLOC=none'])
self.assertContained('undefined symbol: malloc', stderr)
@parameterized({
'c': ['c'],
'cpp': ['cpp'],
})
def test_lsan_leaks(self, ext):
self.do_smart_test(path_from_root('tests', 'other', 'test_lsan_leaks.' + ext),
emcc_args=['-fsanitize=leak', '-s', 'ALLOW_MEMORY_GROWTH=1'],
assert_returncode=NON_ZERO, literals=[
'Direct leak of 2048 byte(s) in 1 object(s) allocated from',
'Direct leak of 1337 byte(s) in 1 object(s) allocated from',
'Direct leak of 42 byte(s) in 1 object(s) allocated from',
])
@parameterized({
'c': ['c', [
r'in malloc.*a\.out\.wasm\+0x',
r'(?im)in f (|[/a-z\.]:).*/test_lsan_leaks\.c:6:21$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.c:10:16$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.c:12:3$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.c:13:3$',
]],
'cpp': ['cpp', [
r'in operator new\[\]\(unsigned long\).*a\.out\.wasm\+0x',
r'(?im)in f\(\) (|[/a-z\.]:).*/test_lsan_leaks\.cpp:4:21$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.cpp:8:16$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.cpp:10:3$',
r'(?im)in main (|[/a-z\.]:).*/test_lsan_leaks\.cpp:11:3$',
]],
})
def test_lsan_stack_trace(self, ext, regexes):
self.do_smart_test(path_from_root('tests', 'other', 'test_lsan_leaks.' + ext),
emcc_args=['-fsanitize=leak', '-s', 'ALLOW_MEMORY_GROWTH=1', '-g4'],
assert_returncode=NON_ZERO, literals=[
'Direct leak of 2048 byte(s) in 1 object(s) allocated from',
'Direct leak of 1337 byte(s) in 1 object(s) allocated from',
'Direct leak of 42 byte(s) in 1 object(s) allocated from',
], regexes=regexes)
@parameterized({
'c': ['c'],
'cpp': ['cpp'],
})
def test_lsan_no_leak(self, ext):
self.do_smart_test(path_from_root('tests', 'other', 'test_lsan_no_leak.' + ext),
emcc_args=['-fsanitize=leak', '-s', 'ALLOW_MEMORY_GROWTH=1', '-s', 'ASSERTIONS=0'],
regexes=[r'^\s*$'])
def test_lsan_no_stack_trace(self):
self.do_smart_test(path_from_root('tests', 'other', 'test_lsan_leaks.c'),
emcc_args=['-fsanitize=leak', '-s', 'ALLOW_MEMORY_GROWTH=1', '-DDISABLE_CONTEXT'],
assert_returncode=NON_ZERO, literals=[
'Direct leak of 3427 byte(s) in 3 object(s) allocated from:',
'SUMMARY: LeakSanitizer: 3427 byte(s) leaked in 3 allocation(s).',
])
def test_asan_null_deref(self):
self.do_smart_test(path_from_root('tests', 'other', 'test_asan_null_deref.c'),
emcc_args=['-fsanitize=address', '-sALLOW_MEMORY_GROWTH=1', '-sINITIAL_MEMORY=314572800'],
assert_returncode=NON_ZERO, literals=[
'AddressSanitizer: null-pointer-dereference on address',
])
def test_asan_no_stack_trace(self):
self.do_smart_test(path_from_root('tests', 'other', 'test_lsan_leaks.c'),
emcc_args=['-fsanitize=address', '-sALLOW_MEMORY_GROWTH=1', '-sINITIAL_MEMORY=314572800', '-DDISABLE_CONTEXT', '-s', 'EXIT_RUNTIME'],
assert_returncode=NON_ZERO, literals=[
'Direct leak of 3427 byte(s) in 3 object(s) allocated from:',
'SUMMARY: AddressSanitizer: 3427 byte(s) leaked in 3 allocation(s).',
])
def test_asan_pthread_stubs(self):
self.do_smart_test(path_from_root('tests', 'other', 'test_asan_pthread_stubs.c'), emcc_args=['-fsanitize=address', '-sALLOW_MEMORY_GROWTH=1', '-sINITIAL_MEMORY=314572800'])
def test_proxy_to_pthread_stack(self):
self.node_args += ['--experimental-wasm-threads', '--experimental-wasm-bulk-memory']
self.do_smart_test(path_from_root('tests', 'other', 'test_proxy_to_pthread_stack.c'),
['success'],
engine=config.NODE_JS,
emcc_args=['-s', 'USE_PTHREADS', '-s', 'PROXY_TO_PTHREAD', '-s', 'TOTAL_STACK=1048576'])
@parameterized({
'async': ['-s', 'WASM_ASYNC_COMPILATION=1'],
'sync': ['-s', 'WASM_ASYNC_COMPILATION=0'],
})
def test_offset_converter(self, *args):
self.do_smart_test(path_from_root('tests', 'other', 'test_offset_converter.c'),
emcc_args=['-s', 'USE_OFFSET_CONVERTER', '-g4'] + list(args), literals=['ok'])
@no_windows('ptys and select are not available on windows')
def test_build_error_color(self):
create_test_file('src.c', 'int main() {')
returncode, output = self.run_on_pty([EMCC, 'src.c'])
self.assertNotEqual(returncode, 0)
self.assertIn(b"\x1b[1msrc.c:1:13: \x1b[0m\x1b[0;1;31merror: \x1b[0m\x1b[1mexpected '}'\x1b[0m", output)
self.assertIn(b"\x1b[31merror: ", output)
@parameterized({
'fno_diagnostics_color': ['-fno-diagnostics-color'],
'fdiagnostics_color_never': ['-fdiagnostics-color=never'],
})
@no_windows('ptys and select are not available on windows')
def test_pty_no_color(self, flag):
with open('src.c', 'w') as f:
f.write('int main() {')
returncode, output = self.run_on_pty([EMCC, flag, 'src.c'])
self.assertNotEqual(returncode, 0)
self.assertNotIn(b'\x1b', output)
def test_sanitizer_color(self):
create_test_file('src.c', '''
#include <emscripten.h>
int main() {
int *p = 0, q;
EM_ASM({ Module.printWithColors = true; });
q = *p;
}
''')
self.run_process([EMCC, '-fsanitize=null', 'src.c'])
output = self.run_js('a.out.js')
self.assertIn('\x1b[1msrc.c', output)
def test_main_reads_params(self):
create_test_file('no.c', '''
int main() {
return 42;
}
''')
self.run_process([EMCC, 'no.c', '-O3', '-o', 'no.js'])
no = os.path.getsize('no.js')
create_test_file('yes.c', '''
int main(int argc, char **argv) {
return argc;
}
''')
self.run_process([EMCC, 'yes.c', '-O3', '-o', 'yes.js'])
yes = os.path.getsize('yes.js')
# not having to set up argc/argv allows us to avoid including a
# significant amount of JS for string support (which is not needed
# otherwise in such a trivial program).
self.assertLess(no, 0.95 * yes)
def test_INCOMING_MODULE_JS_API(self):
def test(args):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O3', '--closure', '1'] + args)
for engine in config.JS_ENGINES:
self.assertContained('hello, world!', self.run_js('a.out.js', engine=engine))
with open('a.out.js') as f:
# ignore \r which on windows can increase the size
return len(f.read().replace('\r', ''))
normal = test([])
changed = test(['-s', 'INCOMING_MODULE_JS_API=[]'])
print('sizes', normal, changed)
# Changing this option to [] should decrease code size.
self.assertLess(changed, normal)
# Check an absolute code size as well, with some slack.
self.assertLess(abs(changed - 5627), 150)
def test_llvm_includes(self):
create_test_file('atomics.c', '#include <stdatomic.h>')
self.build('atomics.c')
def test_mmap_and_munmap(self):
emcc_args = []
for f in ['data_ro.dat', 'data_rw.dat']:
create_test_file(f, 'Test file')
emcc_args.extend(['--embed-file', f])
self.do_other_test('test_mmap_and_munmap.cpp', emcc_args)
def test_mmap_and_munmap_anonymous(self):
self.do_other_test('test_mmap_and_munmap_anonymous.cpp', emcc_args=['-s', 'NO_FILESYSTEM'])
def test_mmap_and_munmap_anonymous_asan(self):
self.do_other_test('test_mmap_and_munmap_anonymous.cpp', emcc_args=['-s', 'NO_FILESYSTEM', '-fsanitize=address', '-s', 'ALLOW_MEMORY_GROWTH=1', '-sINITIAL_MEMORY=314572800'])
def test_mmap_memorygrowth(self):
self.do_other_test('test_mmap_memorygrowth.cpp', ['-s', 'ALLOW_MEMORY_GROWTH=1'])
def test_files_and_module_assignment(self):
# a pre-js can set Module to a new object or otherwise undo file preloading/
# embedding changes to Module.preRun. we show an error to avoid confusion
create_test_file('pre.js', 'Module = {};')
create_test_file('src.cpp', r'''
#include <stdio.h>
int main() {
printf("file exists: %d\n", !!fopen("src.cpp", "rb"));
}
''')
self.run_process([EMCC, 'src.cpp', '--pre-js', 'pre.js', '--embed-file', 'src.cpp'])
result = self.run_js('a.out.js', assert_returncode=NON_ZERO)
self.assertContained('Module.preRun should exist because file support used it; did a pre-js delete it?', result)
def test_error(pre):
create_test_file('pre.js', pre)
self.run_process([EMCC, 'src.cpp', '--pre-js', 'pre.js', '--embed-file', 'src.cpp'])
result = self.run_js('a.out.js', assert_returncode=NON_ZERO)
self.assertContained('All preRun tasks that exist before user pre-js code should remain after; did you replace Module or modify Module.preRun?', result)
# error if the user replaces Module or Module.preRun
test_error('Module = { preRun: [] };')
test_error('Module.preRun = [];')
def test_EMSCRIPTEN_and_STRICT(self):
# __EMSCRIPTEN__ is the proper define; we support EMSCRIPTEN for legacy
# code, unless STRICT is enabled.
create_test_file('src.c', '''
#ifndef EMSCRIPTEN
#error "not defined"
#endif
''')
self.run_process([EMCC, 'src.c', '-c'])
self.expect_fail([EMCC, 'src.c', '-s', 'STRICT', '-c'])
def test_exception_settings(self):
for catching, throwing, opts in itertools.product([0, 1], repeat=3):
cmd = [EMCC, path_from_root('tests', 'other', 'exceptions_modes_symbols_defined.cpp'), '-s', 'DISABLE_EXCEPTION_THROWING=%d' % (1 - throwing), '-s', 'DISABLE_EXCEPTION_CATCHING=%d' % (1 - catching), '-O%d' % opts]
print(cmd)
if not throwing and not catching:
self.assertContained('DISABLE_EXCEPTION_THROWING was set (likely due to -fno-exceptions), which means no C++ exception throwing support code is linked in, but such support is required', self.expect_fail(cmd))
elif not throwing and catching:
self.assertContained('DISABLE_EXCEPTION_THROWING was set (probably from -fno-exceptions) but is not compatible with enabling exception catching (DISABLE_EXCEPTION_CATCHING=0)', self.expect_fail(cmd))
else:
self.run_process(cmd)
def test_fignore_exceptions(self):
# the new clang flag -fignore-exceptions basically is the same as -s DISABLE_EXCEPTION_CATCHING=1,
# that is, it allows throwing, but emits no support code for catching.
self.run_process([EMCC, path_from_root('tests', 'other', 'exceptions_modes_symbols_defined.cpp'), '-s', 'DISABLE_EXCEPTION_CATCHING=0'])
enable_size = os.path.getsize('a.out.wasm')
self.run_process([EMCC, path_from_root('tests', 'other', 'exceptions_modes_symbols_defined.cpp'), '-s', 'DISABLE_EXCEPTION_CATCHING=1'])
disable_size = os.path.getsize('a.out.wasm')
    self.run_process([EMCC, path_from_root('tests', 'other', 'exceptions_modes_symbols_defined.cpp'), '-fignore-exceptions'])
ignore_size = os.path.getsize('a.out.wasm')
self.assertGreater(enable_size, disable_size)
self.assertEqual(disable_size, ignore_size)
def test_f_exception(self):
create_test_file('src.cpp', r'''
#include <stdio.h>
int main () {
try {
throw 42;
} catch (int e) {
printf("CAUGHT: %d\n", e);
}
return 0;
}
''')
for compile_flags, link_flags, expect_caught in [
# exceptions are off by default
([], [], False),
# enabling exceptions at link and compile works
(['-fexceptions'], ['-fexceptions'], True),
# just compile isn't enough as the JS runtime lacks support
(['-fexceptions'], [], False),
# just link isn't enough as codegen didn't emit exceptions support
([], ['-fexceptions'], False),
]:
print(compile_flags, link_flags, expect_caught)
self.run_process([EMCC, 'src.cpp', '-c', '-o', 'src.o'] + compile_flags)
self.run_process([EMCC, 'src.o'] + link_flags)
result = self.run_js('a.out.js', assert_returncode=0 if expect_caught else NON_ZERO)
self.assertContainedIf('CAUGHT', result, expect_caught)
def test_assertions_on_internal_api_changes(self):
create_test_file('src.c', r'''
#include <emscripten.h>
int main(int argc, char **argv) {
EM_ASM({
try {
Module['read'];
out('it should not be there');
} catch(e) {
out('error: ' + e);
}
});
}
''')
self.run_process([EMCC, 'src.c', '-s', 'ASSERTIONS'])
self.assertContained('Module.read has been replaced with plain read', self.run_js('a.out.js'))
def test_assertions_on_incoming_module_api_changes(self):
create_test_file('pre.js', r'''
var Module = {
read: function() {}
}
''')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'ASSERTIONS', '--pre-js', 'pre.js'])
self.assertContained('Module.read option was removed', self.run_js('a.out.js', assert_returncode=NON_ZERO))
def test_assertions_on_outgoing_module_api_changes(self):
create_test_file('src.cpp', r'''
#include <emscripten.h>
int main() {
EM_ASM({
console.log();
function check(name) {
try {
Module[name];
console.log("success: " + name);
} catch(e) {
}
}
check("read");
// TODO check("setWindowTitle");
check("wasmBinary");
check("arguments");
});
}
''')
self.run_process([EMCC, 'src.cpp', '-s', 'ASSERTIONS'])
self.assertContained('''
Module.read has been replaced with plain read_ (the initial value can be provided on Module, but after startup the value is only looked for on a local variable of that name)
Module.wasmBinary has been replaced with plain wasmBinary (the initial value can be provided on Module, but after startup the value is only looked for on a local variable of that name)
Module.arguments has been replaced with plain arguments_ (the initial value can be provided on Module, but after startup the value is only looked for on a local variable of that name)
''', self.run_js('a.out.js'))
def test_assertions_on_ready_promise(self):
# check that when assertions are on we give useful error messages for
# mistakenly thinking the Promise is an instance. I.e., once you could do
# Module()._main to get an instance and the main function, but after
# the breaking change in #10697 Module() now returns a promise, and to get
# the instance you must use .then() to get a callback with the instance.
create_test_file('test.js', r'''
try {
Module()._main;
} catch(e) {
console.log(e);
}
try {
Module().onRuntimeInitialized = 42;
} catch(e) {
console.log(e);
}
''')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'MODULARIZE', '-s', 'ASSERTIONS', '--extern-post-js', 'test.js'])
# A return code of 7 is from the unhandled Promise rejection
out = self.run_js('a.out.js', assert_returncode=7)
self.assertContained('You are getting _main on the Promise object, instead of the instance. Use .then() to get called back with the instance, see the MODULARIZE docs in src/settings.js', out)
self.assertContained('You are setting onRuntimeInitialized on the Promise object, instead of the instance. Use .then() to get called back with the instance, see the MODULARIZE docs in src/settings.js', out)
def test_em_asm_duplicate_strings(self):
    # We had a regression where two different EM_ASM strings from two different
    # object files were de-duplicated in wasm-emscripten-finalize. This used to
    # work when we used a zero-based index to store the JS strings, but once we
# switched to absolute addresses the string needs to exist twice in the JS
# file.
create_test_file('foo.c', '''
#include <emscripten.h>
void foo() {
EM_ASM({ console.log('Hello, world!'); });
}
''')
create_test_file('main.c', '''
#include <emscripten.h>
void foo();
int main() {
foo();
EM_ASM({ console.log('Hello, world!'); });
return 0;
}
''')
self.run_process([EMCC, '-c', 'foo.c'])
self.run_process([EMCC, '-c', 'main.c'])
self.run_process([EMCC, 'foo.o', 'main.o'])
self.assertContained('Hello, world!\nHello, world!\n', self.run_js('a.out.js'))
def test_em_asm_strict_c(self):
create_test_file('src.c', '''
#include <emscripten/em_asm.h>
int main() {
EM_ASM({ console.log('Hello, world!'); });
}
''')
result = self.run_process([EMCC, '-std=c11', 'src.c'], stderr=PIPE, check=False)
self.assertNotEqual(result.returncode, 0)
self.assertIn('EM_ASM does not work in -std=c* modes, use -std=gnu* modes instead', result.stderr)
def test_boost_graph(self):
self.do_smart_test(path_from_root('tests', 'test_boost_graph.cpp'),
emcc_args=['-s', 'USE_BOOST_HEADERS=1'])
def test_setjmp_em_asm(self):
create_test_file('src.c', '''
#include <emscripten.h>
#include <setjmp.h>
int main() {
jmp_buf buf;
setjmp(buf);
EM_ASM({
console.log("hello world");
});
}
''')
result = self.run_process([EMCC, 'src.c'], stderr=PIPE, check=False)
self.assertNotEqual(result.returncode, 0)
self.assertIn('Cannot use EM_ASM* alongside setjmp/longjmp', result.stderr)
self.assertIn('Please consider using EM_JS, or move the EM_ASM into another function.', result.stderr)
def test_missing_stdlibs(self):
# Certain standard libraries are expected to be useable via -l flags but
# don't actually exist in our standard library path. Make sure we don't
# error out when linking with these flags.
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-lm', '-ldl', '-lrt', '-lpthread'])
def test_supported_linker_flags(self):
out = self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,-rpath=foo'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `-rpath=foo`', out)
out = self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,-rpath-link,foo'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `-rpath-link`', out)
out = self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'),
'-Wl,--no-check-features,-mllvm,-debug'], stderr=PIPE).stderr
self.assertNotContained('warning: ignoring unsupported linker flag', out)
out = self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,-allow-shlib-undefined'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `-allow-shlib-undefined`', out)
out = self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,--allow-shlib-undefined'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `--allow-shlib-undefined`', out)
out = self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,-version-script,foo'], stderr=PIPE).stderr
self.assertContained('warning: ignoring unsupported linker flag: `-version-script`', out)
def test_linker_flags_pass_through(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Wl,--waka'])
self.assertContained('wasm-ld: error: unknown argument: --waka', err)
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Xlinker', '--waka'])
self.assertContained('wasm-ld: error: unknown argument: --waka', err)
def test_linker_flags_unused(self):
err = self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-c', '-lbar'], stderr=PIPE).stderr
self.assertContained("warning: argument unused during compilation: '-lbar' [-Wunused-command-line-argument]", err)
def test_non_wasm_without_wasm_in_vm(self):
# Test that our non-wasm output does not depend on wasm support in the vm.
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-s', 'WASM=0'])
with open('a.out.js') as f:
js = f.read()
with open('a.out.js', 'w') as f:
f.write('var WebAssembly = null;\n' + js)
for engine in config.JS_ENGINES:
self.assertContained('hello, world!', self.run_js('a.out.js', engine=engine))
def test_empty_output_extension(self):
# Default to JS output when no extension is present
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-Werror', '-o', 'hello'])
self.assertContained('hello, world!', self.run_js('hello'))
def test_backwards_deps_in_archive(self):
# Test that JS dependencies from deps_info.json work for code linked via
# static archives using -l<name>
self.run_process([EMCC, '-c', path_from_root('tests', 'sockets', 'test_gethostbyname.c'), '-o', 'a.o'])
self.run_process([LLVM_AR, 'cr', 'liba.a', 'a.o'])
create_test_file('empty.c', 'static int foo = 0;')
self.run_process([EMCC, 'empty.c', '-la', '-L.'])
self.assertContained('success', self.run_js('a.out.js'))
def test_warning_flags(self):
self.run_process([EMCC, '-c', '-o', 'hello.o', path_from_root('tests', 'hello_world.c')])
cmd = [EMCC, 'hello.o', '-o', 'a.js', '-g', '--closure', '1']
# warning that is enabled by default
stderr = self.run_process(cmd, stderr=PIPE).stderr
self.assertContained('emcc: warning: disabling closure because debug info was requested [-Wemcc]', stderr)
# -w to suppress warnings
stderr = self.run_process(cmd + ['-w'], stderr=PIPE).stderr
self.assertNotContained('warning', stderr)
# -Wno-invalid-input to suppress just this one warning
stderr = self.run_process(cmd + ['-Wno-emcc'], stderr=PIPE).stderr
self.assertNotContained('warning', stderr)
# with -Werror should fail
stderr = self.expect_fail(cmd + ['-Werror'])
self.assertContained('error: disabling closure because debug info was requested [-Wemcc] [-Werror]', stderr)
# with -Werror + -Wno-error=<type> should only warn
stderr = self.run_process(cmd + ['-Werror', '-Wno-error=emcc'], stderr=PIPE).stderr
self.assertContained('emcc: warning: disabling closure because debug info was requested [-Wemcc]', stderr)
    # check that `-Werror=foo` also enables foo
stderr = self.expect_fail(cmd + ['-Werror=legacy-settings', '-s', 'TOTAL_MEMORY=1'])
self.assertContained('error: use of legacy setting: TOTAL_MEMORY (setting renamed to INITIAL_MEMORY) [-Wlegacy-settings] [-Werror]', stderr)
# check that `-Wno-pthreads-mem` disables USE_PTHREADS + ALLOW_GROWTH_MEMORY warning
stderr = self.run_process(cmd + ['-Wno-pthreads-mem-growth', '-s', 'USE_PTHREADS=1', '-s', 'ALLOW_MEMORY_GROWTH=1'], stderr=PIPE).stderr
self.assertNotContained('USE_PTHREADS + ALLOW_MEMORY_GROWTH may run non-wasm code slowly, see https://github.com/WebAssembly/design/issues/1271', stderr)
def test_emranlib(self):
create_test_file('foo.c', 'int foo = 1;')
create_test_file('bar.c', 'int bar = 2;')
self.run_process([EMCC, '-c', 'foo.c', 'bar.c'])
# Create a library with no archive map
self.run_process([EMAR, 'crS', 'liba.a', 'foo.o', 'bar.o'])
output = self.run_process([shared.LLVM_NM, '--print-armap', 'liba.a'], stdout=PIPE).stdout
self.assertNotContained('Archive map', output)
# Add an archive map
self.run_process([EMRANLIB, 'liba.a'])
output = self.run_process([shared.LLVM_NM, '--print-armap', 'liba.a'], stdout=PIPE).stdout
self.assertContained('Archive map', output)
def test_pthread_stub(self):
# Verify that programs containing pthread code can still work even
    # without enabling threads. This is possible because we link in
# libpthread_stub.a
create_test_file('pthread.c', r'''
#include <stdint.h>
#include <stdio.h>
#include <pthread.h>
static void cleanup (void* arg) {
printf("cleanup: %ld\n", (intptr_t)arg);
}
int main() {
pthread_atfork(NULL, NULL, NULL);
pthread_cleanup_push(cleanup, (void*)42);
pthread_cleanup_pop(1);
return 0;
}
''')
self.do_runf('pthread.c', 'cleanup: 42')
def test_stdin_preprocess(self):
create_test_file('temp.h', '#include <string>')
outputStdin = self.run_process([EMCC, '-x', 'c++', '-dM', '-E', '-'], input="#include <string>", stdout=PIPE).stdout
outputFile = self.run_process([EMCC, '-x', 'c++', '-dM', '-E', 'temp.h'], stdout=PIPE).stdout
self.assertTextDataIdentical(outputStdin, outputFile)
def test_stdin_compile_only(self):
# Should fail without -x lang specifier
with open(path_from_root('tests', 'hello_world.cpp')) as f:
err = self.expect_fail([EMCC, '-c', '-'], input=f.read())
self.assertContained('error: -E or -x required when input is from standard input', err)
with open(path_from_root('tests', 'hello_world.cpp')) as f:
self.run_process([EMCC, '-c', '-o', 'out.o', '-x', 'c++', '-'], input=f.read())
self.assertExists('out.o')
# Same again but without an explicit output filename
with open(path_from_root('tests', 'hello_world.cpp')) as f:
self.run_process([EMCC, '-c', '-x', 'c++', '-'], input=f.read())
self.assertExists('-.o')
def test_stdin_compile_and_link(self):
with open(path_from_root('tests', 'hello_world.cpp')) as f:
self.run_process([EMCC, '-x', 'c++', '-'], input=f.read())
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_stdout_link(self):
# linking to stdout `-` doesn't work, we have no way to pass such an output filename
# through post-link tools such as binaryen.
err = self.expect_fail([EMCC, '-o', '-', path_from_root('tests', 'hello_world.cpp')])
self.assertContained('invalid output filename: `-`', err)
self.assertNotExists('-')
err = self.expect_fail([EMCC, '-o', '-foo', path_from_root('tests', 'hello_world.cpp')])
self.assertContained('invalid output filename: `-foo`', err)
self.assertNotExists('-foo')
def test_immutable_after_link(self):
# some builds are guaranteed to not require any binaryen work after wasm-ld
def ok(args, filename='hello_world.cpp', expected='hello, world!'):
print('ok', args, filename)
args += ['-sERROR_ON_WASM_CHANGES_AFTER_LINK']
self.run_process([EMCC, path_from_root('tests', filename)] + args)
self.assertContained(expected, self.run_js('a.out.js'))
# -O0 with BigInt support (to avoid the need for legalization)
required_flags = ['-sWASM_BIGINT']
ok(required_flags)
# Same with DWARF
ok(required_flags + ['-g'])
# Function pointer calls from JS work too
ok(required_flags, filename='hello_world_main_loop.cpp')
# -O1 is ok as we don't run wasm-opt there (but no higher, see below)
ok(required_flags + ['-O1'])
# Exception support shouldn't require changes after linking
ok(required_flags + ['-fexceptions'])
# other builds fail with a standard message + extra details
def fail(args, details):
print('fail', args, details)
args += ['-sERROR_ON_WASM_CHANGES_AFTER_LINK']
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.cpp')] + args)
self.assertContained('changes to the wasm are required after link, but disallowed by ERROR_ON_WASM_CHANGES_AFTER_LINK', err)
self.assertContained(details, err)
# plain -O0
legalization_message = 'to disable int64 legalization (which requires changes after link) use -s WASM_BIGINT'
fail([], legalization_message)
# optimized builds even without legalization
optimization_message = '-O2+ optimizations always require changes, build with -O0 or -O1 instead'
fail(required_flags + ['-O2'], optimization_message)
fail(required_flags + ['-O3'], optimization_message)
def test_output_to_nowhere(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp'), '-o', os.devnull, '-c'])
# Test that passing -s MIN_X_VERSION=-1 on the command line will result in browser X being not supported at all.
# I.e. -s MIN_X_VERSION=-1 is equal to -s MIN_X_VERSION=Infinity
def test_drop_support_for_browser(self):
# Test that -1 means "not supported"
self.run_process([EMCC, path_from_root('tests', 'test_html5_core.c'), '-s', 'MIN_IE_VERSION=-1'])
self.assertContained('allowsDeferredCalls: true', open('a.out.js').read())
self.assertNotContained('allowsDeferredCalls: JSEvents.isInternetExplorer()', open('a.out.js').read())
def test_errno_type(self):
create_test_file('errno_type.c', '''
#include <errno.h>
// Use of these constants in C preprocessor comparisons should work.
#if EPERM > 0
#define DAV1D_ERR(e) (-(e))
#else
#define DAV1D_ERR(e) (e)
#endif
''')
self.run_process([EMCC, 'errno_type.c'])
def test_standalone_syscalls(self):
self.run_process([EMCC, path_from_root('tests', 'other', 'test_standalone_syscalls.cpp'), '-o', 'test.wasm'])
with open(path_from_root('tests', 'other', 'test_standalone_syscalls.out')) as f:
expected = f.read()
for engine in config.WASM_ENGINES:
self.assertContained(expected, self.run_js('test.wasm', engine))
@requires_native_clang
def test_wasm2c_reactor(self):
# test compiling an unsafe library using wasm2c, then using it from a
# main program. this shows it is easy to use wasm2c as a sandboxing
# mechanism.
# first compile the library with emcc, getting a .c and .h
self.run_process([EMCC,
path_from_root('tests', 'other', 'wasm2c', 'unsafe-library.c'),
'-O3', '-o', 'lib.wasm', '-s', 'WASM2C', '--no-entry'])
# compile the main program natively normally, together with the unsafe
# library
self.run_process([CLANG_CC,
path_from_root('tests', 'other', 'wasm2c', 'my-code.c'),
'lib.wasm.c', '-O3', '-o', 'program.exe'] +
clang_native.get_clang_native_args(),
env=clang_native.get_clang_native_env())
output = self.run_process([os.path.abspath('program.exe')], stdout=PIPE).stdout
with open(path_from_root('tests', 'other', 'wasm2c', 'output.txt')) as f:
self.assertEqual(output, f.read())
@parameterized({
'wasm2js': (['-s', 'WASM=0'], ''),
'modularize': (['-s', 'MODULARIZE'], 'Module()'),
})
def test_promise_polyfill(self, constant_args, extern_post_js):
def test(args, expect_fail):
# legacy browsers may lack Promise, which wasm2js depends on. see what
# happens when we kill the global Promise function.
create_test_file('extern-post.js', extern_post_js)
self.run_process([EMCC, path_from_root('tests', 'hello_world.cpp')] + constant_args + args + ['--extern-post-js', 'extern-post.js'])
with open('a.out.js') as f:
js = f.read()
with open('a.out.js', 'w') as f:
f.write('Promise = undefined;\n' + js)
return self.run_js('a.out.js', assert_returncode=NON_ZERO if expect_fail else 0)
# we fail without legacy support
self.assertNotContained('hello, world!', test([], expect_fail=True))
# but work with it
self.assertContained('hello, world!', test(['-s', 'LEGACY_VM_SUPPORT'], expect_fail=False))
# Compile-test for -s USE_WEBGPU=1 and library_webgpu.js.
def test_webgpu_compiletest(self):
for args in [[], ['-s', 'ASSERTIONS=1']]:
self.run_process([EMCC, path_from_root('tests', 'webgpu_dummy.cpp'), '-s', 'USE_WEBGPU=1'] + args)
def test_signature_mismatch(self):
create_test_file('a.c', 'void foo(); int main() { foo(); return 0; }')
create_test_file('b.c', 'int foo() { return 1; }')
stderr = self.run_process([EMCC, 'a.c', 'b.c'], stderr=PIPE).stderr
self.assertContained('function signature mismatch: foo', stderr)
self.expect_fail([EMCC, '-Wl,--fatal-warnings', 'a.c', 'b.c'])
self.expect_fail([EMCC, '-s', 'STRICT', 'a.c', 'b.c'])
def test_lld_report_undefined(self):
create_test_file('main.c', 'void foo(); int main() { foo(); return 0; }')
stderr = self.expect_fail([EMCC, '-s', 'LLD_REPORT_UNDEFINED', 'main.c'])
self.assertContained('wasm-ld: error:', stderr)
self.assertContained('main_0.o: undefined symbol: foo', stderr)
def test_4GB(self):
stderr = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '-s', 'INITIAL_MEMORY=2GB'])
self.assertContained('INITIAL_MEMORY must be less than 2GB due to current spec limitations', stderr)
# Verifies that warning messages that Closure outputs are recorded to console
def test_closure_warnings(self):
proc = self.run_process([EMCC, path_from_root('tests', 'test_closure_warning.c'), '-O3', '--closure', '1', '-s', 'CLOSURE_WARNINGS=quiet'], stderr=PIPE)
self.assertNotContained('WARNING', proc.stderr)
proc = self.run_process([EMCC, path_from_root('tests', 'test_closure_warning.c'), '-O3', '--closure', '1', '-s', 'CLOSURE_WARNINGS=warn'], stderr=PIPE)
self.assertContained('WARNING - [JSC_REFERENCE_BEFORE_DECLARE] Variable referenced before declaration', proc.stderr)
self.expect_fail([EMCC, path_from_root('tests', 'test_closure_warning.c'), '-O3', '--closure', '1', '-s', 'CLOSURE_WARNINGS=error'])
def test_bitcode_input(self):
# Verify that bitcode files are accepted as input
create_test_file('main.c', 'void foo(); int main() { return 0; }')
self.run_process([EMCC, '-emit-llvm', '-c', '-o', 'main.bc', 'main.c'])
self.assertTrue(building.is_bitcode('main.bc'))
self.run_process([EMCC, '-c', '-o', 'main.o', 'main.bc'])
self.assertTrue(building.is_wasm('main.o'))
def test_nostdlib(self):
# First ensure all the system libs are built
self.run_process([EMCC, path_from_root('tests', 'unistd', 'close.c')])
self.assertContained('undefined symbol:', self.expect_fail([EMCC, path_from_root('tests', 'unistd', 'close.c'), '-nostdlib']))
self.assertContained('undefined symbol:', self.expect_fail([EMCC, path_from_root('tests', 'unistd', 'close.c'), '-nodefaultlibs']))
    # Build again but with explicit system libraries
libs = ['-lc', '-lcompiler_rt', '-lc_rt_wasm']
self.run_process([EMCC, path_from_root('tests', 'unistd', 'close.c'), '-nostdlib'] + libs)
self.run_process([EMCC, path_from_root('tests', 'unistd', 'close.c'), '-nodefaultlibs'] + libs)
def test_argument_match(self):
# Verify that emcc arguments match precisely. We had a bug where only the prefix
# was matched
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--minify', '0'])
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '--minifyXX'])
self.assertContained("error: unsupported option '--minifyXX'", err)
def test_argument_missing(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '--minify'])
self.assertContained("error: option '--minify' requires an argument", err)
def test_argument_missing_file(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '--pre-js', 'foo.js'])
self.assertContained("emcc: error: '--pre-js': file not found: 'foo.js'", err)
def test_default_to_cxx(self):
create_test_file('foo.h', '#include <string.h>')
create_test_file('cxxfoo.h', '#include <string>')
    # The default behaviour is to default to C++, which means the C++ header can be compiled even
# with emcc.
self.run_process([EMCC, '-c', 'cxxfoo.h'])
# But this means that C flags can't be passed (since we are assuming C++)
err = self.expect_fail([EMCC, '-std=gnu11', '-c', 'foo.h'])
self.assertContained("'-std=gnu11' not allowed with 'C++'", err)
    # If we disable DEFAULT_TO_CXX then emcc can be used with C flags, but can't be used to build
    # C++ headers
self.run_process([EMCC, '-std=gnu11', '-c', 'foo.h', '-s', 'DEFAULT_TO_CXX=0'])
err = self.expect_fail([EMCC, '-c', 'cxxfoo.h', '-s', 'DEFAULT_TO_CXX=0'])
self.assertContained("'string' file not found", err)
    # Using em++ should always work for C++ headers
self.run_process([EMXX, '-c', 'cxxfoo.h', '-s', 'DEFAULT_TO_CXX=0'])
# Or using emcc with `-x c++`
self.run_process([EMCC, '-c', 'cxxfoo.h', '-s', 'DEFAULT_TO_CXX=0', '-x', 'c++-header'])
@parameterized({
'': ([],),
'minimal': (['-s', 'MINIMAL_RUNTIME'],),
})
def test_support_errno(self, args):
self.emcc_args += args
src = path_from_root('tests', 'core', 'test_support_errno.c')
output = path_from_root('tests', 'core', 'test_support_errno.out')
self.do_run_from_file(src, output)
size_default = os.path.getsize('test_support_errno.js')
# Run the same test again but with SUPPORT_ERRNO disabled. This time we don't expect errno
# to be set after the failing syscall.
self.set_setting('SUPPORT_ERRNO', 0)
output = path_from_root('tests', 'core', 'test_support_errno_disabled.out')
self.do_run_from_file(src, output)
# Verify the JS output was smaller
self.assertLess(os.path.getsize('test_support_errno.js'), size_default)
def test_assembly(self):
self.run_process([EMCC, '-c', path_from_root('tests', 'other', 'test_asm.s'), '-o', 'foo.o'])
src = path_from_root('tests', 'other', 'test_asm.c')
output = path_from_root('tests', 'other', 'test_asm.out')
self.do_run_from_file(src, output, libraries=['foo.o'])
def test_assembly_preprocessed(self):
self.run_process([EMCC, '-c', path_from_root('tests', 'other', 'test_asm_cpp.S'), '-o', 'foo.o'])
src = path_from_root('tests', 'other', 'test_asm.c')
output = path_from_root('tests', 'other', 'test_asm.out')
self.do_run_from_file(src, output, libraries=['foo.o'])
@parameterized({
'': (['-DUSE_KEEPALIVE'],),
'minimal': (['-DUSE_KEEPALIVE', '-s', 'MINIMAL_RUNTIME'],),
'command_line': (['-s', 'EXPORTED_FUNCTIONS=[_g_foo,_main]'],),
})
def test_export_global_address(self, args):
self.emcc_args += args
self.do_run_in_out_file_test('tests', 'other', 'test_export_global_address.c')
def test_linker_version(self):
out = self.run_process([EMCC, '-Wl,--version'], stdout=PIPE).stdout
self.assertContained('LLD ', out)
# Tests that if a JS library function is missing, the linker will print out which function
# depended on the missing function.
def test_chained_js_error_diagnostics(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'test_chained_js_error_diagnostics.c'), '--js-library', path_from_root('tests', 'test_chained_js_error_diagnostics.js')])
self.assertContained("error: undefined symbol: nonexistent_function (referenced by bar__deps: ['nonexistent_function'], referenced by foo__deps: ['bar'], referenced by top-level compiled C/C++ code)", err)
def test_xclang_flag(self):
create_test_file('foo.h', ' ')
self.run_process([EMCC, '-c', '-o', 'out.o', '-Xclang', '-include', '-Xclang', 'foo.h', path_from_root('tests', 'hello_world.c')])
def test_emcc_size_parsing(self):
create_test_file('foo.h', ' ')
err = self.expect_fail([EMCC, '-s', 'TOTAL_MEMORY=X'])
self.assertContained('error: invalid byte size `X`. Valid suffixes are: kb, mb, gb, tb', err)
err = self.expect_fail([EMCC, '-s', 'TOTAL_MEMORY=11PB'])
self.assertContained('error: invalid byte size `11PB`. Valid suffixes are: kb, mb, gb, tb', err)
def test_native_call_before_init(self):
self.set_setting('ASSERTIONS')
self.set_setting('EXPORTED_FUNCTIONS', ['_foo'])
self.add_pre_run('console.log("calling foo"); Module["_foo"]();')
create_test_file('foo.c', '#include <stdio.h>\nint foo() { puts("foo called"); return 3; }')
self.build('foo.c')
err = self.expect_fail(config.NODE_JS + ['foo.js'], stdout=PIPE)
self.assertContained('native function `foo` called before runtime initialization', err)
def test_native_call_after_exit(self):
self.set_setting('ASSERTIONS')
self.set_setting('EXIT_RUNTIME')
self.add_on_exit('console.log("calling main again"); Module["_main"]();')
create_test_file('foo.c', '#include <stdio.h>\nint main() { puts("foo called"); return 0; }')
self.build('foo.c')
err = self.expect_fail(config.NODE_JS + ['foo.js'], stdout=PIPE)
self.assertContained('native function `main` called after runtime exit', err)
def test_metadce_wasm2js_i64(self):
# handling i64 unsigned remainder brings in some i64 support code. metadce
# must not remove it.
create_test_file('src.cpp', r'''
int main(int argc, char **argv) {
// Intentionally do not print anything, to not bring in more code than we
// need to test - this only tests that we do not crash, which we would if
// metadce broke us.
unsigned long long x = argc;
// do some i64 math, but return 0
return (x % (x - 20)) == 42;
}''')
self.run_process([EMCC, 'src.cpp', '-O3', '-s', 'WASM=0'])
self.run_js('a.out.js')
def test_deterministic(self):
    # test that things which are normally nondeterministic (time, randomness)
    # produce identical output across runs when -sDETERMINISTIC is used
create_test_file('src.cpp', r'''
#include <emscripten.h>
#include <stdio.h>
#include <stdlib.h>
#include <time.h>
int main () {
timespec now;
clock_gettime(CLOCK_REALTIME, &now);
printf("C now: %ld %ld\n", now.tv_sec, now.tv_nsec);
printf("js now: %f\n", emscripten_get_now());
printf("C randoms: %d %d %d\n", rand(), rand(), rand());
printf("JS random: %d\n", EM_ASM_INT({ return Math.random() }));
}
''')
self.run_process([EMCC, 'src.cpp', '-sDETERMINISTIC'])
one = self.run_js('a.out.js')
    # ensure that even if the time resolution is 1 second, we would still see a
    # difference between the two runs if the real time were being used
time.sleep(2)
two = self.run_js('a.out.js')
self.assertIdentical(one, two)
def test_err(self):
self.do_other_test('test_err.cpp')
def test_shared_flag(self):
# Test that `-shared` flag causes object file generation but gives a warning
err = self.run_process([EMCC, '-shared', path_from_root('tests', 'hello_world.c'), '-o', 'out.foo'], stderr=PIPE).stderr
self.assertContained('linking a library with `-shared` will emit a static object', err)
self.assertIsObjectFile('out.foo')
    # Test that using an executable output name overrides the `-shared` flag, but produces a warning.
err = self.run_process([EMCC, '-shared', path_from_root('tests', 'hello_world.c'), '-o', 'out.js'],
stderr=PIPE).stderr
self.assertContained('warning: -shared/-r used with executable output suffix', err)
self.run_js('out.js')
@no_windows('windows does not support shbang syntax')
@with_env_modify({'EMMAKEN_JUST_CONFIGURE': '1'})
def test_autoconf_mode(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c')])
# Test that output name is just `a.out` and that it is directly executable
output = self.run_process([os.path.abspath('a.out')], stdout=PIPE).stdout
self.assertContained('hello, world!', output)
def test_standalone_export_main(self):
# Tests that explicitly exported `_main` does not fail, even though `_start` is the entry
# point.
# We should consider making this a warning since the `_main` export is redundant.
self.run_process([EMCC, '-sEXPORTED_FUNCTIONS=[_main]', '-sSTANDALONE_WASM', '-c', path_from_root('tests', 'core', 'test_hello_world.c')])
def test_missing_malloc_export(self):
# we used to include malloc by default. show a clear error in builds with
# ASSERTIONS to help with any confusion when the user calls malloc/free
# directly
create_test_file('unincluded_malloc.c', r'''
#include <emscripten.h>
int main() {
EM_ASM({
try {
_malloc(1);
} catch(e) {
console.log('exception:', e);
}
try {
_free();
} catch(e) {
console.log('exception:', e);
}
});
}
''')
self.do_runf('unincluded_malloc.c', (
"malloc() called but not included in the build - add '_malloc' to EXPORTED_FUNCTIONS",
"free() called but not included in the build - add '_free' to EXPORTED_FUNCTIONS"))
def test_missing_malloc_export_indirect(self):
# we used to include malloc by default. show a clear error in builds with
# ASSERTIONS to help with any confusion when the user calls a JS API that
# requires malloc
create_test_file('unincluded_malloc.c', r'''
#include <emscripten.h>
int main() {
EM_ASM({
try {
allocateUTF8("foo");
} catch(e) {
console.log('exception:', e);
}
});
}
''')
self.do_runf('unincluded_malloc.c', 'malloc was not included, but is needed in allocateUTF8. Adding "_malloc" to EXPORTED_FUNCTIONS should fix that. This may be a bug in the compiler, please file an issue.')
def test_getrusage(self):
self.do_runf(path_from_root('tests', 'other', 'test_getrusage.c'))
@with_env_modify({'EMMAKEN_COMPILER': shared.CLANG_CC})
def test_emmaken_compiler(self):
stderr = self.run_process([EMCC, '-c', path_from_root('tests', 'core', 'test_hello_world.c')], stderr=PIPE).stderr
self.assertContained('warning: `EMMAKEN_COMPILER` is deprecated', stderr)
@no_windows('relies on a shell script')
def test_compiler_wrapper(self):
create_test_file('wrapper.sh', '''\
#!/bin/sh
echo "wrapping compiler call: $@"
exec "$@"
''')
make_executable('wrapper.sh')
with env_modify({'EM_COMPILER_WRAPPER': './wrapper.sh'}):
stdout = self.run_process([EMCC, '-c', path_from_root('tests', 'core', 'test_hello_world.c')], stdout=PIPE).stdout
self.assertContained('wrapping compiler call: ', stdout)
self.assertExists('test_hello_world.o')
stdout = self.run_process([EMCC, '-c', path_from_root('tests', 'core', 'test_hello_world.c'), '--compiler-wrapper=./wrapper.sh'], stdout=PIPE).stdout
self.assertContained('wrapping compiler call: ', stdout)
self.assertExists('test_hello_world.o')
def test_llvm_option_dash_o(self):
# emcc used to interpret -mllvm's option value as the output file if it
# began with -o
stderr = self.run_process(
[EMCC, '-v', '-o', 'llvm_option_dash_o_output', '-mllvm',
'-opt-bisect-limit=1', path_from_root('tests', 'hello_world.c')],
stderr=PIPE).stderr
self.assertExists('llvm_option_dash_o_output')
self.assertNotExists('pt-bisect-limit=1')
self.assertContained(' -mllvm -opt-bisect-limit=1 ', stderr)
# Regression test for #12236: the '-mllvm' argument was indexed instead of
# its value, and the index was out of bounds if the argument was sixth or
# further on the command line
self.run_process(
[EMCC, '-DFOO', '-DBAR', '-DFOOBAR', '-DBARFOO',
'-o', 'llvm_option_dash_o_output', '-mllvm', '-opt-bisect-limit=1',
path_from_root('tests', 'hello_world.c')])
def test_SYSCALL_DEBUG(self):
self.set_setting('SYSCALL_DEBUG')
self.do_runf(path_from_root('tests', 'hello_world.c'), 'syscall! fd_write: [1,')
def test_LIBRARY_DEBUG(self):
self.set_setting('LIBRARY_DEBUG')
self.do_runf(path_from_root('tests', 'hello_world.c'), '[library call:_fd_write: 0x1')
def test_SUPPORT_LONGJMP_executable(self):
stderr = self.run_process([EMCC, path_from_root('tests', 'core', 'test_longjmp.c'), '-s', 'SUPPORT_LONGJMP=0'], stderr=PIPE, check=False).stderr
self.assertContained('error: longjmp support was disabled (SUPPORT_LONGJMP=0), but it is required by the code (either set SUPPORT_LONGJMP=1, or remove uses of it in the project)',
stderr)
def test_SUPPORT_LONGJMP_object(self):
# compile the object *with* support, but link without
self.run_process([EMCC, path_from_root('tests', 'core', 'test_longjmp.c'), '-c', '-s', 'SUPPORT_LONGJMP=1', '-o', 'a.o'])
stderr = self.run_process([EMCC, 'a.o', '-s', 'SUPPORT_LONGJMP=0'], stderr=PIPE, check=False).stderr
self.assertContained('error: longjmp support was disabled (SUPPORT_LONGJMP=0), but it is required by the code (either set SUPPORT_LONGJMP=1, or remove uses of it in the project)',
stderr)
def test_pthread_MODULARIZE(self):
stderr = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-pthread', '-sMODULARIZE'], stderr=PIPE, check=False).stderr
self.assertContained('pthreads + MODULARIZE currently require you to set -s EXPORT_NAME=Something (see settings.js) to Something != Module, so that the .worker.js file can work',
stderr)
def test_jslib_clobber_i(self):
# Regression check for an issue we have where a library clobbering the global `i` variable could
# prevent processing of further libraries.
create_test_file('lib1.js', 'for (var i = 0; i < 100; i++) {}')
create_test_file('lib2.js', '''
mergeInto(LibraryManager.library, {
foo: function() { }
});
''')
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'),
'-s', 'DEFAULT_LIBRARY_FUNCS_TO_INCLUDE=[foo]',
'--js-library=lib1.js',
'--js-library=lib2.js'])
def test_jslib_bad_config(self):
create_test_file('lib.js', '''
mergeInto(LibraryManager.library, {
foo__sig: 'ii',
});
''')
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '--js-library=lib.js'])
self.assertContained('error: Missing library element `foo` for library config `foo__sig`', err)
def test_jslib_ifdef(self):
create_test_file('lib.js', '''
#ifdef ASSERTIONS
var foo;
#endif
''')
proc = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--js-library=lib.js'], stderr=PIPE)
self.assertContained('warning: use of #ifdef in js library. Use #if instead.', proc.stderr)
def test_wasm2js_no_dynamic_linking(self):
for arg in ['-sMAIN_MODULE', '-sSIDE_MODULE', '-sRELOCATABLE']:
      err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), arg, '-sWASM=0'])
self.assertContained('WASM2JS is not compatible with relocatable output', err)
def test_wasm2js_standalone(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-sSTANDALONE_WASM', '-sWASM=0'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
def test_oformat(self):
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--oformat=wasm', '-o', 'out.foo'])
self.assertTrue(building.is_wasm('out.foo'))
self.clear()
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--oformat=html', '-o', 'out.foo'])
self.assertFalse(building.is_wasm('out.foo'))
self.assertContained('<html ', open('out.foo').read())
self.clear()
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--oformat=js', '-o', 'out.foo'])
self.assertFalse(building.is_wasm('out.foo'))
self.assertContained('new ExitStatus', open('out.foo').read())
self.clear()
err = self.expect_fail([EMCC, path_from_root('tests', 'hello_world.c'), '--oformat=foo'])
self.assertContained("error: invalid output format: `foo` (must be one of ['wasm', 'js', 'mjs', 'html', 'bare']", err)
# Tests that the old format of {{{ makeDynCall('sig') }}}(func, param1) works
def test_old_makeDynCall_syntax(self):
err = self.run_process([EMCC, path_from_root('tests', 'test_old_dyncall_format.c'), '--js-library', path_from_root('tests', 'library_test_old_dyncall_format.js')], stderr=PIPE).stderr
self.assertContained('syntax for makeDynCall has changed', err)
def test_post_link(self):
err = self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '--oformat=bare', '-o', 'bare.wasm'], stderr=PIPE).stderr
self.assertContained('--oformat=base/--post-link are experimental and subject to change', err)
err = self.run_process([EMCC, '--post-link', 'bare.wasm'], stderr=PIPE).stderr
self.assertContained('--oformat=base/--post-link are experimental and subject to change', err)
err = self.assertContained('hello, world!', self.run_js('a.out.js'))
def compile_with_wasi_sdk(self, filename, output):
sysroot = os.environ.get('EMTEST_WASI_SYSROOT')
if not sysroot:
self.skipTest('EMTEST_WASI_SYSROOT not found in environment')
sysroot = os.path.expanduser(sysroot)
self.run_process([CLANG_CC, '--sysroot=' + sysroot, '--target=wasm32-wasi', filename, '-o', output])
def test_run_wasi_sdk_output(self):
self.compile_with_wasi_sdk(path_from_root('tests', 'hello_world.c'), 'hello.wasm')
self.run_process([EMCC, '--post-link', '-sPURE_WASI', 'hello.wasm'])
self.assertContained('hello, world!', self.run_js('a.out.js'))
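    # Roughly the equivalent manual workflow for the two steps above (a sketch;
    # $WASI_SYSROOT stands for whatever EMTEST_WASI_SYSROOT points at, and node is
    # just one JS engine that can run the resulting wrapper):
    #   clang --sysroot=$WASI_SYSROOT --target=wasm32-wasi hello_world.c -o hello.wasm
    #   emcc --post-link -sPURE_WASI hello.wasm   # wraps the wasi binary in a.out.js
    #   node a.out.js                             # prints "hello, world!"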
# Test that Closure prints out clear readable error messages when there are errors.
def test_closure_errors(self):
err = self.expect_fail([EMCC, path_from_root('tests', 'closure_error.c'), '-O2', '--closure', '1'])
lines = err.split('\n')
def find_substr_index(s):
for i, line in enumerate(lines):
if s in line:
return i
return -1
idx1 = find_substr_index('[JSC_UNDEFINED_VARIABLE] variable thisVarDoesNotExist is undeclared')
idx2 = find_substr_index('[JSC_UNDEFINED_VARIABLE] variable thisVarDoesNotExistEither is undeclared')
self.assertNotEqual(idx1, -1)
self.assertNotEqual(idx2, -1)
# The errors must be present on distinct lines.
self.assertNotEqual(idx1, idx2)
# Make sure that --cpuprofiler compiles with --closure 1
def test_cpuprofiler_closure(self):
# TODO: Enable '-s', 'CLOSURE_WARNINGS=error' in the following, but that has currently regressed.
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O2', '--closure', '1', '--cpuprofiler'])
# Make sure that --memoryprofiler compiles with --closure 1
def test_memoryprofiler_closure(self):
# TODO: Enable '-s', 'CLOSURE_WARNINGS=error' in the following, but that has currently regressed.
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O2', '--closure', '1', '--memoryprofiler'])
# Make sure that --threadprofiler compiles with --closure 1
def test_threadprofiler_closure(self):
# TODO: Enable '-s', 'CLOSURE_WARNINGS=error' in the following, but that has currently regressed.
self.run_process([EMCC, path_from_root('tests', 'hello_world.c'), '-O2', '-s', 'USE_PTHREADS=1', '--closure', '1', '--threadprofiler'])
def test_syslog(self):
self.do_other_test('test_syslog.c')
def test_split_module(self):
self.set_setting('SPLIT_MODULE')
self.emcc_args += ['-g', '-Wno-experimental']
self.emcc_args += ['--post-js', path_from_root('tests', 'other', 'test_split_module.post.js')]
self.emcc_args += ['-sEXPORTED_FUNCTIONS=[_malloc, _free]']
self.do_other_test('test_split_module.c')
self.assertExists('test_split_module.wasm')
self.assertExists('test_split_module.wasm.orig')
self.assertExists('profile.data')
wasm_split = os.path.join(building.get_binaryen_bin(), 'wasm-split')
self.run_process([wasm_split, '--enable-mutable-globals', '--export-prefix=split@', 'test_split_module.wasm.orig', '-o1', 'primary.wasm', '-o2', 'secondary.wasm', '--profile=profile.data'])
os.remove('test_split_module.wasm')
os.rename('primary.wasm', 'test_split_module.wasm')
os.rename('secondary.wasm', 'test_split_module.wasm.deferred')
result = self.run_js('test_split_module.js')
self.assertNotIn('writing profile', result)
self.assertIn('Hello! answer: 42', result)
| []
| []
| [
"NM",
"EMTEST_WASI_SYSROOT",
"EMTEST_REBASELINE",
"CROSS_COMPILE"
]
| [] | ["NM", "EMTEST_WASI_SYSROOT", "EMTEST_REBASELINE", "CROSS_COMPILE"] | python | 4 | 0 | |
sdk/eventgrid/azure-messaging-eventgrid/src/samples/java/com/azure/messaging/eventgrid/samples/PublishCloudEventsToTopicAsynchronously.java | // Copyright (c) Microsoft Corporation. All rights reserved.
// Licensed under the MIT License.
package com.azure.messaging.eventgrid.samples;
import com.azure.core.credential.AzureKeyCredential;
import com.azure.core.models.CloudEvent;
import com.azure.core.models.CloudEventDataFormat;
import com.azure.core.util.BinaryData;
import com.azure.messaging.eventgrid.EventGridPublisherAsyncClient;
import com.azure.messaging.eventgrid.EventGridPublisherClientBuilder;
import com.azure.messaging.eventgrid.samples.models.User;
import java.io.IOException;
import java.nio.charset.StandardCharsets;
import java.util.ArrayList;
import java.util.List;
/**
* This sample code shows how to send {@link CloudEvent CloudEvents} to an Event Grid Topic that accepts cloud event schema.
* Refer to the <a href="https://docs.microsoft.com/en-us/azure/event-grid/cloud-event-schema">CloudEvent schema</a>.
*
* @see PublishEventGridEventsToTopicAsynchronously for a sample to send an Event Grid event.
*/
public class PublishCloudEventsToTopicAsynchronously {
public static void main(String[] args) throws IOException {
EventGridPublisherAsyncClient<CloudEvent> publisherClient = new EventGridPublisherClientBuilder()
.endpoint(System.getenv("AZURE_EVENTGRID_CLOUDEVENT_ENDPOINT")) // make sure it accepts CloudEvent
.credential(new AzureKeyCredential(System.getenv("AZURE_EVENTGRID_CLOUDEVENT_KEY")))
.buildCloudEventPublisherAsyncClient();
// Create a CloudEvent with String data
String str = "FirstName: John1, LastName:James";
CloudEvent cloudEventStr = new CloudEvent("https://com.example.myapp", "User.Created.Text",
BinaryData.fromObject(str), CloudEventDataFormat.JSON, "text/plain");
// Create a CloudEvent with Object data
User newUser = new User("John2", "James");
CloudEvent cloudEventModel = new CloudEvent("https://com.example.myapp", "User.Created.Object",
BinaryData.fromObject(newUser), CloudEventDataFormat.JSON, "application/json");
// Create a CloudEvent with bytes data
byte[] byteSample = "FirstName: John3, LastName: James".getBytes(StandardCharsets.UTF_8);
CloudEvent cloudEventBytes = new CloudEvent("https://com.example.myapp", "User.Created.Binary",
BinaryData.fromBytes(byteSample), CloudEventDataFormat.BYTES, "application/octet-stream");
// Create a CloudEvent with Json String data
String jsonStrData = "\"FirstName: John1, LastName:James\"";
CloudEvent cloudEventJsonStrData = new CloudEvent("https://com.example.myapp", "User.Created.Text",
BinaryData.fromString(jsonStrData), CloudEventDataFormat.JSON, "text/plain");
// Send them to the event grid topic altogether.
List<CloudEvent> events = new ArrayList<>();
events.add(cloudEventStr);
events.add(cloudEventModel);
events.add(cloudEventBytes);
events.add(cloudEventJsonStrData);
events.add(cloudEventBytes.addExtensionAttribute("extension", "value"));
publisherClient.sendEvents(events)
.subscribe(); // This is non-blocking.
System.out.println("Press any key to exit.");
System.in.read(); // Prevent exit immediately.
}
}
| [
"\"AZURE_EVENTGRID_CLOUDEVENT_ENDPOINT\"",
"\"AZURE_EVENTGRID_CLOUDEVENT_KEY\""
]
| []
| [
"AZURE_EVENTGRID_CLOUDEVENT_ENDPOINT",
"AZURE_EVENTGRID_CLOUDEVENT_KEY"
]
| [] | ["AZURE_EVENTGRID_CLOUDEVENT_ENDPOINT", "AZURE_EVENTGRID_CLOUDEVENT_KEY"] | java | 2 | 0 | |
main.go | //go:generate go install -v github.com/josephspurrier/goversioninfo/cmd/goversioninfo
//go:generate goversioninfo -icon=res/papp.ico -manifest=res/papp.manifest
package main
import (
"os"
"path"
"github.com/portapps/portapps/v3"
"github.com/portapps/portapps/v3/pkg/log"
"github.com/portapps/portapps/v3/pkg/utl"
)
type config struct {
Cleanup bool `yaml:"cleanup" mapstructure:"cleanup"`
}
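// For reference, this struct is decoded from the app's YAML configuration; a
// minimal sketch of that document is shown below (the value is only an example,
// the shipped default set in init() below is false):
//
//	cleanup: true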
var (
app *portapps.App
cfg *config
)
func init() {
var err error
// Default config
cfg = &config{
Cleanup: false,
}
// Init app
if app, err = portapps.NewWithCfg("vscode-portable", "Visual Studio Code", cfg); err != nil {
log.Fatal().Err(err).Msg("Cannot initialize application. See log file for more info.")
}
}
func main() {
utl.CreateFolder(app.DataPath)
app.Process = utl.PathJoin(app.AppPath, "Code.exe")
app.Args = []string{
"--log debug",
}
// Cleanup on exit
if cfg.Cleanup {
defer func() {
utl.Cleanup([]string{
path.Join(os.Getenv("APPDATA"), "Code"),
})
}()
}
os.Setenv("VSCODE_APPDATA", utl.PathJoin(app.DataPath, "appdata"))
if !app.Config().Common.DisableLog {
os.Setenv("VSCODE_LOGS", utl.PathJoin(app.DataPath, "logs"))
}
os.Setenv("VSCODE_EXTENSIONS", utl.PathJoin(app.DataPath, "extensions"))
defer app.Close()
app.Launch(os.Args[1:])
}
| [
"\"APPDATA\""
]
| []
| [
"APPDATA"
]
| [] | ["APPDATA"] | go | 1 | 0 | |
discordbot.py | from discord.ext import commands
import os
import traceback
bot = commands.Bot(command_prefix='/')
token = os.environ['DISCORD_BOT_TOKEN']
@bot.event
async def on_command_error(ctx, error):
orig_error = getattr(error, "original", error)
error_msg = ''.join(traceback.TracebackException.from_exception(orig_error).format())
await ctx.send(error_msg)
@bot.command()
async def ああ言えば(ctx):
await ctx.send('こういう')
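# Usage sketch (assuming the bot has been invited to a server and DISCORD_BOT_TOKEN
# is set): sending "/ああ言えば" in a channel the bot can read makes it reply "こういう".
# Any exception raised by a command is reported back to the channel by the
# on_command_error handler above.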
bot.run(token)
| []
| []
| [
"DISCORD_BOT_TOKEN"
]
| [] | ["DISCORD_BOT_TOKEN"] | python | 1 | 0 | |
cmd/kube-apiserver/app/server.go | /*
Copyright 2014 The Kubernetes Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Package app does all of the work necessary to create a Kubernetes
// APIServer by binding together the API, master and APIServer infrastructure.
// It can be configured and called directly or via the hyperkube framework.
package app
import (
"crypto/tls"
"fmt"
"io/ioutil"
"net"
"net/http"
"net/url"
"os"
"reflect"
"strconv"
"strings"
"time"
"github.com/go-openapi/spec"
"github.com/golang/glog"
"github.com/spf13/cobra"
"github.com/spf13/pflag"
"k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/openapi"
"k8s.io/apimachinery/pkg/runtime/schema"
utilerrors "k8s.io/apimachinery/pkg/util/errors"
utilnet "k8s.io/apimachinery/pkg/util/net"
"k8s.io/apimachinery/pkg/util/sets"
utilwait "k8s.io/apimachinery/pkg/util/wait"
"k8s.io/apiserver/pkg/admission"
"k8s.io/apiserver/pkg/authentication/authenticator"
"k8s.io/apiserver/pkg/authorization/authorizer"
genericapiserver "k8s.io/apiserver/pkg/server"
"k8s.io/apiserver/pkg/server/filters"
serverstorage "k8s.io/apiserver/pkg/server/storage"
"k8s.io/kubernetes/cmd/kube-apiserver/app/options"
"k8s.io/kubernetes/cmd/kube-apiserver/app/preflight"
"k8s.io/kubernetes/pkg/api"
"k8s.io/kubernetes/pkg/apis/apps"
"k8s.io/kubernetes/pkg/apis/batch"
"k8s.io/kubernetes/pkg/apis/extensions"
"k8s.io/kubernetes/pkg/capabilities"
"k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset"
informers "k8s.io/kubernetes/pkg/client/informers/informers_generated/internalversion"
"k8s.io/kubernetes/pkg/cloudprovider"
serviceaccountcontroller "k8s.io/kubernetes/pkg/controller/serviceaccount"
generatedopenapi "k8s.io/kubernetes/pkg/generated/openapi"
"k8s.io/kubernetes/pkg/kubeapiserver"
kubeapiserveradmission "k8s.io/kubernetes/pkg/kubeapiserver/admission"
kubeauthenticator "k8s.io/kubernetes/pkg/kubeapiserver/authenticator"
"k8s.io/kubernetes/pkg/kubeapiserver/authorizer/modes"
kubeoptions "k8s.io/kubernetes/pkg/kubeapiserver/options"
kubeserver "k8s.io/kubernetes/pkg/kubeapiserver/server"
"k8s.io/kubernetes/pkg/master"
"k8s.io/kubernetes/pkg/master/tunneler"
quotainstall "k8s.io/kubernetes/pkg/quota/install"
"k8s.io/kubernetes/pkg/registry/cachesize"
rbacrest "k8s.io/kubernetes/pkg/registry/rbac/rest"
"k8s.io/kubernetes/pkg/version"
"k8s.io/kubernetes/plugin/pkg/auth/authenticator/token/bootstrap"
)
const etcdRetryLimit = 60
const etcdRetryInterval = 1 * time.Second
// NewAPIServerCommand creates a *cobra.Command object with default parameters
func NewAPIServerCommand() *cobra.Command {
s := options.NewServerRunOptions()
s.AddFlags(pflag.CommandLine)
cmd := &cobra.Command{
Use: "kube-apiserver",
Long: `The Kubernetes API server validates and configures data
for the api objects which include pods, services, replicationcontrollers, and
others. The API Server services REST operations and provides the frontend to the
cluster's shared state through which all other components interact.`,
Run: func(cmd *cobra.Command, args []string) {
},
}
return cmd
}
// Run runs the specified APIServer. This should never exit.
func Run(runOptions *options.ServerRunOptions, stopCh <-chan struct{}) error {
kubeAPIServerConfig, sharedInformers, insecureServingOptions, err := CreateKubeAPIServerConfig(runOptions)
if err != nil {
return err
}
	// TPRs are enabled and not yet beta; since these are the successor, they fall under the same enablement rule
// If additional API servers are added, they should be gated.
apiExtensionsConfig, err := createAPIExtensionsConfig(*kubeAPIServerConfig.GenericConfig, runOptions)
if err != nil {
return err
}
apiExtensionsServer, err := createAPIExtensionsServer(apiExtensionsConfig, genericapiserver.EmptyDelegate)
if err != nil {
return err
}
kubeAPIServer, err := CreateKubeAPIServer(kubeAPIServerConfig, apiExtensionsServer.GenericAPIServer, sharedInformers)
if err != nil {
return err
}
// if we're starting up a hacked up version of this API server for a weird test case,
// just start the API server as is because clients don't get built correctly when you do this
if len(os.Getenv("KUBE_API_VERSIONS")) > 0 {
if insecureServingOptions != nil {
insecureHandlerChain := kubeserver.BuildInsecureHandlerChain(kubeAPIServer.GenericAPIServer.UnprotectedHandler(), kubeAPIServerConfig.GenericConfig)
if err := kubeserver.NonBlockingRun(insecureServingOptions, insecureHandlerChain, stopCh); err != nil {
return err
}
}
return kubeAPIServer.GenericAPIServer.PrepareRun().Run(stopCh)
}
// otherwise go down the normal path of standing the aggregator up in front of the API server
// this wires up openapi
kubeAPIServer.GenericAPIServer.PrepareRun()
// aggregator comes last in the chain
aggregatorConfig, err := createAggregatorConfig(*kubeAPIServerConfig.GenericConfig, runOptions)
if err != nil {
return err
}
aggregatorServer, err := createAggregatorServer(aggregatorConfig, kubeAPIServer.GenericAPIServer, sharedInformers, apiExtensionsServer.Informers)
if err != nil {
// we don't need special handling for innerStopCh because the aggregator server doesn't create any go routines
return err
}
if insecureServingOptions != nil {
insecureHandlerChain := kubeserver.BuildInsecureHandlerChain(aggregatorServer.GenericAPIServer.UnprotectedHandler(), kubeAPIServerConfig.GenericConfig)
if err := kubeserver.NonBlockingRun(insecureServingOptions, insecureHandlerChain, stopCh); err != nil {
return err
}
}
return aggregatorServer.GenericAPIServer.PrepareRun().Run(stopCh)
}
// CreateKubeAPIServer creates and wires a workable kube-apiserver
func CreateKubeAPIServer(kubeAPIServerConfig *master.Config, delegateAPIServer genericapiserver.DelegationTarget, sharedInformers informers.SharedInformerFactory) (*master.Master, error) {
kubeAPIServer, err := kubeAPIServerConfig.Complete().New(delegateAPIServer)
if err != nil {
return nil, err
}
kubeAPIServer.GenericAPIServer.AddPostStartHook("start-kube-apiserver-informers", func(context genericapiserver.PostStartHookContext) error {
sharedInformers.Start(context.StopCh)
return nil
})
return kubeAPIServer, nil
}
// CreateKubeAPIServerConfig creates all the resources for running the API server, but runs none of them
func CreateKubeAPIServerConfig(s *options.ServerRunOptions) (*master.Config, informers.SharedInformerFactory, *kubeserver.InsecureServingInfo, error) {
// register all admission plugins
registerAllAdmissionPlugins(s.Admission.Plugins)
// set defaults in the options before trying to create the generic config
if err := defaultOptions(s); err != nil {
return nil, nil, nil, err
}
// validate options
if errs := s.Validate(); len(errs) != 0 {
return nil, nil, nil, utilerrors.NewAggregate(errs)
}
genericConfig, sharedInformers, insecureServingOptions, err := BuildGenericConfig(s)
if err != nil {
return nil, nil, nil, err
}
if err := utilwait.PollImmediate(etcdRetryInterval, etcdRetryLimit*etcdRetryInterval, preflight.EtcdConnection{ServerList: s.Etcd.StorageConfig.ServerList}.CheckEtcdServers); err != nil {
return nil, nil, nil, fmt.Errorf("error waiting for etcd connection: %v", err)
}
capabilities.Initialize(capabilities.Capabilities{
AllowPrivileged: s.AllowPrivileged,
// TODO(vmarmol): Implement support for HostNetworkSources.
PrivilegedSources: capabilities.PrivilegedSources{
HostNetworkSources: []string{},
HostPIDSources: []string{},
HostIPCSources: []string{},
},
PerConnectionBandwidthLimitBytesPerSec: s.MaxConnectionBytesPerSec,
})
// Setup nodeTunneler if needed
var nodeTunneler tunneler.Tunneler
var proxyDialerFn utilnet.DialFunc
if len(s.SSHUser) > 0 {
// Get ssh key distribution func, if supported
var installSSHKey tunneler.InstallSSHKey
cloud, err := cloudprovider.InitCloudProvider(s.CloudProvider.CloudProvider, s.CloudProvider.CloudConfigFile)
if err != nil {
return nil, nil, nil, fmt.Errorf("cloud provider could not be initialized: %v", err)
}
if cloud != nil {
if instances, supported := cloud.Instances(); supported {
installSSHKey = instances.AddSSHKeyToAllInstances
}
}
if s.KubeletConfig.Port == 0 {
return nil, nil, nil, fmt.Errorf("must enable kubelet port if proxy ssh-tunneling is specified")
}
if s.KubeletConfig.ReadOnlyPort == 0 {
return nil, nil, nil, fmt.Errorf("must enable kubelet readonly port if proxy ssh-tunneling is specified")
}
// Set up the nodeTunneler
// TODO(cjcullen): If we want this to handle per-kubelet ports or other
// kubelet listen-addresses, we need to plumb through options.
healthCheckPath := &url.URL{
Scheme: "http",
Host: net.JoinHostPort("127.0.0.1", strconv.FormatUint(uint64(s.KubeletConfig.ReadOnlyPort), 10)),
Path: "healthz",
}
nodeTunneler = tunneler.New(s.SSHUser, s.SSHKeyfile, healthCheckPath, installSSHKey)
// Use the nodeTunneler's dialer to connect to the kubelet
s.KubeletConfig.Dial = nodeTunneler.Dial
// Use the nodeTunneler's dialer when proxying to pods, services, and nodes
proxyDialerFn = nodeTunneler.Dial
}
// Proxying to pods and services is IP-based... don't expect to be able to verify the hostname
proxyTLSClientConfig := &tls.Config{InsecureSkipVerify: true}
proxyTransport := utilnet.SetTransportDefaults(&http.Transport{
Dial: proxyDialerFn,
TLSClientConfig: proxyTLSClientConfig,
})
serviceIPRange, apiServerServiceIP, err := master.DefaultServiceIPRange(s.ServiceClusterIPRange)
if err != nil {
return nil, nil, nil, err
}
storageFactory, err := BuildStorageFactory(s)
if err != nil {
return nil, nil, nil, err
}
clientCA, err := readCAorNil(s.Authentication.ClientCert.ClientCA)
if err != nil {
return nil, nil, nil, err
}
requestHeaderProxyCA, err := readCAorNil(s.Authentication.RequestHeader.ClientCAFile)
if err != nil {
return nil, nil, nil, err
}
config := &master.Config{
GenericConfig: genericConfig,
ClientCARegistrationHook: master.ClientCARegistrationHook{
ClientCA: clientCA,
RequestHeaderUsernameHeaders: s.Authentication.RequestHeader.UsernameHeaders,
RequestHeaderGroupHeaders: s.Authentication.RequestHeader.GroupHeaders,
RequestHeaderExtraHeaderPrefixes: s.Authentication.RequestHeader.ExtraHeaderPrefixes,
RequestHeaderCA: requestHeaderProxyCA,
RequestHeaderAllowedNames: s.Authentication.RequestHeader.AllowedNames,
},
APIResourceConfigSource: storageFactory.APIResourceConfigSource,
StorageFactory: storageFactory,
EnableCoreControllers: true,
EventTTL: s.EventTTL,
KubeletClientConfig: s.KubeletConfig,
EnableUISupport: true,
EnableLogsSupport: s.EnableLogsHandler,
ProxyTransport: proxyTransport,
Tunneler: nodeTunneler,
ServiceIPRange: serviceIPRange,
APIServerServiceIP: apiServerServiceIP,
APIServerServicePort: 443,
ServiceNodePortRange: s.ServiceNodePortRange,
KubernetesServiceNodePort: s.KubernetesServiceNodePort,
MasterCount: s.MasterCount,
}
return config, sharedInformers, insecureServingOptions, nil
}
// BuildGenericConfig takes the master server options and produces the genericapiserver.Config associated with it
func BuildGenericConfig(s *options.ServerRunOptions) (*genericapiserver.Config, informers.SharedInformerFactory, *kubeserver.InsecureServingInfo, error) {
genericConfig := genericapiserver.NewConfig(api.Codecs)
if err := s.GenericServerRunOptions.ApplyTo(genericConfig); err != nil {
return nil, nil, nil, err
}
insecureServingOptions, err := s.InsecureServing.ApplyTo(genericConfig)
if err != nil {
return nil, nil, nil, err
}
if err := s.SecureServing.ApplyTo(genericConfig); err != nil {
return nil, nil, nil, err
}
if err := s.Authentication.ApplyTo(genericConfig); err != nil {
return nil, nil, nil, err
}
if err := s.Audit.ApplyTo(genericConfig); err != nil {
return nil, nil, nil, err
}
if err := s.Features.ApplyTo(genericConfig); err != nil {
return nil, nil, nil, err
}
genericConfig.OpenAPIConfig = genericapiserver.DefaultOpenAPIConfig(generatedopenapi.GetOpenAPIDefinitions, api.Scheme)
genericConfig.OpenAPIConfig.PostProcessSpec = postProcessOpenAPISpecForBackwardCompatibility
genericConfig.OpenAPIConfig.Info.Title = "Kubernetes"
genericConfig.SwaggerConfig = genericapiserver.DefaultSwaggerConfig()
genericConfig.EnableMetrics = true
genericConfig.LongRunningFunc = filters.BasicLongRunningRequestCheck(
sets.NewString("watch", "proxy"),
sets.NewString("attach", "exec", "proxy", "log", "portforward"),
)
kubeVersion := version.Get()
genericConfig.Version = &kubeVersion
storageFactory, err := BuildStorageFactory(s)
if err != nil {
return nil, nil, nil, err
}
if err := s.Etcd.ApplyWithStorageFactoryTo(storageFactory, genericConfig); err != nil {
return nil, nil, nil, err
}
// Use protobufs for self-communication.
// Since not every generic apiserver has to support protobufs, we
// cannot default to it in generic apiserver and need to explicitly
// set it in kube-apiserver.
genericConfig.LoopbackClientConfig.ContentConfig.ContentType = "application/vnd.kubernetes.protobuf"
client, err := internalclientset.NewForConfig(genericConfig.LoopbackClientConfig)
if err != nil {
kubeAPIVersions := os.Getenv("KUBE_API_VERSIONS")
if len(kubeAPIVersions) == 0 {
return nil, nil, nil, fmt.Errorf("failed to create clientset: %v", err)
}
// KUBE_API_VERSIONS is used in test-update-storage-objects.sh, disabling a number of API
// groups. This leads to a nil client above and undefined behaviour further down.
//
// TODO: get rid of KUBE_API_VERSIONS or define sane behaviour if set
glog.Errorf("Failed to create clientset with KUBE_API_VERSIONS=%q. KUBE_API_VERSIONS is only for testing. Things will break.", kubeAPIVersions)
}
sharedInformers := informers.NewSharedInformerFactory(client, 10*time.Minute)
genericConfig.Authenticator, genericConfig.OpenAPIConfig.SecurityDefinitions, err = BuildAuthenticator(s, storageFactory, client, sharedInformers)
if err != nil {
return nil, nil, nil, fmt.Errorf("invalid authentication config: %v", err)
}
genericConfig.Authorizer, err = BuildAuthorizer(s, sharedInformers)
if err != nil {
return nil, nil, nil, fmt.Errorf("invalid authorization config: %v", err)
}
if !sets.NewString(s.Authorization.Modes()...).Has(modes.ModeRBAC) {
genericConfig.DisabledPostStartHooks.Insert(rbacrest.PostStartHookName)
}
pluginInitializer, err := BuildAdmissionPluginInitializer(
s,
client,
sharedInformers,
genericConfig.Authorizer,
)
if err != nil {
return nil, nil, nil, fmt.Errorf("failed to create admission plugin initializer: %v", err)
}
err = s.Admission.ApplyTo(
genericConfig,
pluginInitializer)
if err != nil {
return nil, nil, nil, fmt.Errorf("failed to initialize admission: %v", err)
}
return genericConfig, sharedInformers, insecureServingOptions, nil
}
// BuildAdmissionPluginInitializer constructs the admission plugin initializer
func BuildAdmissionPluginInitializer(s *options.ServerRunOptions, client internalclientset.Interface, sharedInformers informers.SharedInformerFactory, apiAuthorizer authorizer.Authorizer) (admission.PluginInitializer, error) {
var cloudConfig []byte
if s.CloudProvider.CloudConfigFile != "" {
var err error
cloudConfig, err = ioutil.ReadFile(s.CloudProvider.CloudConfigFile)
if err != nil {
glog.Fatalf("Error reading from cloud configuration file %s: %#v", s.CloudProvider.CloudConfigFile, err)
}
}
// TODO: use a dynamic restmapper. See https://github.com/kubernetes/kubernetes/pull/42615.
restMapper := api.Registry.RESTMapper()
// NOTE: we do not provide informers to the quota registry because admission level decisions
// do not require us to open watches for all items tracked by quota.
quotaRegistry := quotainstall.NewRegistry(nil, nil)
pluginInitializer := kubeapiserveradmission.NewPluginInitializer(client, sharedInformers, apiAuthorizer, cloudConfig, restMapper, quotaRegistry)
return pluginInitializer, nil
}
// BuildAuthenticator constructs the authenticator
func BuildAuthenticator(s *options.ServerRunOptions, storageFactory serverstorage.StorageFactory, client internalclientset.Interface, sharedInformers informers.SharedInformerFactory) (authenticator.Request, *spec.SecurityDefinitions, error) {
authenticatorConfig := s.Authentication.ToAuthenticationConfig()
if s.Authentication.ServiceAccounts.Lookup {
// we have to go direct to storage because the clientsets fail when they're initialized with some API versions excluded
// we should stop trying to control them like that.
storageConfig, err := storageFactory.NewConfig(api.Resource("serviceaccounts"))
if err != nil {
return nil, nil, fmt.Errorf("unable to get serviceaccounts storage: %v", err)
}
authenticatorConfig.ServiceAccountTokenGetter = serviceaccountcontroller.NewGetterFromStorageInterface(storageConfig, storageFactory.ResourcePrefix(api.Resource("serviceaccounts")), storageFactory.ResourcePrefix(api.Resource("secrets")))
}
if client == nil || reflect.ValueOf(client).IsNil() {
// TODO: Remove check once client can never be nil.
glog.Errorf("Failed to setup bootstrap token authenticator because the loopback clientset was not setup properly.")
} else {
authenticatorConfig.BootstrapTokenAuthenticator = bootstrap.NewTokenAuthenticator(
sharedInformers.Core().InternalVersion().Secrets().Lister().Secrets(v1.NamespaceSystem),
)
}
return authenticatorConfig.New()
}
// BuildAuthorizer constructs the authorizer
func BuildAuthorizer(s *options.ServerRunOptions, sharedInformers informers.SharedInformerFactory) (authorizer.Authorizer, error) {
authorizationConfig := s.Authorization.ToAuthorizationConfig(sharedInformers)
return authorizationConfig.New()
}
// BuildStorageFactory constructs the storage factory
func BuildStorageFactory(s *options.ServerRunOptions) (*serverstorage.DefaultStorageFactory, error) {
storageGroupsToEncodingVersion, err := s.StorageSerialization.StorageGroupsToEncodingVersion()
if err != nil {
return nil, fmt.Errorf("error generating storage version map: %s", err)
}
storageFactory, err := kubeapiserver.NewStorageFactory(
s.Etcd.StorageConfig, s.Etcd.DefaultStorageMediaType, api.Codecs,
serverstorage.NewDefaultResourceEncodingConfig(api.Registry), storageGroupsToEncodingVersion,
// FIXME: this GroupVersionResource override should be configurable
[]schema.GroupVersionResource{batch.Resource("cronjobs").WithVersion("v2alpha1")},
master.DefaultAPIResourceConfigSource(), s.APIEnablement.RuntimeConfig)
if err != nil {
return nil, fmt.Errorf("error in initializing storage factory: %s", err)
}
// keep Deployments in extensions for backwards compatibility, we'll have to migrate at some point, eventually
storageFactory.AddCohabitatingResources(extensions.Resource("deployments"), apps.Resource("deployments"))
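	// Each override has the form group/resource#serverlist (servers separated by ";"),
	// e.g. "batch/jobs#http://etcd-1:2379;http://etcd-2:2379"; malformed entries are logged and skipped.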
for _, override := range s.Etcd.EtcdServersOverrides {
tokens := strings.Split(override, "#")
if len(tokens) != 2 {
glog.Errorf("invalid value of etcd server overrides: %s", override)
continue
}
apiresource := strings.Split(tokens[0], "/")
if len(apiresource) != 2 {
glog.Errorf("invalid resource definition: %s", tokens[0])
continue
}
group := apiresource[0]
resource := apiresource[1]
groupResource := schema.GroupResource{Group: group, Resource: resource}
servers := strings.Split(tokens[1], ";")
storageFactory.SetEtcdLocation(groupResource, servers)
}
return storageFactory, nil
}
func defaultOptions(s *options.ServerRunOptions) error {
// set defaults
if err := s.GenericServerRunOptions.DefaultAdvertiseAddress(s.SecureServing); err != nil {
return err
}
if err := kubeoptions.DefaultAdvertiseAddress(s.GenericServerRunOptions, s.InsecureServing); err != nil {
return err
}
_, apiServerServiceIP, err := master.DefaultServiceIPRange(s.ServiceClusterIPRange)
if err != nil {
return fmt.Errorf("error determining service IP ranges: %v", err)
}
if err := s.SecureServing.MaybeDefaultWithSelfSignedCerts(s.GenericServerRunOptions.AdvertiseAddress.String(), []string{"kubernetes.default.svc", "kubernetes.default", "kubernetes"}, []net.IP{apiServerServiceIP}); err != nil {
return fmt.Errorf("error creating self-signed certificates: %v", err)
}
if err := s.CloudProvider.DefaultExternalHost(s.GenericServerRunOptions); err != nil {
return fmt.Errorf("error setting the external host value: %v", err)
}
s.Authentication.ApplyAuthorization(s.Authorization)
// Default to the private server key for service account token signing
if len(s.Authentication.ServiceAccounts.KeyFiles) == 0 && s.SecureServing.ServerCert.CertKey.KeyFile != "" {
if kubeauthenticator.IsValidServiceAccountKeyFile(s.SecureServing.ServerCert.CertKey.KeyFile) {
s.Authentication.ServiceAccounts.KeyFiles = []string{s.SecureServing.ServerCert.CertKey.KeyFile}
} else {
glog.Warning("No TLS key provided, service account token authentication disabled")
}
}
if s.Etcd.StorageConfig.DeserializationCacheSize == 0 {
// When size of cache is not explicitly set, estimate its size based on
// target memory usage.
glog.V(2).Infof("Initializing deserialization cache size based on %dMB limit", s.GenericServerRunOptions.TargetRAMMB)
		// This heuristic infers the maximum number of nodes in the cluster from
		// memory capacity and sets cache sizes based on that value.
		// Our documentation officially recommends 120GB machines for
		// 2000 nodes, and we scale from that point; thus we assume ~60MB of
		// capacity per node.
// TODO: We may consider deciding that some percentage of memory will
// be used for the deserialization cache and divide it by the max object
// size to compute its size. We may even go further and measure
// collective sizes of the objects in the cache.
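		// For example, a TargetRAMMB of 120000 (~120GB) gives clusterSize = 2000 and a
		// deserialization cache size of 50000 entries; the check below enforces a floor of 1000.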
clusterSize := s.GenericServerRunOptions.TargetRAMMB / 60
s.Etcd.StorageConfig.DeserializationCacheSize = 25 * clusterSize
if s.Etcd.StorageConfig.DeserializationCacheSize < 1000 {
s.Etcd.StorageConfig.DeserializationCacheSize = 1000
}
}
if s.Etcd.EnableWatchCache {
glog.V(2).Infof("Initializing cache sizes based on %dMB limit", s.GenericServerRunOptions.TargetRAMMB)
cachesize.InitializeWatchCacheSizes(s.GenericServerRunOptions.TargetRAMMB)
cachesize.SetWatchCacheSizes(s.GenericServerRunOptions.WatchCacheSizes)
}
return nil
}
func readCAorNil(file string) ([]byte, error) {
if len(file) == 0 {
return nil, nil
}
return ioutil.ReadFile(file)
}
// postProcessOpenAPISpecForBackwardCompatibility adds removed definitions for backward compatibility.
func postProcessOpenAPISpecForBackwardCompatibility(s *spec.Swagger) (*spec.Swagger, error) {
compatibilityMap := map[string]string{
"v1beta1.DeploymentStatus": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.DeploymentStatus",
"v1beta1.ReplicaSetList": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.ReplicaSetList",
"v1beta1.Eviction": "k8s.io/kubernetes/pkg/apis/policy/v1beta1.Eviction",
"v1beta1.StatefulSetList": "k8s.io/kubernetes/pkg/apis/apps/v1beta1.StatefulSetList",
"v1beta1.RoleBinding": "k8s.io/kubernetes/pkg/apis/rbac/v1beta1.RoleBinding",
"v1beta1.PodSecurityPolicyList": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.PodSecurityPolicyList",
"v1.NodeSpec": "k8s.io/kubernetes/pkg/api/v1.NodeSpec",
"v1.FlockerVolumeSource": "k8s.io/kubernetes/pkg/api/v1.FlockerVolumeSource",
"v1.ContainerState": "k8s.io/kubernetes/pkg/api/v1.ContainerState",
"v1beta1.ClusterRole": "k8s.io/kubernetes/pkg/apis/rbac/v1beta1.ClusterRole",
"v1beta1.StorageClass": "k8s.io/kubernetes/pkg/apis/storage/v1beta1.StorageClass",
"v1.FlexVolumeSource": "k8s.io/kubernetes/pkg/api/v1.FlexVolumeSource",
"v1.SecretKeySelector": "k8s.io/kubernetes/pkg/api/v1.SecretKeySelector",
"v1.DeleteOptions": "k8s.io/kubernetes/pkg/api/v1.DeleteOptions",
"v1.PodStatus": "k8s.io/kubernetes/pkg/api/v1.PodStatus",
"v1.NodeStatus": "k8s.io/kubernetes/pkg/api/v1.NodeStatus",
"v1.ServiceSpec": "k8s.io/kubernetes/pkg/api/v1.ServiceSpec",
"v1.AttachedVolume": "k8s.io/kubernetes/pkg/api/v1.AttachedVolume",
"v1.PersistentVolume": "k8s.io/kubernetes/pkg/api/v1.PersistentVolume",
"v1.LimitRangeList": "k8s.io/kubernetes/pkg/api/v1.LimitRangeList",
"v1alpha1.Role": "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1.Role",
"v1.Affinity": "k8s.io/kubernetes/pkg/api/v1.Affinity",
"v1beta1.PodDisruptionBudget": "k8s.io/kubernetes/pkg/apis/policy/v1beta1.PodDisruptionBudget",
"v1alpha1.RoleBindingList": "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1.RoleBindingList",
"v1.PodAffinity": "k8s.io/kubernetes/pkg/api/v1.PodAffinity",
"v1beta1.SELinuxStrategyOptions": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.SELinuxStrategyOptions",
"v1.ResourceQuotaList": "k8s.io/kubernetes/pkg/api/v1.ResourceQuotaList",
"v1.PodList": "k8s.io/kubernetes/pkg/api/v1.PodList",
"v1.EnvVarSource": "k8s.io/kubernetes/pkg/api/v1.EnvVarSource",
"v1beta1.TokenReviewStatus": "k8s.io/kubernetes/pkg/apis/authentication/v1beta1.TokenReviewStatus",
"v1.PersistentVolumeClaimList": "k8s.io/kubernetes/pkg/api/v1.PersistentVolumeClaimList",
"v1beta1.RoleList": "k8s.io/kubernetes/pkg/apis/rbac/v1beta1.RoleList",
"v1.ListMeta": "k8s.io/apimachinery/pkg/apis/meta/v1.ListMeta",
"v1.ObjectMeta": "k8s.io/apimachinery/pkg/apis/meta/v1.ObjectMeta",
"v1.APIGroupList": "k8s.io/apimachinery/pkg/apis/meta/v1.APIGroupList",
"v2alpha1.Job": "k8s.io/kubernetes/pkg/apis/batch/v2alpha1.Job",
"v1.EnvFromSource": "k8s.io/kubernetes/pkg/api/v1.EnvFromSource",
"v1beta1.IngressStatus": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.IngressStatus",
"v1.Service": "k8s.io/kubernetes/pkg/api/v1.Service",
"v1beta1.DaemonSetStatus": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.DaemonSetStatus",
"v1alpha1.Subject": "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1.Subject",
"v1.HorizontalPodAutoscaler": "k8s.io/kubernetes/pkg/apis/autoscaling/v1.HorizontalPodAutoscaler",
"v1.StatusCause": "k8s.io/apimachinery/pkg/apis/meta/v1.StatusCause",
"v1.NodeSelectorRequirement": "k8s.io/kubernetes/pkg/api/v1.NodeSelectorRequirement",
"v1beta1.NetworkPolicyIngressRule": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.NetworkPolicyIngressRule",
"v1beta1.ThirdPartyResource": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.ThirdPartyResource",
"v1beta1.PodSecurityPolicy": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.PodSecurityPolicy",
"v1beta1.StatefulSet": "k8s.io/kubernetes/pkg/apis/apps/v1beta1.StatefulSet",
"v1.LabelSelector": "k8s.io/apimachinery/pkg/apis/meta/v1.LabelSelector",
"v1.ScaleSpec": "k8s.io/kubernetes/pkg/apis/autoscaling/v1.ScaleSpec",
"v1.DownwardAPIVolumeFile": "k8s.io/kubernetes/pkg/api/v1.DownwardAPIVolumeFile",
"v1beta1.HorizontalPodAutoscaler": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.HorizontalPodAutoscaler",
"v1.AWSElasticBlockStoreVolumeSource": "k8s.io/kubernetes/pkg/api/v1.AWSElasticBlockStoreVolumeSource",
"v1.ComponentStatus": "k8s.io/kubernetes/pkg/api/v1.ComponentStatus",
"v2alpha1.JobSpec": "k8s.io/kubernetes/pkg/apis/batch/v2alpha1.JobSpec",
"v1.ContainerImage": "k8s.io/kubernetes/pkg/api/v1.ContainerImage",
"v1.ReplicationControllerStatus": "k8s.io/kubernetes/pkg/api/v1.ReplicationControllerStatus",
"v1.ResourceQuota": "k8s.io/kubernetes/pkg/api/v1.ResourceQuota",
"v1beta1.NetworkPolicyList": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.NetworkPolicyList",
"v1beta1.NonResourceAttributes": "k8s.io/kubernetes/pkg/apis/authorization/v1beta1.NonResourceAttributes",
"v1.JobCondition": "k8s.io/kubernetes/pkg/apis/batch/v1.JobCondition",
"v1.LabelSelectorRequirement": "k8s.io/apimachinery/pkg/apis/meta/v1.LabelSelectorRequirement",
"v1beta1.Deployment": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.Deployment",
"v1.LoadBalancerIngress": "k8s.io/kubernetes/pkg/api/v1.LoadBalancerIngress",
"v1.SecretList": "k8s.io/kubernetes/pkg/api/v1.SecretList",
"v1beta1.ReplicaSetSpec": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.ReplicaSetSpec",
"v1beta1.RoleBindingList": "k8s.io/kubernetes/pkg/apis/rbac/v1beta1.RoleBindingList",
"v1.ServicePort": "k8s.io/kubernetes/pkg/api/v1.ServicePort",
"v1.Namespace": "k8s.io/kubernetes/pkg/api/v1.Namespace",
"v1beta1.NetworkPolicyPeer": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.NetworkPolicyPeer",
"v1.ReplicationControllerList": "k8s.io/kubernetes/pkg/api/v1.ReplicationControllerList",
"v1beta1.ReplicaSetCondition": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.ReplicaSetCondition",
"v1.ReplicationControllerCondition": "k8s.io/kubernetes/pkg/api/v1.ReplicationControllerCondition",
"v1.DaemonEndpoint": "k8s.io/kubernetes/pkg/api/v1.DaemonEndpoint",
"v1beta1.NetworkPolicyPort": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.NetworkPolicyPort",
"v1.NodeSystemInfo": "k8s.io/kubernetes/pkg/api/v1.NodeSystemInfo",
"v1.LimitRangeItem": "k8s.io/kubernetes/pkg/api/v1.LimitRangeItem",
"v1.ConfigMapVolumeSource": "k8s.io/kubernetes/pkg/api/v1.ConfigMapVolumeSource",
"v1beta1.ClusterRoleList": "k8s.io/kubernetes/pkg/apis/rbac/v1beta1.ClusterRoleList",
"v1beta1.ResourceAttributes": "k8s.io/kubernetes/pkg/apis/authorization/v1beta1.ResourceAttributes",
"v1.Pod": "k8s.io/kubernetes/pkg/api/v1.Pod",
"v1.FCVolumeSource": "k8s.io/kubernetes/pkg/api/v1.FCVolumeSource",
"v1beta1.SubresourceReference": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.SubresourceReference",
"v1.ResourceQuotaStatus": "k8s.io/kubernetes/pkg/api/v1.ResourceQuotaStatus",
"v1alpha1.RoleBinding": "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1.RoleBinding",
"v1.PodCondition": "k8s.io/kubernetes/pkg/api/v1.PodCondition",
"v1.GroupVersionForDiscovery": "k8s.io/apimachinery/pkg/apis/meta/v1.GroupVersionForDiscovery",
"v1.NamespaceStatus": "k8s.io/kubernetes/pkg/api/v1.NamespaceStatus",
"v1.Job": "k8s.io/kubernetes/pkg/apis/batch/v1.Job",
"v1.PersistentVolumeClaimVolumeSource": "k8s.io/kubernetes/pkg/api/v1.PersistentVolumeClaimVolumeSource",
"v1.Handler": "k8s.io/kubernetes/pkg/api/v1.Handler",
"v1.ComponentStatusList": "k8s.io/kubernetes/pkg/api/v1.ComponentStatusList",
"v1.ServerAddressByClientCIDR": "k8s.io/apimachinery/pkg/apis/meta/v1.ServerAddressByClientCIDR",
"v1.PodAntiAffinity": "k8s.io/kubernetes/pkg/api/v1.PodAntiAffinity",
"v1.ISCSIVolumeSource": "k8s.io/kubernetes/pkg/api/v1.ISCSIVolumeSource",
"v1.ContainerStateRunning": "k8s.io/kubernetes/pkg/api/v1.ContainerStateRunning",
"v1.WeightedPodAffinityTerm": "k8s.io/kubernetes/pkg/api/v1.WeightedPodAffinityTerm",
"v1beta1.HostPortRange": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.HostPortRange",
"v1.HorizontalPodAutoscalerSpec": "k8s.io/kubernetes/pkg/apis/autoscaling/v1.HorizontalPodAutoscalerSpec",
"v1.HorizontalPodAutoscalerList": "k8s.io/kubernetes/pkg/apis/autoscaling/v1.HorizontalPodAutoscalerList",
"v1beta1.RoleRef": "k8s.io/kubernetes/pkg/apis/rbac/v1beta1.RoleRef",
"v1.Probe": "k8s.io/kubernetes/pkg/api/v1.Probe",
"v1beta1.IngressTLS": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.IngressTLS",
"v1beta1.ThirdPartyResourceList": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.ThirdPartyResourceList",
"v1beta1.DaemonSet": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.DaemonSet",
"v1.APIGroup": "k8s.io/apimachinery/pkg/apis/meta/v1.APIGroup",
"v1beta1.Subject": "k8s.io/kubernetes/pkg/apis/rbac/v1beta1.Subject",
"v1beta1.DeploymentList": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.DeploymentList",
"v1.NodeAffinity": "k8s.io/kubernetes/pkg/api/v1.NodeAffinity",
"v1beta1.RollingUpdateDeployment": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.RollingUpdateDeployment",
"v1beta1.APIVersion": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.APIVersion",
"v1alpha1.CertificateSigningRequest": "k8s.io/kubernetes/pkg/apis/certificates/v1alpha1.CertificateSigningRequest",
"v1.CinderVolumeSource": "k8s.io/kubernetes/pkg/api/v1.CinderVolumeSource",
"v1.NamespaceSpec": "k8s.io/kubernetes/pkg/api/v1.NamespaceSpec",
"v1beta1.PodDisruptionBudgetSpec": "k8s.io/kubernetes/pkg/apis/policy/v1beta1.PodDisruptionBudgetSpec",
"v1.Patch": "k8s.io/apimachinery/pkg/apis/meta/v1.Patch",
"v1beta1.ClusterRoleBinding": "k8s.io/kubernetes/pkg/apis/rbac/v1beta1.ClusterRoleBinding",
"v1beta1.HorizontalPodAutoscalerSpec": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.HorizontalPodAutoscalerSpec",
"v1.PersistentVolumeClaimSpec": "k8s.io/kubernetes/pkg/api/v1.PersistentVolumeClaimSpec",
"v1.Secret": "k8s.io/kubernetes/pkg/api/v1.Secret",
"v1.NodeCondition": "k8s.io/kubernetes/pkg/api/v1.NodeCondition",
"v1.LocalObjectReference": "k8s.io/kubernetes/pkg/api/v1.LocalObjectReference",
"runtime.RawExtension": "k8s.io/apimachinery/pkg/runtime.RawExtension",
"v1.PreferredSchedulingTerm": "k8s.io/kubernetes/pkg/api/v1.PreferredSchedulingTerm",
"v1.RBDVolumeSource": "k8s.io/kubernetes/pkg/api/v1.RBDVolumeSource",
"v1.KeyToPath": "k8s.io/kubernetes/pkg/api/v1.KeyToPath",
"v1.ScaleStatus": "k8s.io/kubernetes/pkg/apis/autoscaling/v1.ScaleStatus",
"v1alpha1.PolicyRule": "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1.PolicyRule",
"v1.EndpointPort": "k8s.io/kubernetes/pkg/api/v1.EndpointPort",
"v1beta1.IngressList": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.IngressList",
"v1.EndpointAddress": "k8s.io/kubernetes/pkg/api/v1.EndpointAddress",
"v1.NodeSelector": "k8s.io/kubernetes/pkg/api/v1.NodeSelector",
"v1beta1.StorageClassList": "k8s.io/kubernetes/pkg/apis/storage/v1beta1.StorageClassList",
"v1.ServiceList": "k8s.io/kubernetes/pkg/api/v1.ServiceList",
"v2alpha1.CronJobSpec": "k8s.io/kubernetes/pkg/apis/batch/v2alpha1.CronJobSpec",
"v1.ContainerStateTerminated": "k8s.io/kubernetes/pkg/api/v1.ContainerStateTerminated",
"v1beta1.TokenReview": "k8s.io/kubernetes/pkg/apis/authentication/v1beta1.TokenReview",
"v1beta1.IngressBackend": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.IngressBackend",
"v1.Time": "k8s.io/apimachinery/pkg/apis/meta/v1.Time",
"v1beta1.IngressSpec": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.IngressSpec",
"v2alpha1.JobTemplateSpec": "k8s.io/kubernetes/pkg/apis/batch/v2alpha1.JobTemplateSpec",
"v1.LimitRange": "k8s.io/kubernetes/pkg/api/v1.LimitRange",
"v1beta1.UserInfo": "k8s.io/kubernetes/pkg/apis/authentication/v1beta1.UserInfo",
"v1.ResourceQuotaSpec": "k8s.io/kubernetes/pkg/api/v1.ResourceQuotaSpec",
"v1.ContainerPort": "k8s.io/kubernetes/pkg/api/v1.ContainerPort",
"v1beta1.HTTPIngressRuleValue": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.HTTPIngressRuleValue",
"v1.AzureFileVolumeSource": "k8s.io/kubernetes/pkg/api/v1.AzureFileVolumeSource",
"v1beta1.NetworkPolicySpec": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.NetworkPolicySpec",
"v1.PodTemplateSpec": "k8s.io/kubernetes/pkg/api/v1.PodTemplateSpec",
"v1.SecretVolumeSource": "k8s.io/kubernetes/pkg/api/v1.SecretVolumeSource",
"v1.PodSpec": "k8s.io/kubernetes/pkg/api/v1.PodSpec",
"v1.CephFSVolumeSource": "k8s.io/kubernetes/pkg/api/v1.CephFSVolumeSource",
"v1beta1.CPUTargetUtilization": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.CPUTargetUtilization",
"v1.Volume": "k8s.io/kubernetes/pkg/api/v1.Volume",
"v1beta1.Ingress": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.Ingress",
"v1beta1.HorizontalPodAutoscalerList": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.HorizontalPodAutoscalerList",
"v1.PersistentVolumeStatus": "k8s.io/kubernetes/pkg/api/v1.PersistentVolumeStatus",
"v1beta1.IDRange": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.IDRange",
"v2alpha1.JobCondition": "k8s.io/kubernetes/pkg/apis/batch/v2alpha1.JobCondition",
"v1beta1.IngressRule": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.IngressRule",
"v1alpha1.RoleRef": "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1.RoleRef",
"v1.PodAffinityTerm": "k8s.io/kubernetes/pkg/api/v1.PodAffinityTerm",
"v1.ObjectReference": "k8s.io/kubernetes/pkg/api/v1.ObjectReference",
"v1.ServiceStatus": "k8s.io/kubernetes/pkg/api/v1.ServiceStatus",
"v1.APIResource": "k8s.io/apimachinery/pkg/apis/meta/v1.APIResource",
"v1beta1.Scale": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.Scale",
"v1.AzureDiskVolumeSource": "k8s.io/kubernetes/pkg/api/v1.AzureDiskVolumeSource",
"v1beta1.SubjectAccessReviewStatus": "k8s.io/kubernetes/pkg/apis/authorization/v1beta1.SubjectAccessReviewStatus",
"v1.ConfigMap": "k8s.io/kubernetes/pkg/api/v1.ConfigMap",
"v1.CrossVersionObjectReference": "k8s.io/kubernetes/pkg/apis/autoscaling/v1.CrossVersionObjectReference",
"v1.APIVersions": "k8s.io/apimachinery/pkg/apis/meta/v1.APIVersions",
"v1alpha1.ClusterRoleList": "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1.ClusterRoleList",
"v1.Node": "k8s.io/kubernetes/pkg/api/v1.Node",
"resource.Quantity": "k8s.io/kubernetes/pkg/api/resource.Quantity",
"v1.Event": "k8s.io/kubernetes/pkg/api/v1.Event",
"v1.JobStatus": "k8s.io/kubernetes/pkg/apis/batch/v1.JobStatus",
"v1.PersistentVolumeSpec": "k8s.io/kubernetes/pkg/api/v1.PersistentVolumeSpec",
"v1beta1.SubjectAccessReviewSpec": "k8s.io/kubernetes/pkg/apis/authorization/v1beta1.SubjectAccessReviewSpec",
"v1.ResourceFieldSelector": "k8s.io/kubernetes/pkg/api/v1.ResourceFieldSelector",
"v1.EndpointSubset": "k8s.io/kubernetes/pkg/api/v1.EndpointSubset",
"v1alpha1.CertificateSigningRequestSpec": "k8s.io/kubernetes/pkg/apis/certificates/v1alpha1.CertificateSigningRequestSpec",
"v1.HostPathVolumeSource": "k8s.io/kubernetes/pkg/api/v1.HostPathVolumeSource",
"v1.LoadBalancerStatus": "k8s.io/kubernetes/pkg/api/v1.LoadBalancerStatus",
"v1beta1.HTTPIngressPath": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.HTTPIngressPath",
"v1beta1.Role": "k8s.io/kubernetes/pkg/apis/rbac/v1beta1.Role",
"v1beta1.DeploymentStrategy": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.DeploymentStrategy",
"v1beta1.RunAsUserStrategyOptions": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.RunAsUserStrategyOptions",
"v1beta1.DeploymentSpec": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.DeploymentSpec",
"v1.ExecAction": "k8s.io/kubernetes/pkg/api/v1.ExecAction",
"v1beta1.PodSecurityPolicySpec": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.PodSecurityPolicySpec",
"v1.HorizontalPodAutoscalerStatus": "k8s.io/kubernetes/pkg/apis/autoscaling/v1.HorizontalPodAutoscalerStatus",
"v1.PersistentVolumeList": "k8s.io/kubernetes/pkg/api/v1.PersistentVolumeList",
"v1alpha1.ClusterRole": "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1.ClusterRole",
"v1.JobSpec": "k8s.io/kubernetes/pkg/apis/batch/v1.JobSpec",
"v1beta1.DaemonSetSpec": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.DaemonSetSpec",
"v2alpha1.CronJobList": "k8s.io/kubernetes/pkg/apis/batch/v2alpha1.CronJobList",
"v1.Endpoints": "k8s.io/kubernetes/pkg/api/v1.Endpoints",
"v1.SELinuxOptions": "k8s.io/kubernetes/pkg/api/v1.SELinuxOptions",
"v1beta1.SelfSubjectAccessReviewSpec": "k8s.io/kubernetes/pkg/apis/authorization/v1beta1.SelfSubjectAccessReviewSpec",
"v1beta1.ScaleStatus": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.ScaleStatus",
"v1.NodeSelectorTerm": "k8s.io/kubernetes/pkg/api/v1.NodeSelectorTerm",
"v1alpha1.CertificateSigningRequestStatus": "k8s.io/kubernetes/pkg/apis/certificates/v1alpha1.CertificateSigningRequestStatus",
"v1.StatusDetails": "k8s.io/apimachinery/pkg/apis/meta/v1.StatusDetails",
"v2alpha1.JobStatus": "k8s.io/kubernetes/pkg/apis/batch/v2alpha1.JobStatus",
"v1beta1.DeploymentRollback": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.DeploymentRollback",
"v1.GlusterfsVolumeSource": "k8s.io/kubernetes/pkg/api/v1.GlusterfsVolumeSource",
"v1.ServiceAccountList": "k8s.io/kubernetes/pkg/api/v1.ServiceAccountList",
"v1.JobList": "k8s.io/kubernetes/pkg/apis/batch/v1.JobList",
"v1.EventList": "k8s.io/kubernetes/pkg/api/v1.EventList",
"v1.ContainerStateWaiting": "k8s.io/kubernetes/pkg/api/v1.ContainerStateWaiting",
"v1.APIResourceList": "k8s.io/apimachinery/pkg/apis/meta/v1.APIResourceList",
"v1.ContainerStatus": "k8s.io/kubernetes/pkg/api/v1.ContainerStatus",
"v2alpha1.JobList": "k8s.io/kubernetes/pkg/apis/batch/v2alpha1.JobList",
"v1.ConfigMapKeySelector": "k8s.io/kubernetes/pkg/api/v1.ConfigMapKeySelector",
"v1.PhotonPersistentDiskVolumeSource": "k8s.io/kubernetes/pkg/api/v1.PhotonPersistentDiskVolumeSource",
"v1.PodTemplateList": "k8s.io/kubernetes/pkg/api/v1.PodTemplateList",
"v1.PersistentVolumeClaimStatus": "k8s.io/kubernetes/pkg/api/v1.PersistentVolumeClaimStatus",
"v1.ServiceAccount": "k8s.io/kubernetes/pkg/api/v1.ServiceAccount",
"v1alpha1.CertificateSigningRequestList": "k8s.io/kubernetes/pkg/apis/certificates/v1alpha1.CertificateSigningRequestList",
"v1beta1.SupplementalGroupsStrategyOptions": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.SupplementalGroupsStrategyOptions",
"v1.HTTPHeader": "k8s.io/kubernetes/pkg/api/v1.HTTPHeader",
"version.Info": "k8s.io/apimachinery/pkg/version.Info",
"v1.EventSource": "k8s.io/kubernetes/pkg/api/v1.EventSource",
"v1alpha1.ClusterRoleBindingList": "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1.ClusterRoleBindingList",
"v1.OwnerReference": "k8s.io/apimachinery/pkg/apis/meta/v1.OwnerReference",
"v1beta1.ClusterRoleBindingList": "k8s.io/kubernetes/pkg/apis/rbac/v1beta1.ClusterRoleBindingList",
"v1beta1.ScaleSpec": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.ScaleSpec",
"v1.GitRepoVolumeSource": "k8s.io/kubernetes/pkg/api/v1.GitRepoVolumeSource",
"v1beta1.NetworkPolicy": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.NetworkPolicy",
"v1.ConfigMapEnvSource": "k8s.io/kubernetes/pkg/api/v1.ConfigMapEnvSource",
"v1.PodTemplate": "k8s.io/kubernetes/pkg/api/v1.PodTemplate",
"v1beta1.DeploymentCondition": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.DeploymentCondition",
"v1beta1.PodDisruptionBudgetStatus": "k8s.io/kubernetes/pkg/apis/policy/v1beta1.PodDisruptionBudgetStatus",
"v1.EnvVar": "k8s.io/kubernetes/pkg/api/v1.EnvVar",
"v1.LimitRangeSpec": "k8s.io/kubernetes/pkg/api/v1.LimitRangeSpec",
"v1.DownwardAPIVolumeSource": "k8s.io/kubernetes/pkg/api/v1.DownwardAPIVolumeSource",
"v1.NodeDaemonEndpoints": "k8s.io/kubernetes/pkg/api/v1.NodeDaemonEndpoints",
"v1.ComponentCondition": "k8s.io/kubernetes/pkg/api/v1.ComponentCondition",
"v1alpha1.CertificateSigningRequestCondition": "k8s.io/kubernetes/pkg/apis/certificates/v1alpha1.CertificateSigningRequestCondition",
"v1.SecurityContext": "k8s.io/kubernetes/pkg/api/v1.SecurityContext",
"v1beta1.LocalSubjectAccessReview": "k8s.io/kubernetes/pkg/apis/authorization/v1beta1.LocalSubjectAccessReview",
"v1beta1.StatefulSetSpec": "k8s.io/kubernetes/pkg/apis/apps/v1beta1.StatefulSetSpec",
"v1.NodeAddress": "k8s.io/kubernetes/pkg/api/v1.NodeAddress",
"v1.QuobyteVolumeSource": "k8s.io/kubernetes/pkg/api/v1.QuobyteVolumeSource",
"v1.Capabilities": "k8s.io/kubernetes/pkg/api/v1.Capabilities",
"v1.GCEPersistentDiskVolumeSource": "k8s.io/kubernetes/pkg/api/v1.GCEPersistentDiskVolumeSource",
"v1beta1.ReplicaSet": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.ReplicaSet",
"v1beta1.HorizontalPodAutoscalerStatus": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.HorizontalPodAutoscalerStatus",
"v1beta1.PolicyRule": "k8s.io/kubernetes/pkg/apis/rbac/v1beta1.PolicyRule",
"v1.ConfigMapList": "k8s.io/kubernetes/pkg/api/v1.ConfigMapList",
"v1.Lifecycle": "k8s.io/kubernetes/pkg/api/v1.Lifecycle",
"v1beta1.SelfSubjectAccessReview": "k8s.io/kubernetes/pkg/apis/authorization/v1beta1.SelfSubjectAccessReview",
"v2alpha1.CronJob": "k8s.io/kubernetes/pkg/apis/batch/v2alpha1.CronJob",
"v2alpha1.CronJobStatus": "k8s.io/kubernetes/pkg/apis/batch/v2alpha1.CronJobStatus",
"v1beta1.SubjectAccessReview": "k8s.io/kubernetes/pkg/apis/authorization/v1beta1.SubjectAccessReview",
"v1.Preconditions": "k8s.io/kubernetes/pkg/api/v1.Preconditions",
"v1beta1.DaemonSetList": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.DaemonSetList",
"v1.PersistentVolumeClaim": "k8s.io/kubernetes/pkg/api/v1.PersistentVolumeClaim",
"v1.Scale": "k8s.io/kubernetes/pkg/apis/autoscaling/v1.Scale",
"v1beta1.StatefulSetStatus": "k8s.io/kubernetes/pkg/apis/apps/v1beta1.StatefulSetStatus",
"v1.NFSVolumeSource": "k8s.io/kubernetes/pkg/api/v1.NFSVolumeSource",
"v1.ObjectFieldSelector": "k8s.io/kubernetes/pkg/api/v1.ObjectFieldSelector",
"v1.ResourceRequirements": "k8s.io/kubernetes/pkg/api/v1.ResourceRequirements",
"v1.WatchEvent": "k8s.io/apimachinery/pkg/apis/meta/v1.WatchEvent",
"v1.ReplicationControllerSpec": "k8s.io/kubernetes/pkg/api/v1.ReplicationControllerSpec",
"v1.HTTPGetAction": "k8s.io/kubernetes/pkg/api/v1.HTTPGetAction",
"v1beta1.RollbackConfig": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.RollbackConfig",
"v1beta1.TokenReviewSpec": "k8s.io/kubernetes/pkg/apis/authentication/v1beta1.TokenReviewSpec",
"v1.PodSecurityContext": "k8s.io/kubernetes/pkg/api/v1.PodSecurityContext",
"v1beta1.PodDisruptionBudgetList": "k8s.io/kubernetes/pkg/apis/policy/v1beta1.PodDisruptionBudgetList",
"v1.VolumeMount": "k8s.io/kubernetes/pkg/api/v1.VolumeMount",
"v1.ReplicationController": "k8s.io/kubernetes/pkg/api/v1.ReplicationController",
"v1.NamespaceList": "k8s.io/kubernetes/pkg/api/v1.NamespaceList",
"v1alpha1.ClusterRoleBinding": "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1.ClusterRoleBinding",
"v1.TCPSocketAction": "k8s.io/kubernetes/pkg/api/v1.TCPSocketAction",
"v1.Binding": "k8s.io/kubernetes/pkg/api/v1.Binding",
"v1beta1.ReplicaSetStatus": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.ReplicaSetStatus",
"intstr.IntOrString": "k8s.io/kubernetes/pkg/util/intstr.IntOrString",
"v1.EndpointsList": "k8s.io/kubernetes/pkg/api/v1.EndpointsList",
"v1.Container": "k8s.io/kubernetes/pkg/api/v1.Container",
"v1alpha1.RoleList": "k8s.io/kubernetes/pkg/apis/rbac/v1alpha1.RoleList",
"v1.VsphereVirtualDiskVolumeSource": "k8s.io/kubernetes/pkg/api/v1.VsphereVirtualDiskVolumeSource",
"v1.NodeList": "k8s.io/kubernetes/pkg/api/v1.NodeList",
"v1.EmptyDirVolumeSource": "k8s.io/kubernetes/pkg/api/v1.EmptyDirVolumeSource",
"v1beta1.FSGroupStrategyOptions": "k8s.io/kubernetes/pkg/apis/extensions/v1beta1.FSGroupStrategyOptions",
"v1.Status": "k8s.io/apimachinery/pkg/apis/meta/v1.Status",
}
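	// For every legacy definition name whose current counterpart is present in the spec,
	// add an alias schema that simply $refs the new definition and carries a deprecation note.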
for k, v := range compatibilityMap {
if _, found := s.Definitions[v]; !found {
continue
}
s.Definitions[k] = spec.Schema{
SchemaProps: spec.SchemaProps{
Ref: spec.MustCreateRef("#/definitions/" + openapi.EscapeJsonPointer(v)),
Description: fmt.Sprintf("Deprecated. Please use %s instead.", v),
},
}
}
return s, nil
}
| [
"\"KUBE_API_VERSIONS\"",
"\"KUBE_API_VERSIONS\""
]
| []
| [
"KUBE_API_VERSIONS"
]
| [] | ["KUBE_API_VERSIONS"] | go | 1 | 0 | |
tests/settings.py | import os
import sys
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.sites',
'import_export',
'core',
]
SITE_ID = 1
ROOT_URLCONF = "urls"
DEBUG = True
STATIC_URL = '/static/'
SECRET_KEY = '2n6)=vnp8@bu0om9d05vwf7@=5vpn%)97-!d*t4zq1mku%0-@j'
MIDDLEWARE = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': (
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.request',
),
},
},
]
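# The database backend is selected via the IMPORT_EXPORT_TEST_TYPE environment variable:
# 'mysql-innodb' and 'postgres' use the corresponding servers (credentials also taken from the
# environment); anything else falls back to SQLite, using a file-based database.db except under 'test'.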
if os.environ.get('IMPORT_EXPORT_TEST_TYPE') == 'mysql-innodb':
IMPORT_EXPORT_USE_TRANSACTIONS = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'import_export',
'USER': os.environ.get('IMPORT_EXPORT_MYSQL_USER', 'root'),
'PASSWORD': os.environ.get('IMPORT_EXPORT_MYSQL_PASSWORD', 'password'),
'HOST': '127.0.0.1',
'PORT': 3306,
'TEST': {
'CHARSET': 'utf8',
'COLLATION': 'utf8_general_ci',
}
}
}
elif os.environ.get('IMPORT_EXPORT_TEST_TYPE') == 'postgres':
IMPORT_EXPORT_USE_TRANSACTIONS = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': 'import_export',
'USER': os.environ.get('IMPORT_EXPORT_POSTGRESQL_USER'),
'PASSWORD': os.environ.get('IMPORT_EXPORT_POSTGRESQL_PASSWORD'),
'HOST': 'localhost',
'PORT': 5432
}
}
else:
if 'test' in sys.argv:
database_name = ''
else:
database_name = os.path.join(os.path.dirname(__file__), 'database.db')
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': database_name,
}
}
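# Logging is effectively silenced: existing loggers are disabled and the root logger goes to a NullHandler.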
LOGGING = {
'version': 1,
'disable_existing_loggers': True,
'handlers': {
'console': {
'class': 'logging.NullHandler'
}
},
'root': {
'handlers': ['console'],
}}
# USE_TZ = True
# TIME_ZONE = 'Europe/Ljubljana'
| []
| []
| [
"IMPORT_EXPORT_MYSQL_USER",
"IMPORT_EXPORT_TEST_TYPE",
"IMPORT_EXPORT_POSTGRESQL_USER",
"IMPORT_EXPORT_MYSQL_PASSWORD",
"IMPORT_EXPORT_POSTGRESQL_PASSWORD"
]
| [] | ["IMPORT_EXPORT_MYSQL_USER", "IMPORT_EXPORT_TEST_TYPE", "IMPORT_EXPORT_POSTGRESQL_USER", "IMPORT_EXPORT_MYSQL_PASSWORD", "IMPORT_EXPORT_POSTGRESQL_PASSWORD"] | python | 5 | 0 | |
config.py | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config(object):
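    # Most settings are read from environment variables; the literal values below are development fallbacks.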
SERVER_NAME = os.environ.get('SERVER_NAME') or 'localhost.dev:5000'
SECRET_KEY = os.environ.get('SECRET_KEY') or 'nunca-lo-adivinaras'
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir,'app.db')
SQLALCHEMY_TRACK_MODIFICATIONS = False
BOOTSTRAP_SERVE_LOCAL=True
MAIL_SERVER = os.environ.get('MAIL_SERVER')
MAIL_PORT = int(os.environ.get('MAIL_PORT') or 25)
MAIL_USE_TLS = os.environ.get('MAIL_USE_TLS')
MAIL_USERNAME = os.environ.get('MAIL_USERNAME')
MAIL_PASSWORD = os.environ.get('MAIL_PASSWORD')
ADMINS = ['[email protected]']
    BOOTSTRAP_SERVE_LOCAL = True
| []
| []
| [
"MAIL_SERVER",
"MAIL_PASSWORD",
"DATABASE_URL",
"MAIL_PORT",
"SERVER_NAME",
"SECRET_KEY",
"MAIL_USERNAME",
"MAIL_USE_TLS"
]
| [] | ["MAIL_SERVER", "MAIL_PASSWORD", "DATABASE_URL", "MAIL_PORT", "SERVER_NAME", "SECRET_KEY", "MAIL_USERNAME", "MAIL_USE_TLS"] | python | 8 | 0 | |
dcos-hazelcast/src/main/java/mesosphere/dcos/hazelcast/discovery/DcosDiscoveryStrategy.java | /*
* Copyright 2017 Johannes Unterstein ([email protected])
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package mesosphere.dcos.hazelcast.discovery;
import com.hazelcast.logging.ILogger;
import com.hazelcast.nio.Address;
import com.hazelcast.spi.discovery.AbstractDiscoveryStrategy;
import com.hazelcast.spi.discovery.DiscoveryNode;
import com.hazelcast.spi.discovery.SimpleDiscoveryNode;
import mesosphere.dcos.hazelcast.DcosHazelcastApp;
import org.apache.commons.lang3.StringUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.net.UnknownHostException;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
public class DcosDiscoveryStrategy extends AbstractDiscoveryStrategy {
private static final Logger LOG = LoggerFactory.getLogger(DcosHazelcastApp.class);
DcosDiscoveryStrategy(ILogger logger, Map<String, Comparable> properties) {
super(logger, properties);
}
@Override
public Iterable<DiscoveryNode> discoverNodes() {
List<DiscoveryNode> servers = new LinkedList<>();
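    // HAZELCAST_INITIAL_MEMBERS is expected to hold one DNS name per line; every resolvable entry becomes a discovery node on port 5701.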
for (String node : StringUtils.split(System.getenv("HAZELCAST_INITIAL_MEMBERS"), System.lineSeparator())) {
try {
servers.add(new SimpleDiscoveryNode(new Address(node, 5701)));
} catch (UnknownHostException e) {
LOG.warn(String.format("DNS name '%s' not resolvable", node), e);
}
}
return servers;
}
@Override
public void destroy() {
}
}
| [
"\"HAZELCAST_INITIAL_MEMBERS\""
]
| []
| [
"HAZELCAST_INITIAL_MEMBERS"
]
| [] | ["HAZELCAST_INITIAL_MEMBERS"] | java | 1 | 0 | |
main/main.go | // Copyright 1999-2019 Alibaba Group Holding Limited
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package main
import (
"os"
"github.com/aliyun/aliyun-cli/cli"
"github.com/aliyun/aliyun-cli/config"
"github.com/aliyun/aliyun-cli/i18n"
"github.com/aliyun/aliyun-cli/openapi"
"github.com/aliyun/aliyun-cli/oss/lib"
)
/**
## Configure
$ aliyuncli configure
Aliyun Access Key ID [****************wQ7v]:
Aliyun Access Key Secret [****************fxGu]:
Default Region Id [cn-hangzhou]:
Default output format [json]:
## OpenApi mode
$ aliyuncli Ecs DescribeInstances
$ aliyuncli Ecs StartInstance --InstanceId your_instance_id
$ aliyuncli Rds DescribeDBInstances
## use HTTPS (SSL/TLS)
$ aliyuncli Ecs DescribeInstances --secure
*/
func main() {
cli.PlatformCompatible()
writer := cli.DefaultWriter()
//
// load current configuration
profile, err := config.LoadCurrentProfile(writer)
if err != nil {
cli.Errorf(writer, "ERROR: load current configuration failed %s", err)
return
}
// set user agent
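	// an optional ALIYUN_USER_AGENT environment variable supplies a custom user agent string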
userAgentFromEnv := os.Getenv("ALIYUN_USER_AGENT")
config.SetUserAgent(userAgentFromEnv)
// set language with current profile
i18n.SetLanguage(profile.Language)
// create root command
rootCmd := &cli.Command{
Name: "aliyun",
Short: i18n.T("Alibaba Cloud Command Line Interface Version "+cli.Version, "阿里云CLI命令行工具 "+cli.Version),
Usage: "aliyun <product> <operation> [--parameter1 value1 --parameter2 value2 ...]",
Sample: "aliyun ecs DescribeRegions",
EnableUnknownFlag: true,
}
// add default flags
config.AddFlags(rootCmd.Flags())
openapi.AddFlags(rootCmd.Flags())
	// create a new OpenAPI commando to process rootCmd
commando := openapi.NewCommando(writer, profile)
commando.InitWithCommand(rootCmd)
ctx := cli.NewCommandContext(writer)
ctx.EnterCommand(rootCmd)
ctx.SetCompletion(cli.ParseCompletionForShell())
rootCmd.AddSubCommand(config.NewConfigureCommand())
// rootCmd.AddSubCommand(command.NewTestCommand())
rootCmd.AddSubCommand(lib.NewOssCommand())
rootCmd.AddSubCommand(cli.NewVersionCommand())
rootCmd.AddSubCommand(cli.NewAutoCompleteCommand())
rootCmd.Execute(ctx, os.Args[1:])
}
| [
"\"ALIYUN_USER_AGENT\""
]
| []
| [
"ALIYUN_USER_AGENT"
]
| [] | ["ALIYUN_USER_AGENT"] | go | 1 | 0 | |
application.wsgi | import logging
import os
import sys
logging.basicConfig(stream=sys.stderr)
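# The ALL-CAPS values below (DATABASE_PATH, MAIL_USER, MAIL_PASSWORD, APPLICATION_PATH,
# VENV_BIN_PATH) are placeholders meant to be replaced with real values at deployment time.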
os.environ['FLASK_CONFIG'] = 'production'
os.environ['DATABASE_URL'] ='DATABASE_PATH'
os.environ['MAIL_USERNAME'] = 'MAIL_USER'
os.environ['MAIL_PASSWORD'] = 'MAIL_PASSWORD'
PROJECT_DIR = "APPLICATION_PATH"
sys.path.insert(0, PROJECT_DIR)
activate_this = os.path.join(PROJECT_DIR, 'VENV_BIN_PATH', 'activate_this.py')
with open(activate_this) as file_:
exec(file_.read(), dict(__file__=activate_this))
from manage import app as application
| []
| []
| [
"MAIL_PASSWORD",
"DATABASE_URL",
"MAIL_USERNAME",
"FLASK_CONFIG"
]
| [] | ["MAIL_PASSWORD", "DATABASE_URL", "MAIL_USERNAME", "FLASK_CONFIG"] | python | 4 | 0 | |
{{ cookiecutter.project_slug }}/{{ cookiecutter.project_slug }}/wsgi.py | """
WSGI config for {{ cookiecutter.project_slug }} project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
from dotenv import load_dotenv
load_dotenv()
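# Read a local .env file (if present) so environment-specific values can be kept out of the codebase.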
os.environ.setdefault('DJANGO_SETTINGS_MODULE', '{{ cookiecutter.project_slug }}.settings.prod')
application = get_wsgi_application()
| []
| []
| []
| [] | [] | python | 0 | 0 | |
pkg/start/start_integration_test.go | package start
import (
"context"
"encoding/json"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"reflect"
"regexp"
"strings"
"testing"
"time"
v1 "k8s.io/api/core/v1"
"k8s.io/apimachinery/pkg/api/errors"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
"k8s.io/apimachinery/pkg/types"
"k8s.io/apimachinery/pkg/util/diff"
randutil "k8s.io/apimachinery/pkg/util/rand"
"k8s.io/apimachinery/pkg/util/wait"
"k8s.io/client-go/kubernetes"
configv1 "github.com/openshift/api/config/v1"
clientset "github.com/openshift/client-go/config/clientset/versioned"
"github.com/openshift/cluster-version-operator/lib/resourcemerge"
"github.com/openshift/cluster-version-operator/pkg/cvo"
)
var (
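	// Each fixture below mimics a release payload's on-disk layout: release-manifests/ holds the
	// user-facing payload where ReleaseImage is left untouched, while manifests/ holds the innate
	// payload where {{.ReleaseImage}} is substituted.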
version_0_0_1 = map[string]interface{}{
"release-manifests": map[string]interface{}{
"image-references": `
{
"kind": "ImageStream",
"apiVersion": "image.openshift.io/v1",
"metadata": {
"name": "0.0.1"
}
}
`,
// this manifest should not have ReleaseImage replaced because it is part of the user facing payload
"config2.json": `
{
"kind": "ConfigMap",
"apiVersion": "v1",
"metadata": {
"name": "config2",
"namespace": "$(NAMESPACE)"
},
"data": {
"version": "0.0.1",
"releaseImage": "{{.ReleaseImage}}"
}
}
`,
},
"manifests": map[string]interface{}{
// this manifest is part of the innate payload and should have ReleaseImage replaced
"config1.json": `
{
"kind": "ConfigMap",
"apiVersion": "v1",
"metadata": {
"name": "config1",
"namespace": "$(NAMESPACE)"
},
"data": {
"version": "0.0.1",
"releaseImage": "{{.ReleaseImage}}"
}
}
`,
},
}
version_0_0_2 = map[string]interface{}{
"release-manifests": map[string]interface{}{
"image-references": `
{
"kind": "ImageStream",
"apiVersion": "image.openshift.io/v1",
"metadata": {
"name": "0.0.2"
}
}
`,
"config2.json": `
{
"kind": "ConfigMap",
"apiVersion": "v1",
"metadata": {
"name": "config2",
"namespace": "$(NAMESPACE)"
},
"data": {
"version": "0.0.2",
"releaseImage": "{{.ReleaseImage}}"
}
}
`,
},
"manifests": map[string]interface{}{
"config1.json": `
{
"kind": "ConfigMap",
"apiVersion": "v1",
"metadata": {
"name": "config1",
"namespace": "$(NAMESPACE)"
},
"data": {
"version": "0.0.2",
"releaseImage": "{{.ReleaseImage}}"
}
}
`,
},
}
version_0_0_2_failing = map[string]interface{}{
"release-manifests": map[string]interface{}{
"image-references": `
{
"kind": "ImageStream",
"apiVersion": "image.openshift.io/v1",
"metadata": {
"name": "0.0.2"
}
}
`,
// has invalid label value, API server will reject
"config2.json": `
{
"kind": "ConfigMap",
"apiVersion": "v1",
"metadata": {
"name": "config2",
"namespace": "$(NAMESPACE)",
"labels": {"": ""}
},
"data": {
"version": "0.0.2",
"releaseImage": "{{.ReleaseImage}}"
}
}
`,
},
"manifests": map[string]interface{}{
"config1.json": `
{
"kind": "ConfigMap",
"apiVersion": "v1",
"metadata": {
"name": "config1",
"namespace": "$(NAMESPACE)"
},
"data": {
"version": "0.0.2",
"releaseImage": "{{.ReleaseImage}}"
}
}
`,
},
}
)
func TestIntegrationCVO_initializeAndUpgrade(t *testing.T) {
if os.Getenv("TEST_INTEGRATION") != "1" {
t.Skipf("Integration tests are disabled unless TEST_INTEGRATION=1")
}
t.Parallel()
// use the same client setup as the start command
cb, err := newClientBuilder("")
if err != nil {
t.Fatal(err)
}
cfg := cb.RestConfig()
kc := cb.KubeClientOrDie("integration-test")
client := cb.ClientOrDie("integration-test")
ns := fmt.Sprintf("e2e-cvo-%s", randutil.String(4))
if _, err := kc.Core().Namespaces().Create(&v1.Namespace{
ObjectMeta: metav1.ObjectMeta{
Name: ns,
},
}); err != nil {
t.Fatal(err)
}
defer func() {
if err := client.Config().ClusterVersions().Delete(ns, nil); err != nil {
t.Logf("failed to delete cluster version %s: %v", ns, err)
}
if err := kc.Core().Namespaces().Delete(ns, nil); err != nil {
t.Logf("failed to delete namespace %s: %v", ns, err)
}
}()
dir, err := ioutil.TempDir("", "cvo-test")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(dir)
if err := createContent(filepath.Join(dir, "0.0.1"), version_0_0_1, map[string]string{"NAMESPACE": ns}); err != nil {
t.Fatal(err)
}
if err := createContent(filepath.Join(dir, "0.0.2"), version_0_0_2, map[string]string{"NAMESPACE": ns}); err != nil {
t.Fatal(err)
}
payloadImage1 := "arbitrary/release:image"
payloadImage2 := "arbitrary/release:image-2"
retriever := &mapPayloadRetriever{map[string]string{
payloadImage1: filepath.Join(dir, "0.0.1"),
payloadImage2: filepath.Join(dir, "0.0.2"),
}}
options := NewOptions()
options.Namespace = ns
options.Name = ns
options.ListenAddr = ""
options.NodeName = "test-node"
options.ReleaseImage = payloadImage1
options.PayloadOverride = filepath.Join(dir, "ignored")
options.EnableMetrics = false
controllers := options.NewControllerContext(cb)
worker := cvo.NewSyncWorker(retriever, cvo.NewResourceBuilder(cfg), 5*time.Second, wait.Backoff{Steps: 3}).(*cvo.SyncWorker)
controllers.CVO.SetSyncWorkerForTesting(worker)
stopCh := make(chan struct{})
defer close(stopCh)
controllers.Start(stopCh)
t.Logf("wait until we observe the cluster version become available")
lastCV, err := waitForAvailableUpdate(t, client, ns, false, "0.0.1")
if err != nil {
t.Logf("latest version:\n%s", printCV(lastCV))
t.Fatalf("cluster version never became available: %v", err)
}
status := worker.Status()
t.Logf("verify the available cluster version's status matches our expectations")
t.Logf("Cluster version:\n%s", printCV(lastCV))
verifyClusterVersionStatus(t, lastCV, configv1.Update{Payload: payloadImage1, Version: "0.0.1"}, 1)
verifyReleasePayload(t, kc, ns, "0.0.1", payloadImage1)
t.Logf("wait for the next resync and verify that status didn't change")
if err := wait.Poll(time.Second, 30*time.Second, func() (bool, error) {
updated := worker.Status()
if updated.Completed > status.Completed {
return true, nil
}
return false, nil
}); err != nil {
t.Fatal(err)
}
time.Sleep(time.Second)
cv, err := client.Config().ClusterVersions().Get(ns, metav1.GetOptions{})
if err != nil {
t.Fatal(err)
}
if !reflect.DeepEqual(cv.Status, lastCV.Status) {
t.Fatalf("unexpected: %s", diff.ObjectReflectDiff(lastCV.Status, cv.Status))
}
verifyReleasePayload(t, kc, ns, "0.0.1", payloadImage1)
t.Logf("trigger an update to a new version")
cv, err = client.Config().ClusterVersions().Patch(ns, types.MergePatchType, []byte(fmt.Sprintf(`{"spec":{"desiredUpdate":{"payload":"%s"}}}`, payloadImage2)))
if err != nil {
t.Fatal(err)
}
if cv.Spec.DesiredUpdate == nil {
t.Fatalf("cluster desired version was not preserved: %s", printCV(cv))
}
t.Logf("wait for the new version to be available")
lastCV, err = waitForAvailableUpdate(t, client, ns, false, "0.0.1", "0.0.2")
if err != nil {
t.Logf("latest version:\n%s", printCV(lastCV))
t.Fatalf("cluster version never reached available at 0.0.2: %v", err)
}
t.Logf("Upgraded version:\n%s", printCV(lastCV))
verifyClusterVersionStatus(t, lastCV, configv1.Update{Payload: payloadImage2, Version: "0.0.2"}, 2)
verifyReleasePayload(t, kc, ns, "0.0.2", payloadImage2)
t.Logf("delete an object so that the next resync will recover it")
if err := kc.CoreV1().ConfigMaps(ns).Delete("config1", nil); err != nil {
t.Fatalf("couldn't delete CVO managed object: %v", err)
}
status = worker.Status()
t.Logf("wait for the next resync and verify that status didn't change")
if err := wait.Poll(time.Second, 30*time.Second, func() (bool, error) {
updated := worker.Status()
if updated.Completed > status.Completed {
return true, nil
}
return false, nil
}); err != nil {
t.Fatal(err)
}
time.Sleep(time.Second)
cv, err = client.Config().ClusterVersions().Get(ns, metav1.GetOptions{})
if err != nil {
t.Fatal(err)
}
if !reflect.DeepEqual(cv.Status, lastCV.Status) {
t.Fatalf("unexpected: %s", diff.ObjectReflectDiff(lastCV.Status, cv.Status))
}
// should have recreated our deleted object
verifyReleasePayload(t, kc, ns, "0.0.2", payloadImage2)
}
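// TestIntegrationCVO_initializeAndHandleError verifies that the operator reports a failure when an upgrade payload
// contains an invalid manifest, and that it can be rolled back to the previous version.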
func TestIntegrationCVO_initializeAndHandleError(t *testing.T) {
if os.Getenv("TEST_INTEGRATION") != "1" {
t.Skipf("Integration tests are disabled unless TEST_INTEGRATION=1")
}
t.Parallel()
// use the same client setup as the start command
cb, err := newClientBuilder("")
if err != nil {
t.Fatal(err)
}
cfg := cb.RestConfig()
kc := cb.KubeClientOrDie("integration-test")
client := cb.ClientOrDie("integration-test")
ns := fmt.Sprintf("e2e-cvo-%s", randutil.String(4))
if _, err := kc.Core().Namespaces().Create(&v1.Namespace{
ObjectMeta: metav1.ObjectMeta{
Name: ns,
},
}); err != nil {
t.Fatal(err)
}
defer func() {
if err := client.Config().ClusterVersions().Delete(ns, nil); err != nil {
t.Logf("failed to delete cluster version %s: %v", ns, err)
}
if err := kc.Core().Namespaces().Delete(ns, nil); err != nil {
t.Logf("failed to delete namespace %s: %v", ns, err)
}
}()
dir, err := ioutil.TempDir("", "cvo-test")
if err != nil {
t.Fatal(err)
}
defer os.RemoveAll(dir)
if err := createContent(filepath.Join(dir, "0.0.1"), version_0_0_1, map[string]string{"NAMESPACE": ns}); err != nil {
t.Fatal(err)
}
if err := createContent(filepath.Join(dir, "0.0.2"), version_0_0_2_failing, map[string]string{"NAMESPACE": ns}); err != nil {
t.Fatal(err)
}
payloadImage1 := "arbitrary/release:image"
payloadImage2 := "arbitrary/release:image-2-failing"
retriever := &mapPayloadRetriever{map[string]string{
payloadImage1: filepath.Join(dir, "0.0.1"),
payloadImage2: filepath.Join(dir, "0.0.2"),
}}
options := NewOptions()
options.Namespace = ns
options.Name = ns
options.ListenAddr = ""
options.NodeName = "test-node"
options.ReleaseImage = payloadImage1
options.PayloadOverride = filepath.Join(dir, "ignored")
options.EnableMetrics = false
controllers := options.NewControllerContext(cb)
worker := cvo.NewSyncWorker(retriever, cvo.NewResourceBuilder(cfg), 5*time.Second, wait.Backoff{Steps: 3}).(*cvo.SyncWorker)
controllers.CVO.SetSyncWorkerForTesting(worker)
stopCh := make(chan struct{})
defer close(stopCh)
controllers.Start(stopCh)
t.Logf("wait until we observe the cluster version become available")
lastCV, err := waitForAvailableUpdate(t, client, ns, false, "0.0.1")
if err != nil {
t.Logf("latest version:\n%s", printCV(lastCV))
t.Fatalf("cluster version never became available: %v", err)
}
t.Logf("verify the available cluster version's status matches our expectations")
t.Logf("Cluster version:\n%s", printCV(lastCV))
verifyClusterVersionStatus(t, lastCV, configv1.Update{Payload: payloadImage1, Version: "0.0.1"}, 1)
verifyReleasePayload(t, kc, ns, "0.0.1", payloadImage1)
t.Logf("trigger an update to a new version that should fail")
cv, err := client.Config().ClusterVersions().Patch(ns, types.MergePatchType, []byte(fmt.Sprintf(`{"spec":{"desiredUpdate":{"payload":"%s"}}}`, payloadImage2)))
if err != nil {
t.Fatal(err)
}
if cv.Spec.DesiredUpdate == nil {
t.Fatalf("cluster desired version was not preserved: %s", printCV(cv))
}
t.Logf("wait for operator to report failure")
lastCV, err = waitUntilUpgradeFails(
t, client, ns,
"UpdatePayloadResourceInvalid",
fmt.Sprintf(
`Could not update configmap "%s/config2" (v1, 2 of 2): the object is invalid, possibly due to local cluster configuration`,
ns,
),
"Unable to apply 0.0.2: some cluster configuration is invalid",
"0.0.1", "0.0.2",
)
if err != nil {
t.Logf("latest version:\n%s", printCV(lastCV))
t.Fatalf("cluster version didn't report failure: %v", err)
}
t.Logf("ensure that one config map was updated and the other was not")
verifyReleasePayloadConfigMap1(t, kc, ns, "0.0.2", payloadImage2)
verifyReleasePayloadConfigMap2(t, kc, ns, "0.0.1", payloadImage1)
t.Logf("switch back to 0.0.1 and verify it succeeds")
cv, err = client.Config().ClusterVersions().Patch(ns, types.MergePatchType, []byte(`{"spec":{"desiredUpdate":{"payload":"", "version":"0.0.1"}}}`))
if err != nil {
t.Fatal(err)
}
if cv.Spec.DesiredUpdate == nil {
t.Fatalf("cluster desired version was not preserved: %s", printCV(cv))
}
lastCV, err = waitForAvailableUpdate(t, client, ns, true, "0.0.2", "0.0.1")
if err != nil {
t.Logf("latest version:\n%s", printCV(lastCV))
t.Fatalf("cluster version never reverted to 0.0.1: %v", err)
}
verifyClusterVersionStatus(t, lastCV, configv1.Update{Payload: payloadImage1, Version: "0.0.1"}, 3)
verifyReleasePayload(t, kc, ns, "0.0.1", payloadImage1)
}
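// TestIntegrationCVO_gracefulStepDown verifies that the operator creates a leader election lock, records a leadership
// event, and releases the lock promptly once its context is cancelled.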
func TestIntegrationCVO_gracefulStepDown(t *testing.T) {
if os.Getenv("TEST_INTEGRATION") != "1" {
t.Skipf("Integration tests are disabled unless TEST_INTEGRATION=1")
}
t.Parallel()
// use the same client setup as the start command
cb, err := newClientBuilder("")
if err != nil {
t.Fatal(err)
}
cfg := cb.RestConfig()
kc := cb.KubeClientOrDie("integration-test")
client := cb.ClientOrDie("integration-test")
ns := fmt.Sprintf("e2e-cvo-%s", randutil.String(4))
if _, err := kc.Core().Namespaces().Create(&v1.Namespace{
ObjectMeta: metav1.ObjectMeta{
Name: ns,
},
}); err != nil {
t.Fatal(err)
}
defer func() {
if err := client.Config().ClusterVersions().Delete(ns, nil); err != nil {
t.Logf("failed to delete cluster version %s: %v", ns, err)
}
if err := kc.Core().Namespaces().Delete(ns, nil); err != nil {
t.Logf("failed to delete namespace %s: %v", ns, err)
}
}()
options := NewOptions()
options.Namespace = ns
options.Name = ns
options.ListenAddr = ""
options.NodeName = "test-node"
options.EnableMetrics = false
controllers := options.NewControllerContext(cb)
worker := cvo.NewSyncWorker(&mapPayloadRetriever{}, cvo.NewResourceBuilder(cfg), 5*time.Second, wait.Backoff{Steps: 3}).(*cvo.SyncWorker)
controllers.CVO.SetSyncWorkerForTesting(worker)
lock, err := createResourceLock(cb, ns, ns)
if err != nil {
t.Fatal(err)
}
t.Logf("the controller should create a lock record on a config map")
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
done := make(chan struct{})
go func() {
options.run(ctx, controllers, lock)
close(done)
}()
// wait until the lock record exists
err = wait.PollImmediate(200*time.Millisecond, 60*time.Second, func() (bool, error) {
_, err := kc.Core().ConfigMaps(ns).Get(ns, metav1.GetOptions{})
if err != nil {
if errors.IsNotFound(err) {
return false, nil
}
return false, err
}
return true, nil
})
if err != nil {
t.Fatal(err)
}
t.Logf("verify the controller writes a leadership change event")
events, err := kc.Core().Events(ns).List(metav1.ListOptions{})
if err != nil {
t.Fatal(err)
}
if !hasLeaderEvent(events.Items, ns) {
t.Fatalf("no leader election events found in\n%#v", events.Items)
}
t.Logf("after the context is closed, the lock record should be deleted quickly")
cancel()
startTime := time.Now()
var endTime time.Time
// the lock should be deleted immediately
err = wait.PollImmediate(100*time.Millisecond, 3*time.Second, func() (bool, error) {
_, err := kc.Core().ConfigMaps(ns).Get(ns, metav1.GetOptions{})
if errors.IsNotFound(err) {
endTime = time.Now()
return true, nil
}
if err != nil {
return false, err
}
return false, nil
})
if err != nil {
t.Fatal(err)
}
t.Logf("lock deleted in %s", endTime.Sub(startTime))
select {
case <-time.After(time.Second):
t.Fatalf("controller should exit more quickly")
case <-done:
}
}
// waitForAvailableUpdate checks invariants during an upgrade process. versions is a list of the expected versions that
// should be seen during update, with the last version being the one we wait to see.
func waitForAvailableUpdate(t *testing.T, client clientset.Interface, ns string, allowIncrementalFailure bool, versions ...string) (*configv1.ClusterVersion, error) {
var lastCV *configv1.ClusterVersion
return lastCV, wait.PollImmediate(200*time.Millisecond, 60*time.Second, func() (bool, error) {
cv, err := client.Config().ClusterVersions().Get(ns, metav1.GetOptions{})
if errors.IsNotFound(err) {
return false, nil
}
if err != nil {
return false, err
}
lastCV = cv
if !allowIncrementalFailure {
if failing := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorFailing); failing != nil && failing.Status == configv1.ConditionTrue {
return false, fmt.Errorf("operator listed as failing (%s): %s", failing.Reason, failing.Message)
}
}
// just wait until the operator is available
if len(versions) == 0 {
available := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorAvailable)
return available != nil && available.Status == configv1.ConditionTrue, nil
}
if len(versions) == 1 {
if available := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorAvailable); available == nil || available.Status == configv1.ConditionFalse {
if progressing := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorProgressing); available != nil && (progressing == nil || progressing.Status != configv1.ConditionTrue) {
return false, fmt.Errorf("initializing operator should have progressing if available is false: %#v", progressing)
}
return false, nil
}
if len(cv.Status.History) == 0 {
return false, fmt.Errorf("initializing operator should have history after available goes true")
}
if cv.Status.History[0].Version != versions[len(versions)-1] {
return false, fmt.Errorf("initializing operator should report the target version in history once available")
}
if cv.Status.History[0].State != configv1.CompletedUpdate {
return false, fmt.Errorf("initializing operator should report history completed %#v", cv.Status.History[0])
}
if progressing := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorProgressing); progressing == nil || progressing.Status == configv1.ConditionTrue {
return false, fmt.Errorf("initializing operator should never be available and still progressing or lacking the condition: %#v", progressing)
}
return true, nil
}
if available := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorAvailable); available == nil || available.Status == configv1.ConditionFalse {
return false, fmt.Errorf("upgrading operator should remain available: %#v", available)
}
if !stringInSlice(versions, cv.Status.Desired.Version) {
return false, fmt.Errorf("upgrading operator status reported desired version %s which is not in the allowed set %v", cv.Status.Desired.Version, versions)
}
if len(cv.Status.History) == 0 {
return false, fmt.Errorf("upgrading operator should have at least once history entry")
}
if !stringInSlice(versions, cv.Status.History[0].Version) {
return false, fmt.Errorf("upgrading operator should have a valid history[0] version %s: %v", cv.Status.Desired.Version, versions)
}
if cv.Status.History[0].Version != versions[len(versions)-1] {
return false, nil
}
if failing := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorFailing); failing != nil && failing.Status == configv1.ConditionTrue {
return false, fmt.Errorf("operator listed as failing (%s): %s", failing.Reason, failing.Message)
}
progressing := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorProgressing)
if cv.Status.History[0].State != configv1.CompletedUpdate {
if progressing == nil || progressing.Status != configv1.ConditionTrue {
return false, fmt.Errorf("upgrading operator should have progressing true: %#v", progressing)
}
return false, nil
}
if progressing == nil || progressing.Status != configv1.ConditionFalse {
return false, fmt.Errorf("upgraded operator should have progressing condition false: %#v", progressing)
}
return true, nil
})
}
// waitUntilUpgradeFails checks invariants while an upgrade is expected to fail. versions is a list of the expected versions
// that should be seen during the update, with the last version being the one whose failure (matching failingReason,
// failingMessage and progressingMessage) we wait to see.
func waitUntilUpgradeFails(t *testing.T, client clientset.Interface, ns string, failingReason, failingMessage, progressingMessage string, versions ...string) (*configv1.ClusterVersion, error) {
var lastCV *configv1.ClusterVersion
return lastCV, wait.PollImmediate(200*time.Millisecond, 60*time.Second, func() (bool, error) {
cv, err := client.Config().ClusterVersions().Get(ns, metav1.GetOptions{})
if errors.IsNotFound(err) {
return false, nil
}
if err != nil {
return false, err
}
lastCV = cv
if c := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorAvailable); c == nil || c.Status != configv1.ConditionTrue {
return false, fmt.Errorf("operator should remain available: %#v", c)
}
// just wait until the operator is failing
if len(versions) == 0 {
c := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorFailing)
return c != nil && c.Status == configv1.ConditionTrue, nil
}
// TODO: add a test for initializing to an error state
// if len(versions) == 1 {
// if available := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorAvailable); available == nil || available.Status == configv1.ConditionFalse {
// if progressing := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorProgressing); available != nil && (progressing == nil || progressing.Status != configv1.ConditionTrue) {
// return false, fmt.Errorf("initializing operator should have progressing if available is false: %#v", progressing)
// }
// return false, nil
// }
// if len(cv.Status.History) == 0 {
// return false, fmt.Errorf("initializing operator should have history after available goes true")
// }
// if cv.Status.History[0].Version != versions[len(versions)-1] {
// return false, fmt.Errorf("initializing operator should report the target version in history once available")
// }
// if cv.Status.History[0].State != configv1.CompletedUpdate {
// return false, fmt.Errorf("initializing operator should report history completed %#v", cv.Status.History[0])
// }
// if progressing := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorProgressing); progressing == nil || progressing.Status == configv1.ConditionTrue {
// return false, fmt.Errorf("initializing operator should never be available and still progressing or lacking the condition: %#v", progressing)
// }
// return true, nil
// }
if len(versions) == 1 {
return false, fmt.Errorf("unimplemented")
}
if !stringInSlice(versions, cv.Status.Desired.Version) {
return false, fmt.Errorf("upgrading operator status reported desired version %s which is not in the allowed set %v", cv.Status.Desired.Version, versions)
}
if len(cv.Status.History) == 0 {
return false, fmt.Errorf("upgrading operator should have at least once history entry")
}
if !stringInSlice(versions, cv.Status.History[0].Version) {
return false, fmt.Errorf("upgrading operator should have a valid history[0] version %s: %v", cv.Status.Desired.Version, versions)
}
if cv.Status.History[0].Version != versions[len(versions)-1] {
return false, nil
}
if cv.Status.History[0].State == configv1.CompletedUpdate {
return false, fmt.Errorf("upgrading operator to failed payload should remain partial: %#v", cv.Status.History)
}
failing := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorFailing)
if failing == nil || failing.Status != configv1.ConditionTrue {
return false, nil
}
progressing := resourcemerge.FindOperatorStatusCondition(cv.Status.Conditions, configv1.OperatorProgressing)
if progressing == nil || progressing.Status != configv1.ConditionTrue {
return false, fmt.Errorf("upgrading operator to failed payload should have progressing true: %#v", progressing)
}
if !strings.Contains(failing.Message, failingMessage) {
return false, fmt.Errorf("failure message mismatch: %s", failing.Message)
}
if failing.Reason != failingReason {
return false, fmt.Errorf("failure reason mismatch: %s", failing.Reason)
}
if progressing.Reason != failing.Reason {
return false, fmt.Errorf("failure reason and progressing reason don't match: %s", progressing.Reason)
}
if !strings.Contains(progressing.Message, progressingMessage) {
return false, fmt.Errorf("progressing message mismatch: %s", progressing.Message)
}
return true, nil
})
}
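// stringInSlice reports whether s is contained in slice.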
func stringInSlice(slice []string, s string) bool {
for _, item := range slice {
if s == item {
return true
}
}
return false
}
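// verifyClusterVersionStatus asserts that the cluster version reports the expected desired update, the expected number of
// history entries, and a completed, correctly timestamped most recent history entry, along with a version hash and an
// up-to-date status generation.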
func verifyClusterVersionStatus(t *testing.T, cv *configv1.ClusterVersion, expectedUpdate configv1.Update, expectHistory int) {
t.Helper()
if cv.Status.Desired != expectedUpdate {
t.Fatalf("unexpected: %#v", cv.Status.Desired)
}
if len(cv.Status.History) != expectHistory {
t.Fatalf("unexpected: %#v", cv.Status.History)
}
actual := cv.Status.History[0]
if actual.StartedTime.Time.IsZero() || actual.CompletionTime == nil || actual.CompletionTime.Time.IsZero() || actual.CompletionTime.Time.Before(actual.StartedTime.Time) {
t.Fatalf("unexpected: %s -> %s", actual.StartedTime, actual.CompletionTime)
}
expect := configv1.UpdateHistory{
State: configv1.CompletedUpdate,
Version: expectedUpdate.Version,
Payload: expectedUpdate.Payload,
StartedTime: actual.StartedTime,
CompletionTime: actual.CompletionTime,
}
if !reflect.DeepEqual(expect, actual) {
t.Fatalf("unexpected history: %s", diff.ObjectReflectDiff(expect, actual))
}
if len(cv.Status.VersionHash) == 0 {
t.Fatalf("unexpected version hash: %#v", cv.Status.VersionHash)
}
if cv.Status.Generation != cv.Generation {
t.Fatalf("unexpected generation: %#v", cv.Status.Generation)
}
}
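// verifyReleasePayload checks that both payload config maps carry the expected version and release image.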
func verifyReleasePayload(t *testing.T, kc kubernetes.Interface, ns, version, payload string) {
t.Helper()
verifyReleasePayloadConfigMap1(t, kc, ns, version, payload)
verifyReleasePayloadConfigMap2(t, kc, ns, version, payload)
}
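// verifyReleasePayloadConfigMap1 checks cm/config1, whose releaseImage field is expected to be substituted with the payload image.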
func verifyReleasePayloadConfigMap1(t *testing.T, kc kubernetes.Interface, ns, version, payload string) {
t.Helper()
cm, err := kc.CoreV1().ConfigMaps(ns).Get("config1", metav1.GetOptions{})
if err != nil {
t.Fatalf("unable to find cm/config1 in ns %s: %v", ns, err)
}
if cm.Data["version"] != version || cm.Data["releaseImage"] != payload {
t.Fatalf("unexpected cm/config1 contents: %#v", cm.Data)
}
}
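// verifyReleasePayloadConfigMap2 checks cm/config2, whose releaseImage template is expected to remain unsubstituted.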
func verifyReleasePayloadConfigMap2(t *testing.T, kc kubernetes.Interface, ns, version, payload string) {
t.Helper()
cm, err := kc.CoreV1().ConfigMaps(ns).Get("config2", metav1.GetOptions{})
if err != nil {
t.Fatalf("unable to find cm/config2 in ns %s: %v", ns, err)
}
if cm.Data["version"] != version || cm.Data["releaseImage"] != "{{.ReleaseImage}}" {
t.Fatalf("unexpected cm/config2 contents: %#v", cm.Data)
}
}
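// hasLeaderEvent reports whether a LeaderElection event involving the given name was recorded.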
func hasLeaderEvent(events []v1.Event, name string) bool {
for _, event := range events {
if event.Reason == "LeaderElection" && event.InvolvedObject.Name == name {
return true
}
}
return false
}
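// printCV renders a ClusterVersion object as indented JSON for test log output.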
func printCV(cv *configv1.ClusterVersion) string {
data, err := json.MarshalIndent(cv, "", " ")
if err != nil {
return fmt.Sprintf("<error: %v>", err)
}
return string(data)
}
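// reVariable matches $(NAME) style placeholders that createContent substitutes from the supplied replacement maps.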
var reVariable = regexp.MustCompile(`\$\([a-zA-Z0-9_\-]+\)`)
func TestCreateContentReplacement(t *testing.T) {
replacements := []map[string]string{
{"NS": "other"},
}
in := `Some stuff $(NS) that should be $(NS)`
out := reVariable.ReplaceAllStringFunc(in, func(key string) string {
key = key[2 : len(key)-1]
for _, r := range replacements {
v, ok := r[key]
if !ok {
continue
}
return v
}
return key
})
if out != `Some stuff other that should be other` {
t.Fatal(out)
}
}
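// createContent writes the nested payload map below baseDir, creating a directory for each nested map and applying
// $(NAME) replacements to file contents before writing them.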
func createContent(baseDir string, content map[string]interface{}, replacements ...map[string]string) error {
if err := os.MkdirAll(baseDir, 0750); err != nil {
return err
}
for k, v := range content {
switch t := v.(type) {
case string:
if len(replacements) > 0 {
t = reVariable.ReplaceAllStringFunc(t, func(key string) string {
key = key[2 : len(key)-1]
for _, r := range replacements {
v, ok := r[key]
if !ok {
continue
}
return v
}
return key
})
}
if err := ioutil.WriteFile(filepath.Join(baseDir, k), []byte(t), 0640); err != nil {
return err
}
case map[string]interface{}:
dir := filepath.Join(baseDir, k)
if err := os.Mkdir(dir, 0750); err != nil {
return err
}
if err := createContent(dir, t, replacements...); err != nil {
return err
}
}
}
return nil
}
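// mapPayloadRetriever is a payload retriever stub used by the tests; it maps release image names to local payload directories.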
type mapPayloadRetriever struct {
Paths map[string]string
}
func (r *mapPayloadRetriever) RetrievePayload(ctx context.Context, update configv1.Update) (string, error) {
path, ok := r.Paths[update.Payload]
if !ok {
return "", fmt.Errorf("no payload found for %q", update.Payload)
}
return path, nil
}
| [
"\"TEST_INTEGRATION\"",
"\"TEST_INTEGRATION\"",
"\"TEST_INTEGRATION\""
]
| []
| [
"TEST_INTEGRATION"
]
| [] | ["TEST_INTEGRATION"] | go | 1 | 0 | |
fkie_node_manager/src/fkie_node_manager/master_view_proxy.py | # Software License Agreement (BSD License)
#
# Copyright (c) 2012, Fraunhofer FKIE/US, Alexander Tiderko
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
# * Neither the name of Fraunhofer nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from python_qt_binding import loadUi
from python_qt_binding.QtCore import QRegExp, Qt, QTimer, QSize, Signal
from python_qt_binding.QtGui import QKeySequence # , QBrush, QPen
from rosgraph.names import is_legal_name
import getpass
import os
import roslib
import rospy
import ruamel.yaml
import socket
import time
import traceback
import threading
import uuid
try:
import xmlrpclib as xmlrpcclient
except ImportError:
import xmlrpc.client as xmlrpcclient
from fkie_master_discovery.common import masteruri_from_ros
from fkie_master_discovery.master_info import NodeInfo
from fkie_node_manager_daemon.common import interpret_path, sizeof_fmt, isstring, utf8
from fkie_node_manager_daemon.host import get_hostname, get_port
from fkie_node_manager_daemon import exceptions
from fkie_node_manager_daemon import url as nmdurl
from fkie_node_manager_daemon import screen
from fkie_node_manager_daemon.version import detect_version
from .common import package_name
from .detailed_msg_box import MessageBox, DetailedError
from .echo_dialog import EchoDialog
from .html_delegate import HTMLDelegate
from .launch_config import LaunchConfig # , LaunchConfigException
from .launch_enhanced_line_edit import EnhancedLineEdit
from .launch_server_handler import LaunchServerHandler
from .message_frame import MessageData, MessageFrame
from .node_tree_model import NodeTreeModel, NodeItem, GroupItem, HostItem, NodeInfoIconsDelegate
from .parameter_dialog import ParameterDialog, MasterParameterDialog, ServiceDialog
from .parameter_handler import ParameterHandler
from .parameter_list_model import ParameterModel, ParameterNameItem, ParameterValueItem
from .progress_queue import ProgressQueue # , InteractionNeededError, ProgressThread
from .select_dialog import SelectDialog
from .service_list_model import ServiceModel, ServiceItem, ServiceGroupItem
from .supervised_popen import SupervisedPopen
from .topic_list_model import TopicModel, TopicItem, TopicGroupItem
import fkie_node_manager as nm
try:
from python_qt_binding.QtGui import QAction, QFileDialog, QFrame, QMenu, QShortcut, QWidget
from python_qt_binding.QtGui import QApplication, QVBoxLayout
except Exception:
from python_qt_binding.QtWidgets import QAction, QFileDialog, QFrame, QMenu, QShortcut, QWidget
from python_qt_binding.QtWidgets import QApplication, QVBoxLayout
try:
from python_qt_binding.QtGui import QItemSelection, QItemSelectionModel, QItemSelectionRange, QSortFilterProxyModel
except Exception:
from python_qt_binding.QtCore import QItemSelection, QItemSelectionModel, QItemSelectionRange, QSortFilterProxyModel
# from python_qt_binding import QtUiTools
try:
from diagnostic_msgs.msg import DiagnosticStatus
DIAGNOSTICS_AVAILABLE = True
except Exception:
DIAGNOSTICS_AVAILABLE = False
class MasterViewProxy(QWidget):
'''
This class stores the information about a ROS master and shows it on request.
'''
updateHostRequest = Signal(str)
host_description_updated = Signal(str, str, str)
''':ivar str,str,str host_description_updated: the signal is emitted on description changes and contains the ROS Master URI, host address and description as parameters.'''
capabilities_update_signal = Signal(str, str, str, list)
''':ivar str,str,str,list(fkie_node_manager_daemon.launch_description.RobotDescription) capabilities_update_signal: the signal is emitted if a description with capabilities is received and has the ROS master URI, host address, the name of the config and a list with descriptions.'''
remove_config_signal = Signal(str)
''':ivar str remove_config_signal: the signal is emitted if a default_cfg was removed'''
description_signal = Signal(str, str, bool)
''':ivar str,str,bool description_signal: the signal is emitted to show a description (title, description)'''
request_xml_editor = Signal(str, str)
''':ivar str,str request_xml_editor: the signal to open an XML editor dialog (configuration path, search text)'''
stop_nodes_signal = Signal(str, list)
''':ivar str,list(str) stop_nodes_signal: the signal is emitted to stop, on the given masteruri, the nodes described in the list.'''
robot_icon_updated = Signal(str, str)
''':ivar str, str robot_icon_updated: the signal is emitted, if the robot icon was changed by a configuration (masteruri, path)'''
loaded_config = Signal(object, list)
''':ivar LaunchConfig,list(str) loaded_config: the signal is emitted after a launchfile is successfully loaded (LaunchConfig, [changed nodes (str)])'''
DIAGNOSTIC_LEVELS = {0: 'OK',
1: 'WARN',
2: 'ERROR',
3: 'STALE',
4: 'UNKNOWN',
5: 'UNKNOWN'}
def __init__(self, masteruri, parent=None):
'''
Creates a new master.
:param str masteruri: the URI of the ROS master
'''
QWidget.__init__(self, parent)
self.setObjectName(' - '.join(['MasterViewProxy', masteruri]))
self.masteruri = masteruri
self.mastername = masteruri
self.main_window = parent
try:
self.mastername = get_hostname(self.masteruri)
except Exception:
pass
self._tmpObjects = []
self.__master_state = None
self.__master_info = None
self.__force_update = False
self.__configs = dict() # file name (str): LaunchConfig
self._loaded_args = dict()
# self.__config_mtimes = dict() # file name (str): mtime(float)
# self.__config_includes = dict() # file name (str): dict(included file(str): mtime(float)))
self.__expanded_items = dict() # [file name] : list of expanded nodes
self.__online = False
self.__run_id = ''
# self.rosconfigs = dict() # [launch file path] = LaunchConfig()
self.__in_question = [] # stores the changed files until the user has interacted
# self.__uses_confgs = dict() # stores the decisions of the user for used configuration to start of node
''':ivar list(str) __in_question: stores the question dialogs for changed files '''
self._stop_ignores = ['rosout', rospy.get_name(), 'node_manager', 'node_manager_daemon', 'master_discovery', 'master_sync', 'default_cfg', 'zeroconf']
self.__echo_topics_dialogs = set() # set with subscribed topics
self.__last_info_text = None
self.__use_sim_time = False
self.__current_user = nm.settings().host_user(self.mastername)
self.__daemon_user = ''
self.__robot_icons = []
self.__current_robot_icon = None
self.__current_parameter_robot_icon = ''
self.__republish_params = {} # { topic : params, created by dialog}
self.__current_icon_height = 8
self.__last_selection = 0
self.__last_node_activation = 0
self.__last_question_start_nmd = 0
self._on_stop_kill_roscore = False
self._on_stop_poweroff = False
self._start_nodes_after_load_cfg = dict()
self._cfg_changed_nodes = dict()
self._stored_diagnostic_messages = dict() # dict(time in seconds: diagnostic_status)
# store the running_nodes to update to duplicates after load a launch file
self.__running_nodes = dict() # dict (node name : masteruri)
self._nodelets = dict() # dict(launchfile: dict(nodelet manager: list(nodes))
self._associations_lock = threading.RLock()
self._associations = dict() # dict(launchfile: dict(node: list(nodes))
self._first_launch = True
self._has_nmd = False
self._changed_binaries = dict()
self.default_load_launch = ''
self._nmd_version, self._nmd_date = detect_version('fkie_node_manager_daemon')
self._diag_nmd_version = None
self._diag_log_dir_size = None
self._timer_nmd_request = QTimer()
self._timer_nmd_request.timeout.connect(self._sysmon_update_callback)
self._timer_nmd_request.setSingleShot(True)
self._ts_last_diagnostic_request = 0
self._has_diagnostics = False
self.has_master_sync = False
# self.default_cfg_handler = DefaultConfigHandler()
# self.default_cfg_handler.node_list_signal.connect(self.on_default_cfg_nodes_retrieved)
# self.default_cfg_handler.description_signal.connect(self.on_default_cfg_descr_retrieved)
# self.default_cfg_handler.err_signal.connect(self.on_default_cfg_err)
self.__launch_servers = {} # uri : (pid, nodes)
self.launch_server_handler = LaunchServerHandler()
self.launch_server_handler.launch_server_signal.connect(self.on_launch_server_retrieved)
self.launch_server_handler.error_signal.connect(self.on_launch_server_err)
self.ui = QWidget()
ui_file = os.path.join(os.path.dirname(os.path.realpath(__file__)), 'ui', 'MasterTab.ui')
loadUi(ui_file, self.ui, custom_widgets={'EnhancedLineEdit': EnhancedLineEdit})
tabLayout = QVBoxLayout(self)
tabLayout.setContentsMargins(0, 0, 0, 0)
tabLayout.addWidget(self.ui)
# set icons
self.ui.progressCancelPrioButton.setIcon(nm.settings().icon('crystal_clear_button_close.png'))
self.ui.progressCancelButton.setIcon(nm.settings().icon('crystal_clear_button_close.png'))
self.ui.startButton.setIcon(nm.settings().icon('sekkyumu_play.png'))
self.ui.stopButton.setIcon(nm.settings().icon('sekkyumu_stop.png'))
self.ui.ioButton.setIcon(nm.settings().icon('crystal_clear_show_io.png'))
self.ui.logButton.setIcon(nm.settings().icon('crystal_clear_show_log.png'))
self.ui.logDeleteButton.setIcon(nm.settings().icon('crystal_clear_show_delete_log.png'))
self.ui.dynamicConfigButton.setIcon(nm.settings().icon('crystal_clear_dyncfg.png'))
self.ui.editRosParamButton.setIcon(nm.settings().icon('default_cfg_edit.png'))
self.ui.editConfigButton.setIcon(nm.settings().icon('crystal_clear_edit_launch.png'))
self.ui.closeCfgButton.setIcon(nm.settings().icon('crystal_clear_button_close.png'))
self.ui.echoTopicButton.setIcon(nm.settings().icon('sekkyumu_topic_echo.png'))
self.ui.hzTopicButton.setIcon(nm.settings().icon('sekkyumu_topic_hz.png'))
self.ui.hzSshTopicButton.setIcon(nm.settings().icon('sekkyumu_topic_echo_ssh_hz.png'))
self.ui.pubTopicButton.setIcon(nm.settings().icon('sekkyumu_topic_pub.png'))
self.ui.pubStopTopicButton.setIcon(nm.settings().icon('sekkyumu_topic_pub_stop.png'))
self.ui.callServiceButton.setIcon(nm.settings().icon('sekkyumu_call_service.png'))
self.ui.getParameterButton.setIcon(nm.settings().icon('crystal_clear_action_db_update.png'))
self.ui.addParameterButton.setIcon(nm.settings().icon('crystal_clear_action_db_add.png'))
self.ui.deleteParameterButton.setIcon(nm.settings().icon('crystal_clear_action_db_remove.png'))
self.ui.saveParameterButton.setIcon(nm.settings().icon('save.png'))
self.ui.transferParameterButton.setIcon(nm.settings().icon('crystal_clear_launch_file_transfer.png'))
self._progress_queue_prio = ProgressQueue(self.ui.progressPrioFrame, self.ui.progressPrioBar, self.ui.progressCancelPrioButton, 'Prio Master - %s' % self.mastername)
self._progress_queue = ProgressQueue(self.ui.progressFrame, self.ui.progressBar, self.ui.progressCancelButton, 'Master - %s' % self.mastername)
self._progress_queue_prio.no_screen_error_signal.connect(self._on_no_screen_error)
self._progress_queue.no_screen_error_signal.connect(self._on_no_screen_error)
# setup the node view
self.node_tree_model = NodeTreeModel(nm.nameres().address(self.masteruri), self.masteruri)
self.node_proxy_model = NodesSortFilterProxyModel(self)
self.node_proxy_model.setSourceModel(self.node_tree_model)
self.ui.nodeTreeView.setModel(self.node_proxy_model)
self.node_tree_model.hostInserted.connect(self.on_host_inserted)
for i, (_, width) in enumerate(NodeTreeModel.header): # _:=name
self.ui.nodeTreeView.setColumnWidth(i, width)
check_for_ros_names = not nm.settings().group_nodes_by_namespace
self.nodeNameDelegate = HTMLDelegate(check_for_ros_names=check_for_ros_names, dec_ascent=True, is_node=True, palette=self.palette())
self.ui.nodeTreeView.setItemDelegateForColumn(0, self.nodeNameDelegate)
self.node_delegate = NodeInfoIconsDelegate()
self.ui.nodeTreeView.setItemDelegateForColumn(1, self.node_delegate)
# self.ui.nodeTreeView.collapsed.connect(self.on_node_collapsed)
self.ui.nodeTreeView.expanded.connect(self.on_node_expanded)
sm = self.ui.nodeTreeView.selectionModel()
sm.selectionChanged.connect(self.on_node_selection_changed)
self.ui.nodeTreeView.activated.connect(self.on_node_activated)
self.ui.nodeTreeView.clicked.connect(self.on_node_clicked)
# self.ui.nodeTreeView.setAcceptDrops(True)
# self.ui.nodeTreeWidget.setSortingEnabled(True)
# setup the topic view
self.topic_model = TopicModel()
self.topic_proxyModel = TopicsSortFilterProxyModel(self)
self.topic_proxyModel.setSourceModel(self.topic_model)
self.ui.topicsView.setModel(self.topic_proxyModel)
self.ui.topicsView.expandAll()
self.ui.topicsView.sortByColumn(0, Qt.AscendingOrder)
# self.ui.topicsView.setModel(self.topic_model)
for i, (_, width) in enumerate(TopicModel.header): # _:=name
self.ui.topicsView.setColumnWidth(i, width)
self.topicNameDelegate = HTMLDelegate(check_for_ros_names=check_for_ros_names, dec_ascent=True, is_node=True, palette=self.palette())
self.topicTypeDelegate = HTMLDelegate(dec_ascent=True)
self.ui.topicsView.setItemDelegateForColumn(0, self.topicNameDelegate)
self.ui.topicsView.setItemDelegateForColumn(3, self.topicTypeDelegate)
sm = self.ui.topicsView.selectionModel()
sm.selectionChanged.connect(self.on_topic_selection_changed)
self.ui.topicsView.activated.connect(self.on_topic_activated)
self.ui.topicsView.clicked.connect(self.on_topic_clicked)
self.ui.topicsView.setSortingEnabled(True)
# setup the service view
self.service_model = ServiceModel()
self.service_proxyModel = ServicesSortFilterProxyModel(self)
self.service_proxyModel.setSourceModel(self.service_model)
self.ui.servicesView.setModel(self.service_proxyModel)
self.ui.servicesView.expandAll()
self.ui.servicesView.sortByColumn(0, Qt.AscendingOrder)
for i, (_, width) in enumerate(ServiceModel.header): # _:=name
self.ui.servicesView.setColumnWidth(i, width)
self.serviceNameDelegate = HTMLDelegate(check_for_ros_names=check_for_ros_names, dec_ascent=True, is_node=True, palette=self.palette())
self.serviceTypeDelegate = HTMLDelegate(dec_ascent=True)
self.ui.servicesView.setItemDelegateForColumn(0, self.serviceNameDelegate)
self.ui.servicesView.setItemDelegateForColumn(1, self.serviceTypeDelegate)
sm = self.ui.servicesView.selectionModel()
sm.selectionChanged.connect(self.on_service_selection_changed)
self.ui.servicesView.activated.connect(self.on_service_activated)
self.ui.servicesView.clicked.connect(self.on_service_clicked)
self.ui.servicesView.setSortingEnabled(True)
# setup the parameter view
self.parameter_model = ParameterModel()
self.parameter_model.itemChanged.connect(self._on_parameter_item_changed)
self.parameter_proxyModel = ParameterSortFilterProxyModel(self)
self.parameter_proxyModel.setSourceModel(self.parameter_model)
self.ui.parameterView.setModel(self.parameter_proxyModel)
for i, (_, width) in enumerate(ParameterModel.header): # _:=name
self.ui.parameterView.setColumnWidth(i, width)
self.parameterNameDelegate = HTMLDelegate(dec_ascent=True, palette=self.palette())
self.ui.parameterView.setItemDelegateForColumn(0, self.parameterNameDelegate)
sm = self.ui.parameterView.selectionModel()
sm.selectionChanged.connect(self.on_parameter_selection_changed)
self.ui.parameterView.setSortingEnabled(True)
# self.parameter_proxyModel.filterAcceptsRow = _filterParameterAcceptsRow
# self.ui.parameterView.activated.connect(self.on_service_activated)
# connect the buttons
self.ui.startButton.clicked.connect(self.on_start_clicked)
self.ui.stopButton.clicked.connect(self.on_stop_clicked)
# self.ui.stopContextButton.toggled.connect(self.on_stop_context_toggled)
self.ui.ioButton.clicked.connect(self.on_io_clicked)
self.ui.logButton.clicked.connect(self.on_log_clicked)
self.ui.logDeleteButton.clicked.connect(self.on_log_delete_clicked)
self.ui.dynamicConfigButton.clicked.connect(self.on_dynamic_config_clicked)
self.ui.editConfigButton.clicked.connect(self.on_edit_config_clicked)
self.ui.editRosParamButton.clicked.connect(self.on_edit_rosparam_clicked)
self.ui.closeCfgButton.clicked.connect(self.on_close_clicked)
self.ui.echoTopicButton.clicked.connect(self.on_topic_echo_clicked)
self.ui.hzTopicButton.clicked.connect(self.on_topic_hz_clicked)
self.ui.hzSshTopicButton.clicked.connect(self.on_topic_hz_ssh_clicked)
self.ui.pubTopicButton.clicked.connect(self.on_topic_pub_clicked)
self.ui.pubStopTopicButton.clicked.connect(self.on_topic_pub_stop_clicked)
self.ui.callServiceButton.clicked.connect(self.on_service_call_clicked)
self.ui.nodeFilterInput.textChanged.connect(self.on_node_filter_changed)
self.ui.topicFilterInput.textChanged.connect(self.on_topic_filter_changed)
self.ui.serviceFilterInput.textChanged.connect(self.on_service_filter_changed)
self.ui.parameterFilterInput.textChanged.connect(self.on_parameter_filter_changed)
self.ui.getParameterButton.clicked.connect(self.on_get_parameter_clicked)
self.ui.addParameterButton.clicked.connect(self.on_add_parameter_clicked)
self.ui.deleteParameterButton.clicked.connect(self.on_delete_parameter_clicked)
self.ui.saveParameterButton.clicked.connect(self.on_save_parameter_clicked)
self.ui.transferParameterButton.clicked.connect(self.on_transfer_parameter_clicked)
# create a handler to request the parameter
self.parameterHandler = ParameterHandler()
self.parameterHandler.parameter_list_signal.connect(self._on_param_list)
self.parameterHandler.parameter_values_signal.connect(self._on_param_values)
self.parameterHandler.delivery_result_signal.connect(self._on_delivered_values)
# create a handler to request sim parameter
self.parameterHandler_sim = ParameterHandler()
# self.parameterHandler_sim.parameter_list_signal.connect(self._on_param_list)
self.parameterHandler_sim.parameter_values_signal.connect(self._on_sim_param_values)
# self.parameterHandler_sim.delivery_result_signal.connect(self._on_delivered_values)
self._shortcut_kill_node = QShortcut(QKeySequence(self.tr("Ctrl+Backspace", "Kill selected node")), self)
self._shortcut_kill_node.activated.connect(self.on_kill_nodes)
self._shortcut_kill_node = QShortcut(QKeySequence(self.tr("Ctrl+Delete", "Removes the registration of selected nodes from ROS master")), self)
self._shortcut_kill_node.activated.connect(self.on_unregister_nodes)
self.ui.ioButton.setEnabled(True)
self.ui.tabWidget.currentChanged.connect(self.on_tab_current_changed)
self._shortcut_screen_show_all = QShortcut(QKeySequence(self.tr("Shift+S", "Show all available screens")), self)
self._shortcut_screen_show_all.activated.connect(self.on_show_all_screens)
self._shortcut_screen_kill = QShortcut(QKeySequence(self.tr("Shift+Backspace", "Kill Screen")), self)
self._shortcut_screen_kill.activated.connect(self.on_kill_screens)
self.loaded_config.connect(self._apply_launch_config)
nm.nmd().launch.mtimes.connect(self._apply_mtimes)
nm.nmd().launch.changed_binaries.connect(self._apply_changed_binaries)
nm.nmd().launch.launch_nodes.connect(self.on_launch_description_retrieved)
nm.nmd().version.version_signal.connect(self.on_nmd_version_retrieved)
nm.nmd().screen.log_dir_size_signal.connect(self.on_log_dir_retrieved)
# set the shortcuts
self._shortcut1 = QShortcut(QKeySequence(self.tr("Alt+1", "Select first group")), self)
self._shortcut1.activated.connect(self.on_shortcut1_activated)
self._shortcut2 = QShortcut(QKeySequence(self.tr("Alt+2", "Select second group")), self)
self._shortcut2.activated.connect(self.on_shortcut2_activated)
self._shortcut3 = QShortcut(QKeySequence(self.tr("Alt+3", "Select third group")), self)
self._shortcut3.activated.connect(self.on_shortcut3_activated)
self._shortcut4 = QShortcut(QKeySequence(self.tr("Alt+4", "Select fourth group")), self)
self._shortcut4.activated.connect(self.on_shortcut4_activated)
self._shortcut5 = QShortcut(QKeySequence(self.tr("Alt+5", "Select fifth group")), self)
self._shortcut5.activated.connect(self.on_shortcut5_activated)
self._shortcut_collapse_all = QShortcut(QKeySequence(self.tr("Alt+C", "Collapse all groups")), self)
self._shortcut_collapse_all.activated.connect(self.on_shortcut_collapse_all)
self._shortcut_expand_all = QShortcut(QKeySequence(self.tr("Alt+E", "Expand all groups")), self)
self._shortcut_expand_all.activated.connect(self.ui.nodeTreeView.expandAll)
self._shortcut_run = QShortcut(QKeySequence(self.tr("Alt+R", "run selected nodes")), self)
self._shortcut_run.activated.connect(self.on_start_clicked)
self._shortcut_stop = QShortcut(QKeySequence(self.tr("Alt+S", "stop selected nodes")), self)
self._shortcut_stop.activated.connect(self.on_stop_clicked)
self.message_frame = MessageFrame()
self.ui.questionFrameLayout.addWidget(self.message_frame.ui)
self.message_frame.accept_signal.connect(self._on_question_ok)
self.message_frame.cancel_signal.connect(self._on_question_cancel)
self.info_frame = MessageFrame(info=True)
self.ui.infoFrameLayout.addWidget(self.info_frame.ui)
self.info_frame.accept_signal.connect(self._on_info_ok)
nm.nmd().file.changed_file.connect(self.on_changed_file)
nm.nmd().screen.multiple_screens.connect(self.on_multiple_screens)
self._sysmon_timer = None
self._sysmon_enabled = False
self._sysmon_timer_idle = QTimer()
self._sysmon_timer_idle.timeout.connect(self._sysmon_update_callback)
self._sysmon_timer_idle.start(nm.settings().sysmon_default_interval * 1000)
# self._shortcut_copy = QShortcut(QKeySequence(self.tr("Ctrl+C", "copy selected values to clipboard")), self)
# self._shortcut_copy.activated.connect(self.on_copy_c_pressed)
self._shortcut_copy = QShortcut(QKeySequence(self.tr("Ctrl+X", "copy selected alternative values to clipboard")), self)
self._shortcut_copy.activated.connect(self.on_copy_x_pressed)
self.ui.controlNodesFrame.resizeEvent = self.resizeEventButtons
# print "================ create", self.objectName()
#
# def __del__(self):
# print " Destroy mester view proxy", self.objectName(), " ..."
# print " ", self.objectName(), "destroyed"
def closeEvent(self, event):
print(' Shutdown master %s ...' % self.masteruri)
if self._sysmon_timer is not None:
self._sysmon_timer.stop()
self._sysmon_timer_idle.stop()
nm.nmd().file.changed_file.disconnect(self.on_changed_file)
nm.nmd().screen.multiple_screens.disconnect(self.on_multiple_screens)
self.launch_server_handler.stop()
self._progress_queue_prio.stop()
self._progress_queue.stop()
if self._on_stop_kill_roscore:
self.killall_roscore()
QWidget.closeEvent(self, event)
print(' Master %s is down!' % self.masteruri)
def stop_echo_dialogs(self):
# stop launched echo dialogs
if self.__echo_topics_dialogs:
self.stop_nodes_by_name(self.__echo_topics_dialogs, only_local=False)
self.__echo_topics_dialogs.clear()
def resizeEventButtons(self, event):
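# scale the control button icons so that all buttons fit into the current height of the control frame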
ch_height = 0
increment = 4
min_spacer_size = 8
const_size = 10 * self.ui.verticalLayout_4.spacing() + min_spacer_size + self.ui.line2_2.size().height() + self.ui.line1_2.size().height()
button_size = 9 * (self.ui.startButton.size().height() - self.__current_icon_height)
while (ch_height + increment) * 10 + const_size + button_size <= self.ui.controlNodesFrame.size().height() and ch_height < 32:
ch_height += increment
if ch_height < 8:
ch_height = 8
if ch_height != self.__current_icon_height:
self.__current_icon_height = ch_height
new_size = QSize(self.__current_icon_height, self.__current_icon_height)
self.ui.startButton.setIconSize(new_size)
self.ui.stopButton.setIconSize(new_size)
self.ui.ioButton.setIconSize(new_size)
self.ui.logButton.setIconSize(new_size)
self.ui.logDeleteButton.setIconSize(new_size)
self.ui.dynamicConfigButton.setIconSize(new_size)
self.ui.editConfigButton.setIconSize(new_size)
self.ui.editRosParamButton.setIconSize(new_size)
self.ui.closeCfgButton.setIconSize(new_size)
self.ui.echoTopicButton.setIconSize(new_size)
self.ui.hzTopicButton.setIconSize(new_size)
self.ui.hzSshTopicButton.setIconSize(new_size)
self.ui.pubTopicButton.setIconSize(new_size)
self.ui.pubStopTopicButton.setIconSize(new_size)
self.ui.callServiceButton.setIconSize(new_size)
self.ui.getParameterButton.setIconSize(new_size)
self.ui.addParameterButton.setIconSize(new_size)
self.ui.deleteParameterButton.setIconSize(new_size)
self.ui.saveParameterButton.setIconSize(new_size)
self.ui.transferParameterButton.setIconSize(new_size)
QFrame.resizeEvent(self, event)
@property
def current_user(self):
return self.__current_user
@current_user.setter
def current_user(self, user):
self.__current_user = user
nm.settings().set_host_user(self.mastername, user)
@property
def daemon_user(self):
return self.__daemon_user
@daemon_user.setter
def daemon_user(self, user):
self.__daemon_user = user
if user != self.current_user:
self.message_frame.show_question(MessageFrame.TYPE_NMD_RESTART, "node_manager_daemon is running with a different user: '%s'.\nMany features may not function properly!\n\nRestart with user '%s'?\n\nNote: you should first close all open editors related to this host!" % (user, self.current_user), MessageData(self.masteruri))
else:
self.message_frame.hide_question([MessageFrame.TYPE_NMD_RESTART])
@property
def is_local(self):
return nm.is_local(get_hostname(self.masteruri), wait=False)
@property
def online(self):
'''
Online means that the master is discovered and its master_info was received.
'''
return self.__online
@online.setter
def online(self, state):
self.__online = state
self._start_queue(self._progress_queue)
self._start_queue(self._progress_queue_prio)
@property
def master_state(self):
return self.__master_state
@master_state.setter
def master_state(self, master_state):
self.__master_state = master_state
@property
def master_info(self):
return self.__master_info
@master_info.setter
def master_info(self, master_info):
'''
Sets the new master information. To determine whether a node is running, its
PID and URI are needed. The PID of remote nodes (where the hosts of the ROS master
and of the node differ) is not determined by discovery. Thus this
information must be obtained from another MasterInfo object and stored while
updating.
:param master_info: the master information object
:type master_info: :class:`fkie_master_discovery.msg.MasterInfo` <http://docs.ros.org/kinetic/api/fkie_master_discovery/html/modules.html#module-fkie_master_discovery.master_info>
'''
try:
update_result = (set(), set(), set(), set(), set(), set(), set(), set(), set())
my_masterinfo = nmdurl.equal_uri(master_info.masteruri, self.masteruri)
if self.__master_info is None:
if my_masterinfo:
self.__master_info = master_info
update_result[0].update(self.__master_info.node_names)
update_result[3].update(self.__master_info.topic_names)
update_result[6].update(self.__master_info.service_names)
else:
update_result = self.__master_info.updateInfo(master_info)
# print "MINFO", self.__master_info.listedState()
# we receive the master info from remote nodes first -> skip
if self.__master_info is None:
return
if my_masterinfo:
nmd_node = master_info.getNode('/node_manager_daemon')
if nmd_node is None: # do not test for PID. It can be None if the daemon is busy loading big launch files
self._has_nmd = False
if time.time() - self.__last_question_start_nmd > 10.:
self.__last_question_start_nmd = time.time()
if not self.is_local:
self.message_frame.show_question(MessageFrame.TYPE_NMD, "node_manager_daemon not found for '%s'.\nShould it be started?" % self.masteruri, MessageData(self.masteruri))
else:
self._on_question_ok(MessageFrame.TYPE_NMD, MessageData(self.masteruri))
else:
self.message_frame.hide_question([MessageFrame.TYPE_NMD])
if not self._timer_nmd_request.isActive():
timeout = 2000 if not self._has_nmd else 200
self._timer_nmd_request.start(timeout)
self._has_nmd = True
self.perform_nmd_requests()
try:
if my_masterinfo:
self.update_system_parameter()
self.online = True
# request the info of new remote nodes
hosts2update = set([get_hostname(self.__master_info.getNode(nodename).uri) for nodename in update_result[0]])
hosts2update.update([get_hostname(self.__master_info.getService(nodename).uri) for nodename in update_result[6]])
for host in hosts2update:
if host != get_hostname(self.masteruri):
self.updateHostRequest.emit(host)
except Exception:
pass
# cputimes = os.times()
# cputime_init = cputimes[0] + cputimes[1]
# update nodes in the model
if update_result[0] or update_result[1] or update_result[2] or self.__force_update:
self._update_running_nodes_in_model(self.__master_info)
self.on_node_selection_changed(None, None)
# update diagnostics
new_diagnostic_dict = {}
now = time.time()
for sec, ds in self._stored_diagnostic_messages.items():
added = self.append_diagnostic(ds, False)
if not added and now - sec < 10:
new_diagnostic_dict[sec] = ds
self._stored_diagnostic_messages = new_diagnostic_dict
# Updates the topic view based on the current master information.
if update_result[3] or update_result[4] or update_result[5] or self.__force_update:
self.topic_model.update_model_data(self.__master_info.topics, update_result[3], update_result[4], update_result[5])
self.on_topic_selection_changed(None, None)
# Updates the service view based on the current master information.
if update_result[6] or update_result[7] or update_result[8] or self.__force_update:
self.service_model.update_model_data(self.__master_info.services, update_result[6], update_result[7], update_result[8])
self.on_service_selection_changed(None, None)
# update the default configuration
# self.updateDefaultConfigs(self.__master_info)
self.__force_update = False
# cputimes = os.times()
# cputime = cputimes[0] + cputimes[1] - cputime_init
# print " update on ", self.__master_info.mastername if not self.__master_info is None else self.__master_state.name, cputime
except Exception:
print(traceback.format_exc(3))
def perform_nmd_requests(self):
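# request launch descriptions, daemon version, log directory size and the daemon user from the node manager daemon and trigger a diagnostics update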
nmd_uri = nmdurl.nmduri(self.masteruri)
if self._has_nmd:
# only try to get updates from daemon if it is running
nm.nmd().launch.get_nodes_threaded(nmd_uri, self.masteruri)
# self.set_diagnostic_ok('/node_manager_daemon')
nm.nmd().version.get_version_threaded(nmd_uri)
nm.nmd().screen.log_dir_size_threaded(nmd_uri)
nm.nmd().monitor.get_user_threaded(nmd_uri)
self.perform_diagnostic_requests(force=True)
def is_valid_user_master_daemon(self):
if self.__daemon_user:
return self.__daemon_user == self.current_user
return True
def _start_queue(self, queue):
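# start the progress queue only if the master is online and its master_info was already received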
if self.online and self.master_info is not None and isinstance(queue, ProgressQueue):
queue.start()
@property
def use_sim_time(self):
return self.__use_sim_time
def in_process(self):
return self._progress_queue.count() > 0 or self._progress_queue_prio.count() > 0
def force_next_update(self):
self.__force_update = True
def update_system_parameter(self):
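# request global ROS parameters (run id, sim time, robot icon, roslaunch URIs) to update the master state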
self.parameterHandler_sim.requestParameterValues(self.masteruri, ["/run_id", "/use_sim_time", "/robot_icon", "/roslaunch/uris"])
def set_duplicate_nodes(self, running_nodes):
'''
Marks all nodes which are not running but contained in the given list as duplicate nodes.
:param list(str) running_nodes: The list with names of running nodes
'''
# store the running_nodes to update to duplicates after load a launch file
self.__running_nodes = running_nodes
self.node_tree_model.set_duplicate_nodes(running_nodes, (self.master_info is not None and self.master_info.getNodeEndsWith('master_sync')))
def get_nodes_runningIfSync(self):
'''
Returns the list of all running nodes which are registered with this ROS
master, including nodes which are physically running on remote hosts.
:return: The list with names of running nodes
:rtype: list(str)
'''
if self.master_info is not None and self.master_info.getNodeEndsWith('master_sync'):
return self.master_info.node_names
return []
def get_nodes_runningIfLocal(self, remove_system_nodes=False):
'''
Returns the dictionary of all nodes which are running (have a process) on this host.
Nodes registered with this ROS master but running on remote hosts are not
returned.
@return: The dictionary with names of running nodes and their masteruri
@rtype: C{dict(str:str)}
'''
result = dict()
if self.master_info is not None:
for _, node in self.master_info.nodes.items(): # _:=name
if node.isLocal:
if remove_system_nodes or not self._is_in_ignore_list(node.name):
result[node.name] = self.master_info.masteruri
return result
def _update_running_nodes_in_model(self, master_info):
'''
Creates the dictionary with ExtendedNodeInfo objects and updates the nodes view.
:param master_info: the master information object
:type master_info: :class:`fkie_master_discovery.msg.MasterInfo` <http://docs.ros.org/kinetic/api/fkie_master_discovery/html/modules.html#module-fkie_master_discovery.master_info>
'''
if master_info is not None:
updated_nodes = self.node_tree_model.update_model_data(master_info.nodes, master_info.masteruri)
if updated_nodes:
for node in updated_nodes:
self.main_window.screen_dock.update_node(node)
self.updateButtons()
def getNode(self, node_name):
'''
:param str node_name: The name of the node.
        :return: The list of nodes with the given name.
:rtype: list(str)
'''
return self.node_tree_model.get_tree_node("%s" % node_name, self.masteruri)
def updateButtons(self, selected_nodes=None):
'''
        Updates the enable state of the buttons depending on the selection and
        running state of the selected nodes.
'''
selectedNodes = selected_nodes
if selectedNodes is None:
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
has_running = False
has_stopped = False
for node in selectedNodes:
if node.uri is not None:
has_running = True
else:
has_stopped = True
self.ui.startButton.setEnabled(True)
self.ui.stopButton.setEnabled(True)
# self.ui.ioButton.setEnabled(has_running or has_stopped)
self.ui.logButton.setEnabled(True)
# self.ui.logButton.setEnabled(has_running or has_stopped)
self.ui.logDeleteButton.setEnabled(has_running or has_stopped)
# test for available dynamic reconfigure services
if self.master_info is not None:
dyn_cfg_available = False
for n in selectedNodes:
for srv_name, srv in self.master_info.services.items():
if (srv_name.endswith('/set_parameters')) and n.name in srv.serviceProvider:
dyn_cfg_available = True
break
self.ui.dynamicConfigButton.setEnabled(dyn_cfg_available)
# the configuration is only available, if only one node is selected
self.ui.editConfigButton.setEnabled(len(selectedNodes) == 1 and selectedNodes[0].has_configs())
self.ui.editRosParamButton.setEnabled(len(selectedNodes) == 1)
# enable the close button only for local configurations
self.ui.closeCfgButton.setEnabled(True)
# self.ui.closeCfgButton.setEnabled(len([path for path, _ in self.__configs.items() if (isinstance(path, tuple) and path[2] == self.masteruri) or not isinstance(path, tuple)]) > 0) #_:=cfg
@property
def launchfiles(self):
'''
Returns the copy of the dictionary with loaded launch files on this host
:rtype: dict(str(file) : LaunchConfig)
'''
result = dict()
for (c, cfg) in self.__configs.items():
if not isinstance(c, tuple):
result[c] = cfg
return result
@launchfiles.setter
def launchfiles(self, launchfile):
'''
Loads the launch file. If this file is already loaded, it will be reloaded.
        After a successful load the node view will be updated.
:param launchfile: the launch file path
:type launchfile: str or tuple(launchfile, dictionary with args)
'''
lfile = launchfile
args = {}
if isinstance(launchfile, tuple):
lfile, args = launchfile
self._progress_queue_prio.add2queue(utf8(uuid.uuid4()),
'Loading %s' % os.path.basename(lfile),
self._load_launchfile,
{'launchfile': lfile,
'args_forced': args
})
self._start_queue(self._progress_queue_prio)
def _load_launchfile(self, launchfile, args_forced={}, pqid=None):
'''
        This method will be called in another thread. The configuration parameters
        of the launch file will be requested using `LaunchArgsSelectionRequest` and
        `InteractionNeededError`. After the file is successfully loaded, a
        `loaded_config` signal will be emitted.
'''
if args_forced:
rospy.loginfo("LOAD launch: %s with args: %s" % (launchfile, args_forced))
else:
rospy.loginfo("LOAD launch: %s" % launchfile)
# load launch configuration
try:
args = {}
changed_nodes = []
if launchfile in self.__configs:
_launch_file, changed_nodes = nm.nmd().launch.reload_launch(launchfile, masteruri=self.masteruri)
else:
# nm.nmd().launch.load_launch(launchfile, argv_forced) CREATE DICT
# on_host should be an nmdurl
_launch_file, args = nm.nmd().launch.load_launch(launchfile, masteruri=self.masteruri, host=self.masteruri, args=args_forced)
# do not load if the loadings process was canceled
if self._progress_queue_prio.has_id(pqid):
cfg = LaunchConfig(launchfile, args=args)
self._loaded_args[launchfile] = args
self.loaded_config.emit(cfg, changed_nodes)
nm.nmd().launch.get_nodes_threaded(launchfile)
except nm.LaunchArgsSelectionRequest as lasr:
raise nm.InteractionNeededError(lasr, self._load_launchfile, {'launchfile': launchfile, 'args_forced': args_forced})
except exceptions.GrpcTimeout as tout:
raise DetailedError("Timeout", "Timeout while load %s" % tout.remote, "Daemon not responded within %.2f seconds while"
"load launch file. You can try to increase the timeout for GRPC requests in node manager settings." % nm.settings().timeout_grpc)
except Exception as e:
print(traceback.format_exc())
err_text = '%s loading failed!' % os.path.basename(launchfile)
rospy.logwarn("Loading launch file: %s", utf8(e))
raise DetailedError("Loading launch file", err_text, utf8(e))
def _apply_launch_config(self, launchcfg, changed_nodes):
filename = launchcfg.launchfile
# store changed nodes for restart
if changed_nodes:
self._cfg_changed_nodes[filename] = changed_nodes
if filename in self.__configs:
# store expanded items
self.__expanded_items[filename] = self._get_expanded_groups()
# close the current loaded configuration with the same name
self.remove_cfg_from_model(filename)
del self.__configs[filename]
def _apply_mtimes(self, launchfile, mtime, includes):
if launchfile in self.__configs:
cfg = self.__configs[launchfile]
cfg.mtime = mtime
cfg.includes = includes
def _apply_changed_binaries(self, launchfile, nodes):
muri = nmdurl.masteruri(launchfile)
if nmdurl.equal_uri(muri, self.masteruri):
for nodename, mtime in nodes.items():
tnodes = self.node_tree_model.get_tree_node(nodename, self.masteruri)
doask = False
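                # ask for a restart only if the binary is newer than the mtime we last asked about (or not asked yet)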
try:
if self._changed_binaries[nodename] < mtime:
doask = True
except KeyError:
doask = True
if doask:
for tnode in tnodes:
# ask for each node separately
self._changed_binaries[nodename] = mtime
self.question_restart_changed_binary(tnode)
def perform_master_checks(self):
grpc_url = nmdurl.nmduri(self.masteruri)
lfiles = {}
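        # collect the modification times of all loaded launch files and their includes for the changed-file check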
for path, cfg in self.__configs.items():
if cfg.mtime > 0:
lfiles[path] = cfg.mtime
lfiles.update(cfg.includes)
if self._has_nmd:
            # do not query the node manager daemon as long as it is not started (not in the nodes list)
if lfiles:
nm.nmd().file.check_for_changed_files_threaded(lfiles)
nm.nmd().screen.multiple_screens_threaded(grpc_url)
nodes = self.get_nodes_runningIfLocal(True)
if nodes:
nm.nmd().launch.get_changed_binaries_threaded(grpc_url, list(nodes.keys()))
def perform_diagnostic_requests(self, force=False):
now = time.time()
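        # throttle diagnostic requests to at most one per second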
if self._has_nmd and (self._has_diagnostics or force) and now - self._ts_last_diagnostic_request >= 1.0:
nmd_uri = nmdurl.nmduri(self.masteruri)
nm.nmd().monitor.get_system_diagnostics_threaded(nmd_uri)
if not self.has_master_sync:
nm.nmd().monitor.get_diagnostics_threaded(nmd_uri)
elif nmdurl.equal_uri(self.masteruri, self.main_window.getMasteruri()):
nm.nmd().monitor.get_diagnostics_threaded(nmd_uri)
self._ts_last_diagnostic_request = now
def get_files_for_change_check(self):
result = {}
for path, cfg in self.__configs.items():
if cfg.mtime > 0:
result[path] = cfg.mtime
result.update(cfg.includes)
return result
def on_changed_file(self, grpc_path, mtime):
for _path, cfg in self.__configs.items():
if cfg.launchfile == grpc_path:
# test launch file itself
if cfg.mtime != mtime:
                    # no matter how the user responds, update the modification time to avoid asking again and again
cfg.mtime = mtime
self.question_reload_changed_file(cfg.launchfile, cfg.launchfile)
# continue, perhaps the file is an include in other launch files
else:
# test included files
for incf, incmt in cfg.includes.items():
if incf == grpc_path:
if incmt != mtime:
cfg.includes[incf] = mtime
self.question_reload_changed_file(incf, cfg.launchfile)
break
def reload_global_parameter_at_next_start(self, launchfile):
try:
self.__configs[launchfile].global_param_done = False
self.on_node_selection_changed(None, None, True)
except Exception:
pass
def question_restart_changed_binary(self, changed):
self.message_frame.show_question(MessageFrame.TYPE_BINARY, 'Binaries changed, restart nodes?', MessageData(changed.name, [changed]))
def question_reload_changed_file(self, changed, affected):
_filename, file_extension = os.path.splitext(changed)
if file_extension in nm.settings().launch_view_file_ext or changed.find('.launch.') > 0:
changed_res = "%s[%s]" % (os.path.basename(changed), utf8(package_name(os.path.dirname(changed))[0]))
self.message_frame.show_question(MessageFrame.TYPE_LAUNCH_FILE, 'Reload <b>%s</b>?<br>Changed files:' % os.path.basename(affected), MessageData(affected, [changed_res]))
def question_transfer_changed_file(self, changed, affected):
self.message_frame.show_question(MessageFrame.TYPE_TRANSFER,
"Configuration file '%s' referenced by parameter in <b>%s</b> is changed.<br>Copy to remote host?"
"<br>Don\'t forget to restart the corresponding nodes!" % (changed, os.path.basename(affected)), MessageData(changed))
def _get_nodelets(self, nodename, configname=''):
if configname and configname in self._nodelets:
if nodename in self._nodelets[configname]:
return self._nodelets[configname][nodename]
else:
for configname, mngrs in self._nodelets.items():
if nodename in mngrs:
return mngrs[nodename]
return []
def _get_nodelet_manager(self, nodename, configname=''):
if configname and configname in self._nodelets:
for mngr, nodelets in self._nodelets[configname].items():
if nodename in nodelets:
return mngr
else:
for configname, mngrs in self._nodelets.items():
for mngr, nodelets in mngrs.items():
if nodename in nodelets:
return mngr
return None
def _get_expanded_groups(self):
'''
Returns a list of group names, which are expanded.
'''
result = []
try:
for r in range(self.ui.nodeTreeView.model().rowCount()):
index_host = self.ui.nodeTreeView.model().index(r, 0)
if index_host.isValid() and self.ui.nodeTreeView.isExpanded(index_host):
if self.ui.nodeTreeView.model().hasChildren(index_host):
for c in range(self.ui.nodeTreeView.model().rowCount(index_host)):
index_cap = self.ui.nodeTreeView.model().index(c, 0, index_host)
if index_cap.isValid() and self.ui.nodeTreeView.isExpanded(index_cap):
model_index = self.node_proxy_model.mapToSource(index_cap)
item = self.node_tree_model.itemFromIndex(model_index)
if isinstance(item, (GroupItem, HostItem)):
result.append(item.name)
except Exception:
print(traceback.format_exc(3))
return result
def _expand_groups(self, groups=None):
'''
Expands all groups, which are in the given list. If no list is given,
expands all groups of expanded hosts.
'''
try:
for r in range(self.ui.nodeTreeView.model().rowCount()):
index_host = self.ui.nodeTreeView.model().index(r, 0)
if index_host.isValid() and self.ui.nodeTreeView.isExpanded(index_host):
if self.ui.nodeTreeView.model().hasChildren(index_host):
for c in range(self.ui.nodeTreeView.model().rowCount(index_host)):
index_cap = self.ui.nodeTreeView.model().index(c, 0, index_host)
if index_cap.isValid():
model_index = self.node_proxy_model.mapToSource(index_cap)
item = self.node_tree_model.itemFromIndex(model_index)
if isinstance(item, (GroupItem, HostItem)):
if groups is None or item.name in groups:
self.ui.nodeTreeView.setExpanded(index_cap, True)
except Exception:
print(traceback.format_exc(3))
def update_robot_icon(self, force=False):
'''
Update the current robot icon. If the icon was changed a `robot_icon_updated`
signal will be emitted.
:return: the path to the current robot icon
:rtype: str
'''
for l in self.__robot_icons:
try:
icon = self.__configs[l].get_robot_icon()
if icon:
if icon != self.__current_robot_icon or force:
self.__current_robot_icon = icon
self.robot_icon_updated.emit(self.masteruri, icon)
return icon
except Exception:
pass
self.__current_robot_icon = self.__current_parameter_robot_icon
self.robot_icon_updated.emit(self.masteruri, utf8(self.__current_robot_icon))
return self.__current_robot_icon
def appendConfigToModel(self, launchfile, rosconfig):
'''
Update the node view
:param str launchfile: the launch file path
:param rosconfig: the configuration
:type rosconfig: :class:`fkie_node_manager.launch_config.LaunchConfig`
'''
hosts = dict() # dict(addr : dict(node : [config]) )
addr = get_hostname(self.masteruri)
masteruri = self.masteruri
for n in rosconfig.nodes:
if n.machine_name and not n.machine_name == 'localhost':
if n.machine_name not in rosconfig.machines:
raise Exception(''.join(["ERROR: unknown machine [", n.machine_name, "]"]))
addr = rosconfig.machines[n.machine_name].address
masteruri = nm.nameres().masteruri(n.machine_name)
if masteruri is None:
masteruri = nm.nameres().masteruribyaddr(n.machine_name)
node = roslib.names.ns_join(n.namespace, n.name)
if (masteruri, addr) not in hosts:
hosts[(masteruri, addr)] = dict()
hosts[(masteruri, addr)][node] = launchfile
# add the configurations for each host separately
for ((masteruri, addr), nodes) in hosts.items():
self.node_tree_model.append_config(masteruri, addr, nodes)
self.updateButtons()
def remove_cfg_from_model(self, launchfile):
'''
Update the node view after removed configuration.
:param str launchfile: the grpc path of the launch file
'''
self.remove_config_signal.emit(launchfile)
self.node_tree_model.remove_config(launchfile)
self.updateButtons()
def on_launch_description_retrieved(self, url, launch_descriptions):
'''
Handles the new list with nodes from default configuration service.
:param str url: the URI of the node manager daemon
:param launch_descriptions: a list with configuration description.
:type launch_descriptions: list(:class:`fkie_node_manager_daemon.launch_description.LaunchDescription`)
'''
if self._first_launch:
self._first_launch = False
if self.default_load_launch:
lfile = nmdurl.join(nmdurl.nmduri(self.masteruri), self.default_load_launch)
if os.path.isdir(self.default_load_launch):
self.main_window.launch_dock.launchlist_model.set_path(lfile)
elif os.path.isfile(self.default_load_launch):
self.main_window.launch_dock.launchlist_model.set_path(os.path.dirname(self.default_load_launch))
self._progress_queue_prio.add2queue(utf8(uuid.uuid4()),
'Loading %s' % os.path.basename(lfile),
self._load_launchfile,
{'launchfile': lfile,
'args_forced': {}
})
self._start_queue(self._progress_queue)
masteruri = self.masteruri
host = get_hostname(masteruri)
host_addr = host
if host_addr is None:
host_addr = host
new_configs = []
for ld in launch_descriptions:
# TODO: check masteruri and host
if ld.masteruri != masteruri:
# rospy.logdebug("skip apply config %s from %s to %s with configs %s ", ld.path, ld.masteruri, masteruri, self.__configs)
continue
# add the new config
if ld.path not in self.__configs:
args = {}
if ld.path in self._loaded_args:
args = self._loaded_args[ld.path]
self.__configs[ld.path] = LaunchConfig(ld.path, args=args)
nm.nmd().launch.get_mtimes_threaded(ld.path)
new_configs.append(ld.path)
self.__configs[ld.path].nodes = ld.nodes
alredy_added_nodes = set()
# update capabilities
for rd in ld.robot_descriptions:
# add capabilities
caps = dict()
rd_node_cfgs = dict()
for c in rd.capabilities:
if c.namespace not in caps:
caps[c.namespace] = dict()
caps[c.namespace][utf8(c.name)] = {'type': c.type, 'images': [interpret_path(i) for i in c.images], 'description': interpret_path(utf8(c.description.replace("\\n ", "\n"))), 'nodes': list(c.nodes)}
for n in c.nodes:
rd_node_cfgs[n] = ld.path
alredy_added_nodes.add(n)
robot_addr = host_addr
valid_machine = False
if rd.machine and rd.machine != host:
robot_addr = rd.machine
self.node_tree_model.append_config(masteruri, robot_addr, rd_node_cfgs)
if valid_machine or not rd.robot_name or utf8(rd.robot_name) == self.mastername:
self.node_tree_model.add_capabilities(masteruri, robot_addr, ld.path, caps)
# set host description
tooltip = self.node_tree_model.update_host_description(masteruri, robot_addr, rd.robot_type, utf8(rd.robot_name), interpret_path(utf8(rd.robot_descr)))
self.capabilities_update_signal.emit(masteruri, robot_addr, ld.path, [rd])
self.host_description_updated.emit(masteruri, robot_addr, tooltip)
node_cfgs = dict()
for n in ld.nodes:
if n not in alredy_added_nodes:
node_cfgs[n] = ld.path
self.node_tree_model.append_config(masteruri, host_addr, node_cfgs)
# set the robot_icon
if ld.path in self.__robot_icons:
self.__robot_icons.remove(ld.path)
self.__robot_icons.insert(0, ld.path)
self.set_duplicate_nodes(self.__running_nodes)
# expand items to restore old view
if ld.path in self.__expanded_items:
self._expand_groups(self.__expanded_items[ld.path])
del self.__expanded_items[ld.path]
# # update nodelets TODO: get it from nmd
# nodelets = {}
# for n in launchConfig.Roscfg.nodes:
# if n.package == 'nodelet' and n.type == 'nodelet':
# args = n.args.split(' ')
# if len(args) == 3 and args[0] == 'load':
# nodelet_mngr = roslib.names.ns_join(n.namespace, args[2])
# if nodelet_mngr not in nodelets:
# nodelets[nodelet_mngr] = []
# nodelets[nodelet_mngr].append(roslib.names.ns_join(n.namespace, n.name))
for mngr, nlist in ld.nodelets.items():
mngr_nodes = self.node_tree_model.get_tree_node(mngr, self.masteruri)
for mn in mngr_nodes:
mn.nodelets = nlist
for nlet in nlist:
nlet_nodes = self.node_tree_model.get_tree_node(nlet, self.masteruri)
for nn in nlet_nodes:
nn.nodelet_mngr = mngr
self._nodelets[ld.path] = ld.nodelets
with self._associations_lock:
self._associations[ld.path] = ld.associations
if ld.path in self._start_nodes_after_load_cfg:
self.start_nodes_by_name(self._start_nodes_after_load_cfg[ld.path], ld.path, True)
del self._start_nodes_after_load_cfg[ld.path]
removed_configs = set(self.__configs.keys()) - set(new_configs)
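        # configurations that are no longer reported by this daemon are removed from the node view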
for cfg in removed_configs:
if isinstance(cfg, tuple):
rospy.logwarn("CFG: unsupported config type: %s" % str(cfg))
continue
if cfg.startswith(url):
self.remove_cfg_from_model(cfg)
del self.__configs[cfg]
else:
pass
self.updateButtons()
for cfg in new_configs:
if cfg in self._cfg_changed_nodes:
changed_nodes = self._cfg_changed_nodes[cfg]
del self._cfg_changed_nodes[cfg]
node_count = ''
if len(changed_nodes) > 1:
node_count = 's [%d]' % len(changed_nodes)
nodes_text = '<br>'
for chn in changed_nodes:
nodes_text += "%s " % HTMLDelegate.toHTML(chn)
self.message_frame.show_question(MessageFrame.TYPE_NODE_CFG, 'Configuration changed for node%s:%s<br>restart?' % (node_count, nodes_text), MessageData((changed_nodes, cfg)))
def on_nmd_version_retrieved(self, nmd_url, version, date):
if not nmdurl.equal_uri(nmdurl.masteruri(nmd_url), self.masteruri):
return
self._diag_nmd_version = version
self._check_diag_state_nmd()
def on_log_dir_retrieved(self, nmd_url, log_dir_size):
if not nmdurl.equal_uri(nmdurl.masteruri(nmd_url), self.masteruri):
return
self._diag_log_dir_size = log_dir_size
self._check_diag_state_nmd()
def _check_diag_state_nmd(self):
state_ok = True
if self._diag_nmd_version is not None:
if self._diag_nmd_version != self._nmd_version:
state_ok = False
                res = self.set_diagnostic_warn('/node_manager_daemon', "node_manager_daemon on<br>%s has a different version<br>'%s', own:<br>'%s'.<br>Please update and restart!" % (self.masteruri, self._diag_nmd_version, self._nmd_version))
                if not res:
                    self.message_frame.show_question(MessageFrame.TYPE_NMD, "node_manager_daemon on %s has a different version '%s', own '%s'.\nShould it be started?" % (self.masteruri, self._diag_nmd_version, self._nmd_version), MessageData(self.masteruri))
if self._diag_log_dir_size is not None:
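            # warn if the log directory grows beyond 1 GiB (1073741824 bytes)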
if self._diag_log_dir_size > 1073741824:
state_ok = False
hostname = get_hostname(self.masteruri)
clean_cmd = '<a href="rosclean://%s" title="calls `rosclean purge` at `%s`">rosclean purge</a>' % (self.masteruri.replace('http://', ''), hostname)
res = self.set_diagnostic_warn('/node_manager_daemon', "disk usage in log directory @%s is %s. %s" % (get_hostname(self.masteruri), sizeof_fmt(self._diag_log_dir_size), clean_cmd))
if state_ok:
self.set_diagnostic_ok('/node_manager_daemon')
def set_diagnostic_warn(self, node_name, msg):
if DIAGNOSTICS_AVAILABLE:
diagnostic_status = DiagnosticStatus()
diagnostic_status.name = node_name
diagnostic_status.level = DiagnosticStatus.WARN
diagnostic_status.message = msg
self.append_diagnostic(diagnostic_status)
return True
return False
def set_diagnostic_ok(self, node_name):
if DIAGNOSTICS_AVAILABLE:
diagnostic_status = DiagnosticStatus()
diagnostic_status.name = node_name
diagnostic_status.level = DiagnosticStatus.OK
diagnostic_status.message = ''
self.append_diagnostic(diagnostic_status)
return True
return False
def update_system_diagnostics(self, diagnostics):
self.node_tree_model.update_system_diagnostics(self.masteruri, diagnostics)
selections = self.ui.nodeTreeView.selectionModel().selectedIndexes()
selectedNodes = self.hostsFromIndexes(selections)
if len(selectedNodes) == 1:
if selectedNodes[0].local:
self.on_node_selection_changed(None, None)
def append_diagnostic(self, diagnostic_status, isnew=True):
result = False
if (diagnostic_status.name == '/master_sync'):
if get_hostname(self.masteruri) != diagnostic_status.hardware_id:
return False
if diagnostic_status.name not in ['/node_manager_daemon']:
self._has_diagnostics = True
nodes = self.getNode(diagnostic_status.name)
for node in nodes:
node.append_diagnostic_status(diagnostic_status)
result = True
if nodes:
selections = self.ui.nodeTreeView.selectionModel().selectedIndexes()
selectedNodes = self.nodesFromIndexes(selections)
if len(selectedNodes) == 1:
node = selectedNodes[0]
if node.name == diagnostic_status.name:
self.on_node_selection_changed(None, None)
elif isnew:
# store to have messages received before node was detected
self._stored_diagnostic_messages[time.time()] = diagnostic_status
return result
def sysmon_active_update(self):
if self._sysmon_timer is None:
self._sysmon_timer = QTimer()
self._sysmon_timer.timeout.connect(self._sysmon_update_callback)
self._sysmon_timer.start(1000)
self.node_tree_model.sysmon_set_state(self.masteruri, True)
else:
self._sysmon_timer.stop()
self._sysmon_timer = None
self.node_tree_model.sysmon_set_state(self.masteruri, False)
# update host description
selections = self.ui.nodeTreeView.selectionModel().selectedIndexes()
selectedNodes = self.hostsFromIndexes(selections)
if len(selectedNodes) == 1:
if selectedNodes[0].local:
self.on_node_selection_changed(None, None)
def _sysmon_update_callback(self):
if self._has_nmd and self.__online:
nm.nmd().monitor.get_system_diagnostics_threaded(nmdurl.nmduri(self.masteruri))
if not nm.is_local(self.mastername):
nm.nmd().monitor.get_diagnostics_threaded(nmdurl.nmduri(self.masteruri))
@property
def launch_servers(self):
return self.__launch_servers
def has_launch_server(self):
'''
        Returns `True` if there are roslaunch servers which do not have `master` as
        a node or which host nodes other than `rosout-#`.
'''
for _, (_, nodes) in self.__launch_servers.items(): # _:= uri, pid
if not self._is_master_launch_server(nodes):
return True
return False
def _is_master_launch_server(self, nodes):
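        # a launch server counts as the master's own roslaunch if it only hosts 'master' and at most one other node (e.g. rosout)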
if 'master' in nodes and len(nodes) < 3:
return True
return False
def on_launch_server_retrieved(self, serveruri, pid, nodes):
'''
Handles the info about roslaunch server.
Emits a Qt signal L{host_description_updated} to notify about a new host
description and a Qt signal L{capabilities_update_signal} to notify about a capabilities
update.
:param str serveruri: the URI of the roslaunch server
:param str pid: the process id of the roslaunch server
:param list(str) nodes: list with nodes handled by the roslaunch server
'''
self.__launch_servers[serveruri] = (pid, nodes)
def on_launch_server_err(self, serveruri, msg):
'''
        Handles the error messages from the launch server handler.
:param str serveruri: the URI of the launch server
:param str msg: the error message
'''
try:
del self.__launch_servers[serveruri]
except Exception:
pass
def on_remove_all_launch_server(self):
'''
        Kills all running launch servers. The corresponding URIs are removed by master_monitor.
'''
for lsuri, (pid, nodes) in self.__launch_servers.items():
try:
if not self._is_master_launch_server(nodes):
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'kill roslaunch %s (%s)' % (lsuri, utf8(pid)),
nm.starter().kill,
{'host': get_hostname(lsuri),
'pid': pid,
'auto_pw_request': False,
'user': self.current_user
})
self.launch_server_handler.updateLaunchServerInfo(lsuri, delayed_exec=3.0)
except Exception as e:
rospy.logwarn("Error while kill roslaunch %s: %s", utf8(lsuri), utf8(e))
raise DetailedError("Kill error",
''.join(['Error while kill roslaunch ', lsuri]),
utf8(e))
self._start_queue(self._progress_queue)
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# %%%%%%%%%%%%% Handling of the view activities %%%%%%%%
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
def on_node_activated(self, index):
'''
        Depending on the state of the node, it will be started or its screen output
        will be opened.
:param index: The index of the activated node
:type index: :class:`QtCore.QModelIndex` <https://srinikom.github.io/pyside-docs/PySide/QtCore/QModelIndex.html>
'''
self.__last_node_activation = time.time()
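        # remember the activation time so on_node_clicked can ignore the click belonging to this activation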
selectedNodes = []
if index.column() == 0:
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes(), False)
if not selectedNodes:
return
has_running = False
has_stopped = False
has_invalid = False
for node in selectedNodes:
if node.uri is not None:
has_running = True
if node.pid is None:
has_invalid = True
else:
has_stopped = True
if has_stopped:
self.on_start_clicked()
elif has_running:
self.on_io_clicked(activated=True)
def on_node_clicked(self, index):
if time.time() - self.__last_node_activation > 1.:
self.message_frame.hide_question([MessageFrame.TYPE_NODELET])
self.info_frame.hide_question([MessageFrame.TYPE_NOSCREEN])
if time.time() - self.__last_selection > 1.:
self.on_node_selection_changed(None, None, True)
def on_topic_activated(self, index):
'''
:param index: The index of the activated topic
:type index: :class:`QtCore.QModelIndex` <https://srinikom.github.io/pyside-docs/PySide/QtCore/QModelIndex.html>
'''
model_index = self.topic_proxyModel.mapToSource(index)
item = self.topic_model.itemFromIndex(model_index)
if isinstance(item, TopicItem):
self.on_topic_echo_clicked([item.topic])
def on_topic_clicked(self, index):
if time.time() - self.__last_selection > 1.:
self.on_topic_selection_changed(None, None, True)
def on_service_activated(self, index):
'''
:param index: The index of the activated service
:type index: :class:`QtCore.QModelIndex` <https://srinikom.github.io/pyside-docs/PySide/QtCore/QModelIndex.html>
'''
model_index = self.service_proxyModel.mapToSource(index)
item = self.service_model.itemFromIndex(model_index)
if isinstance(item, ServiceItem):
self.on_service_call_clicked([item.service])
def on_service_clicked(self, index):
if time.time() - self.__last_selection > 1.:
self.on_service_selection_changed(None, None, True)
def on_host_inserted(self, item):
if item == (self.masteruri, get_hostname(self.masteruri)):
index = self.node_tree_model.indexFromItem(item)
model_index = self.node_proxy_model.mapFromSource(index)
if model_index.isValid():
self.ui.nodeTreeView.expand(model_index)
# self.ui.nodeTreeView.expandAll()
def on_node_collapsed(self, index):
if not index.parent().isValid():
self.ui.nodeTreeView.selectionModel().clear()
def on_node_expanded(self, index):
pass
def _create_html_list(self, title, items, list_type=None, name=''):
'''
        :param list_type: LAUNCH, TOPIC_PUB, TOPIC_SUB, NODE, SERVICE, LOG
:type list_type: str
'''
result = ''
if items:
result = '<b><u>%s</u></b>' % (title)
if len(items) > 1:
result = '%s <span style="color:gray;">[%d]</span>' % (result, len(items))
result = '%s<table style="display: inline-table">' % result
items.sort()
for i in items:
item = i
# reduce the displayed name
item_name = i
if list_type in ['LOG']:
item = i.name
item_name = i.name
if name:
if item_name.startswith(name):
item_name = item_name.replace('%s%s' % (name, roslib.names.SEP), '~', 1)
ns = roslib.names.namespace(name)
if item_name.startswith(ns) and ns != roslib.names.SEP:
item_name = item_name.replace(ns, '', 1)
if list_type in ['NODE']:
item = '<tr>'
item += '<td><a href="node://%s%s">%s</a></td>' % (self.mastername, i, item_name)
item += '</tr>'
elif list_type in ['TOPIC_PUB', 'TOPIC_SUB']:
# determine the count of publisher or subscriber
count = None
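                    # count stays None if the node is not registered for this topic on this master; such topics are marked with '!sync' below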
try:
tpc = self.__master_info.getTopic(i)
if list_type == 'TOPIC_SUB':
count = len(tpc.publisherNodes)
if name not in tpc.subscriberNodes:
count = None
else:
count = len(tpc.subscriberNodes)
if name not in tpc.publisherNodes:
count = None
except Exception:
pass
# add the count
if count is not None:
item = '<tr>'
item += '<td><span style="color:gray;">%d</span></td>' % (count)
item += '<td><a href="topicecho://%s%s"><span style="color:gray;"><i>echo</i></span></a></td>' % (self.mastername, i)
item += '<td><a href="topic://%s">%s</a></td>' % (i, item_name)
#sekkyumu_topic_echo_24 = nm.settings().icon_path('sekkyumu_topic_echo_24.png')
#item += '<td><a href="topicecho://%s%s" title="Show the content of the topic"><img src="%s" alt="echo"></a></td>' % (self.mastername, i, sekkyumu_topic_echo_24)
item += '</tr>'
else:
item = '<tr>'
item += '<td colspan="3" style="float:left"><span style="color:red;">!sync </span><a>%s</a></td>' % (item_name)
item += '</tr>'
elif list_type == 'SERVICE':
try:
srv = self.__master_info.getService(i)
if srv is not None and name in srv.serviceProvider:
item = '<tr>'
item += '<td><a href="servicecall://%s%s"><span style="color:gray;"><i>call</i></span></a></td>' % (self.mastername, i)
item += '<td><a href="service://%s%s">%s</a></td>' % (self.mastername, i, item_name)
item += '</tr>'
else:
item = '<tr>'
item += '<td colspan="2" style="float:left"><span style="color:red;">!sync </span>%s</td>' % (item_name)
item += '</tr>'
except Exception:
item = '<tr>'
item += '<td colspan="2" style="float:left"><span style="color:red;">?sync </span>%s</td>' % (item_name)
item += '</tr>'
elif list_type == 'LAUNCH':
if i in self.__configs and self.__configs[i].global_param_done:
item = '<tr>'
item_ref = '<a href="%s">%s</a>' % (i.replace('grpc://', 'open-edit://'), os.path.basename(item_name))
item += '<td>%s</td>' % (item_ref)
pkg, _path = nm.nmd().file.package_name(i)
item += '<td><i>%s</i></td>' % (os.path.dirname(item_name) if pkg is None else pkg)
item += '</tr>'
elif list_type == 'LOG':
node_host = self.getHostFromNode(i)
if nm.is_local(node_host, wait=False):
roslogfile = screen.get_ros_logfile(node=i.name)
logfile = screen.get_logfile(node=i.name)
else:
roslogfile = '%s@%s' % (i.name, node_host)
logfile = '%s@%s' % (i.name, node_host)
item = '<tr>'
item += '<td colspan="2" style="float:left"><span style="color:grey;">roslog: </span></td>'
item_ref = '<a href="%s">%s</a>' % ('show-roslog://%s@%s' % (i.name, node_host), roslogfile)
item += '<td>%s</td>' % (item_ref)
item += '</tr>'
item += '<tr>'
item += '<td colspan="2" style="float:left"><span style="color:grey;">screen: </span></td>'
item_ref = '<a href="%s">%s</a>' % ('show-log://%s@%s' % (i.name, node_host), logfile)
item += '<td>%s</td>' % (item_ref)
item += '</tr>'
result += item
result += '</table>\n<br>'
return result
def on_tab_current_changed(self, index):
tab_name = self.ui.tabWidget.currentWidget().objectName()
if tab_name == 'tabTopics':
# select the topics of the selected node in the "Topic" view
selections = self.ui.nodeTreeView.selectionModel().selectedIndexes()
selectedNodes = self.nodesFromIndexes(selections)
if len(selectedNodes) == 1:
node = selectedNodes[0]
selected_topics = self.topic_model.index_from_names(node.published, node.subscribed)
for s in selected_topics:
self.ui.topicsView.selectionModel().select(self.topic_proxyModel.mapFromSource(s), QItemSelectionModel.Select)
elif tab_name == 'tabServices':
# select the services of the selected node in the "Services" view
selections = self.ui.nodeTreeView.selectionModel().selectedIndexes()
selectedNodes = self.nodesFromIndexes(selections)
if len(selectedNodes) == 1:
node = selectedNodes[0]
selected_services = self.service_model.index_from_names(node.services)
for s in selected_services:
self.ui.servicesView.selectionModel().select(self.service_proxyModel.mapFromSource(s), QItemSelectionModel.Select)
def _is_current_tab_name(self, tab_name):
return (self.ui.tabWidget.currentWidget().objectName() == tab_name)
def on_node_selection_changed(self, selected, deselected, force_emit=False, node_name=''):
'''
        Updates the buttons, creates a description and emits L{description_signal} to
        show the description of the host, group or node.
'''
if selected is not None:
            # workaround to avoid double updates after a click on an item
self.__last_selection = time.time()
selectedGroups = []
if node_name and self.master_info is not None:
# get node by name
selectedNodes = self.getNode(node_name)
if not selectedNodes or selectedNodes[0] is None:
if node_name:
self.description_signal.emit(node_name, "<b>%s</b> not found" % node_name, True if selected or deselected or force_emit else False)
return
selectedHosts = []
selections = []
else:
# get node by selected items
if not self._is_current_tab_name('tabNodes'):
return
selections = self.ui.nodeTreeView.selectionModel().selectedIndexes()
selectedHosts = self.hostsFromIndexes(selections)
selectedNodes = self.nodesFromIndexes(selections)
selectedGroups = self.groupsFromIndexes(selections)
self.ui.topicsView.selectionModel().clear()
self.ui.servicesView.selectionModel().clear()
name = ''
text = ''
        # add control buttons for more than one selected node
if len(selectedNodes) > 1 or len(selectedGroups) > 0:
restartable_nodes = [sn for sn in selectedNodes if len(sn.cfgs) > 0 and not self._is_in_ignore_list(sn.name)]
restartable_nodes_with_launchfiles = [sn for sn in selectedNodes if sn.has_launch_cfgs(sn.cfgs) > 0 and not self._is_in_ignore_list(sn.name)]
killable_nodes = [sn for sn in selectedNodes if sn.node_info.pid is not None and not self._is_in_ignore_list(sn.name)]
unregisterble_nodes = [sn for sn in selectedNodes if sn.node_info.pid is None and sn.node_info.uri is not None and sn.node_info.isLocal and not self._is_in_ignore_list(sn.name)]
# add description for multiple selected nodes
if restartable_nodes or killable_nodes or unregisterble_nodes:
text += '<b>Selected nodes:</b><br>'
restart_icon_path = nm.settings().icon_path('sekkyumu_restart_24.png')
restart_g_icon_path = nm.settings().icon_path('sekkyumu_restart_g_24.png')
sekkyumu_kill_screen_24 = nm.settings().icon_path('sekkyumu_kill_screen_24.png')
play_alt_icon_path = nm.settings().icon_path('sekkyumu_play_alt_24.png')
if restartable_nodes:
text += '<a href="restart-node://all_selected_nodes" title="Restart %s selected nodes Ctrl+Shift+R"><img src="%s" alt="restart">[%d]</a>' % (len(restartable_nodes), restart_icon_path, len(restartable_nodes))
text += ' <a href="restart-node-g://all_selected_nodes" title="Reload global parameter and restart %s selected nodes Ctrl+Shift+Alt+R"><img src="%s" alt="restart">[%d]</a>' % (len(restartable_nodes), restart_g_icon_path, len(restartable_nodes))
if killable_nodes:
text += ' <a href="kill-screen://all_selected_nodes" title="Kill %s screens of selected nodes"><img src="%s" alt="killscreen">[%d]</a>' % (len(killable_nodes), sekkyumu_kill_screen_24, len(killable_nodes))
if restartable_nodes_with_launchfiles:
text += ' <a href="start-node-adv://all_selected_nodes" title="Start %s nodes with additional options, e.g. loglevel"><img src="%s" alt="play alt">[%d]</a>' % (len(restartable_nodes_with_launchfiles), play_alt_icon_path, len(restartable_nodes_with_launchfiles))
if unregisterble_nodes:
text += '<br><a href="unregister-node://all_selected_nodes">unregister [%d]</a>' % len(unregisterble_nodes)
        # add host description, if only one host is selected
if len(selectedHosts) == 1: # and len(selections) / 2 == 1:
host = selectedHosts[0]
name = '%s - Robot' % host.name
text += host.generate_description()
text += '<br>'
else:
            # add group description, if only one group is selected
if len(selectedGroups) == 1 and len(selections) / 2 == 1:
group = selectedGroups[0]
name = '%s - Group' % group.name
text += group.generate_description()
text += '<br>'
# add node description for one selected node
if len(selectedHosts) != 1 and len(selectedNodes) == 1 and len(selectedGroups) == 0:
node = selectedNodes[0]
text = '<div>%s</div>' % self.get_node_description(node_name, node)
# name = node.name
name = 'Node - Info'
if (self._is_current_tab_name('tabNodes') and self.__last_info_text != text) or force_emit:
self.__last_info_text = text
self.description_signal.emit(name, text, True if selected or deselected or force_emit else False)
self.updateButtons(selectedNodes)
def get_node_description(self, node_name, node=None):
text = ''
if node_name and node is None and self.master_info is not None:
# get node by name
selectedNodes = self.getNode(node_name)
if len(selectedNodes) == 1:
node = selectedNodes[0]
# add node description for one selected node
if node is not None:
# create description for a node
ns, sep, name = node.name.rpartition(rospy.names.SEP)
launches = [c for c in node.cfgs if not isinstance(c, tuple)]
crystal_clear_settings_24 = nm.settings().icon_path('crystal_clear_settings_24.png')
if name == 'node_manager_daemon':
text += '<a href="nmd-cfg://%s" title="Configure Daemon"><img src="%s" alt="configure"></a>' % (utf8(self.masteruri).replace('http://', ''), crystal_clear_settings_24)
elif name == 'node_manager' and nm.is_local(self.mastername):
text += '<a href="nm-cfg://%s" title="Configure Node Manager"><img src="%s" alt="configure"></a>' % (utf8(self.masteruri).replace('http://', ''), crystal_clear_settings_24)
if launches:
sekkyumu_restart_24 = nm.settings().icon_path('sekkyumu_restart_24.png')
sekkyumu_restart_g_24 = nm.settings().icon_path('sekkyumu_restart_g_24.png')
text += '<a href="restart-node://%s" title="Restart node Ctrl+Shift+R"><img src="%s" alt="restart"></a>' % (node.name, sekkyumu_restart_24)
text += ' <a href="restart-node-g://%s" title="Reload global parameter and restart node Ctrl+Shift+Alt+R"><img src="%s" alt="restart"></a>' % (node.name, sekkyumu_restart_g_24)
sekkyumu_kill_screen_24 = nm.settings().icon_path('sekkyumu_kill_screen_24.png')
text += ' <a href="kill-screen://%s" title="Kill screen of the node"><img src="%s" alt="killscreen"></a>' % (node.name, sekkyumu_kill_screen_24)
if launches:
sekkyumu_play_alt_24 = nm.settings().icon_path('sekkyumu_play_alt_24.png')
text += ' <a href="start-node-adv://%s" title="Start node with additional options, e.g. loglevel"><img src="%s" alt="play alt"></a>' % (node.name, sekkyumu_play_alt_24)
crystal_clear_copy_log_path_24 = nm.settings().icon_path('crystal_clear_copy_log_path_24.png')
text += ' <a href="copy-log-path://%s" title="copy log path to clipboard"><img src="%s" alt="copy_log_path"></a>' % (node.name, crystal_clear_copy_log_path_24)
text += '<br><font size="+2"><b>%s</b></font>' % (name)
text += ' <font size="-1"><a href="copy://%s" style="color:gray;">copy</a></font>' % (node.name)
text += '<br><span style="color:gray;">ns: </span><b>%s%s</b>' % (ns, sep)
text += '<dl>'
text += '<dt><b>URI</b>: %s</dt>' % node.node_info.uri
text += '<dt><b>PID</b>: %s</dt>' % node.node_info.pid
text += '<dt><b>ORG.MASTERURI</b>: %s</dt>' % node.node_info.masteruri
if not is_legal_name(node.name):
text += '<dt><font color="#FF6600"><b>This node has an illegal <node> name.<br><a href="http://ros.org/wiki/Names">http://ros.org/wiki/Names</a><br>This will likely cause problems with other ROS tools.</b></font></dt>'
if node.is_ghost:
if node.name.endswith('master_sync') or node.name.endswith('node_manager'):
text += '<dt><font color="#FF9900"><b>This node is not synchronized by default. To get info about this node select the related host.</b></font></dt>'
else:
text += '<dt><font color="#FF9900"><b>The node is running on remote host, but is not synchronized, because of filter or errors while sync, see log of <i>master_sync</i></b></font></dt>'
text += '<dt><font color="#FF9900"><i>Are you use the same ROS packages?</i></font></dt>'
if node.has_running and node.node_info.pid is None and node.node_info.uri is None:
text += '<dt><font color="#FF9900"><b>There are nodes with the same name on remote hosts running. These will be terminated, if you run this node! (Only if master_sync is running or will be started somewhere!)</b></font></dt>'
if node.node_info.uri is not None and node.node_info.masteruri != self.masteruri:
text += '<dt><font color="#339900"><b>synchronized</b></font></dt>'
if node.node_info.pid is None and node.node_info.uri is not None:
if not node.node_info.isLocal:
text += '<dt><font color="#FF9900"><b>remote nodes will not be ping, so they are always marked running.<br>Do all nodes have the same ROS_MASTER_URI or node uri?</b></font>'
else:
text += '<dt><font color="#CC0000"><b>the node does not respond: </b></font>'
text += ' <a href="unregister-node://%s">unregister</a></dt>' % node.name
added_diags = []
if node.diagnostic_array:
diag_status = node.diagnostic_array[-1]
if node.diagnostic_array:
level_str = 'Unknown'
if diag_status.level in self.DIAGNOSTIC_LEVELS:
level_str = self.DIAGNOSTIC_LEVELS[diag_status.level]
diag_color = '#008000' # green
if diag_status.level == 1:
diag_color = '#FF6600'
elif diag_status.level == 2:
diag_color = '#CC0000'
elif diag_status.level == 3:
diag_color = '#FFCC00'
elif diag_status.level > 3:
diag_color = '#0000CC'
if diag_status.message:
diag_msg = '<dt><font color="%s"><b>%s: %s</b></font></dt>' % (diag_color, level_str, diag_status.message)
else:
diag_msg = '<dt><font color="%s"><b>%s</b></font></dt>' % (diag_color, level_str)
if diag_msg not in added_diags:
text += diag_msg
added_diags.append(diag_msg)
# if len(node.diagnostic_array) > 1:
text += '<dt><a href="show-all-diagnostics://%s">show all diagnostic msgs (%s)</a></dt>' % (node.name, len(node.diagnostic_array))
# if len(node.diagnostic_array) > 1:
# text += '<dt><font color="#FF6600"><a href="view_diagnostics://%s">view recent %d items</a></font></dt>'%(node.name, len(node.diagnostic_array))
if node.nodelet_mngr:
text += '<dt><b>Nodelet manager</b>: %s</dt>' % self._create_html_list('', [node.nodelet_mngr], 'NODE')
if node.nodelets:
text += '<dt>Manager for <b>%d</b> nodelets</dt>' % len(node.nodelets)
text += '</dl>'
if nm.settings().transpose_pub_sub_descr:
text += self._create_html_list('<br>Subscribed Topics:', node.subscribed, 'TOPIC_SUB', node.name)
text += self._create_html_list('<br>Published Topics:', node.published, 'TOPIC_PUB', node.name)
else:
text += self._create_html_list('<br>Published Topics:', node.published, 'TOPIC_PUB', node.name)
text += self._create_html_list('<br>Subscribed Topics:', node.subscribed, 'TOPIC_SUB', node.name)
text += self._create_html_list('<br>Services:', node.services, 'SERVICE', node.name)
            # set launch file paths
text += self._create_html_list('<br>Launch Files:', launches, 'LAUNCH')
# text += self._create_html_list('Default Configurations:', default_cfgs, 'NODE')
text += self._create_html_list('<br>Logs:', [node], 'LOG')
# text += '<dt><a href="copy-log-path://%s">copy log path to clipboard</a></dt>'%node.name
return text
def show_diagnostic_messages(self, node):
found_nodes = []
if self.master_info is not None:
found_nodes = self.node_tree_model.get_node_items_by_name([node])
if found_nodes:
node_item = found_nodes[0]
text = ''
for diag in reversed(node_item.diagnostic_array):
msg = EchoDialog.strify_message(diag)
if isinstance(msg, tuple):
msg = msg[0]
msg = msg.replace('<', '<').replace('>', '>')
msg = '<pre style="background-color:#FFFCCC; color:#000000;font-family:Fixedsys,Courier; padding:10px;">---------- %s --------------------\n%s</pre>' % (diag.values[-1].value, msg)
text += msg
if text:
self.description_signal.emit('%s diagnostic' % node, text, True)
def on_topic_selection_changed(self, selected, deselected, force_emit=False, topic_name=''):
'''
        Updates the buttons, creates a description and emits L{description_signal} to
        show the description of the selected topic.
'''
if selected is not None:
            # workaround to avoid double updates after a click on an item
self.__last_selection = time.time()
selectedTopics = []
if topic_name and self.master_info is not None:
selectedTopics = [self.master_info.getTopic('%s' % topic_name)]
if len(selectedTopics) == 0:
return
else:
if not self._is_current_tab_name('tabTopics'):
return
selectedTopics = self.topicsFromIndexes(self.ui.topicsView.selectionModel().selectedIndexes())
topics_selected = (len(selectedTopics) > 0)
self.ui.echoTopicButton.setEnabled(topics_selected)
self.ui.hzTopicButton.setEnabled(topics_selected)
self.ui.hzSshTopicButton.setEnabled(topics_selected)
self.ui.pubStopTopicButton.setEnabled(topics_selected)
if len(selectedTopics) == 1:
try:
topic = selectedTopics[0]
text = self.get_topic_description(topic_name, topic)
info_text = '<div>%s</div>' % text
if (self._is_current_tab_name('tabTopics') and self.__last_info_text != info_text) or force_emit:
self.__last_info_text = info_text
self.description_signal.emit('Topic - Info', info_text, True if selected or deselected or force_emit else False)
except Exception as _:
pass
def get_topic_description(self, topic_name, topic=None):
text = ''
if topic is None:
selectedTopics = []
if topic_name and self.master_info is not None:
selectedTopics = [self.master_info.getTopic("%s" % topic_name)]
else:
selectedTopics = self.topicsFromIndexes(self.ui.topicsView.selectionModel().selectedIndexes())
if len(selectedTopics) == 1:
topic = selectedTopics[0]
if topic is not None:
sekkyumu_topic_echo_24 = nm.settings().icon_path('sekkyumu_topic_echo_24.png')
sekkyumu_topic_hz_24 = nm.settings().icon_path('sekkyumu_topic_hz_24.png')
sekkyumu_topic_echo_ssh_hz_24 = nm.settings().icon_path('sekkyumu_topic_echo_ssh_hz_24.png')
sekkyumu_topic_pub_24 = nm.settings().icon_path('sekkyumu_topic_pub_24.png')
sekkyumu_topic_repub_24 = nm.settings().icon_path('sekkyumu_topic_repub_24.png')
ns, sep, name = topic.name.rpartition(rospy.names.SEP)
# text = '<font size="+1"><b><span style="color:gray;">%s%s</span><b>%s</b></font><br>' % (ns, sep, name)
text += ' <a href="topicecho://%s%s" title="Show the content of the topic"><img src="%s" alt="echo"></a>' % (self.mastername, topic.name, sekkyumu_topic_echo_24)
text += ' <a href="topichz://%s%s" title="Show only the receive rate of the topic.<br>All data is sent through the network"><img src="%s" alt="hz"></a>' % (self.mastername, topic.name, sekkyumu_topic_hz_24)
text += ' <a href="topichzssh://%s%s" title="Show only the receive rate of the topic.<br>Uses an SSH connection to execute `rostopic hz` on remote host."><img src="%s" alt="sshhz"></a>' % (self.mastername, topic.name, sekkyumu_topic_echo_ssh_hz_24)
text += ' <a href="topicpub://%s%s" title="Start a publisher for selected topic"><img src="%s" alt="pub"></a>' % (self.mastername, topic.name, sekkyumu_topic_pub_24)
if topic.name in self.__republish_params:
text += ' <a href="topicrepub://%s%s" title="Start a publisher with last parameters"><img src="%s" alt="repub"></a>' % (self.mastername, topic.name, sekkyumu_topic_repub_24)
topic_publisher = []
topic_prefix = '/rostopic_pub%s_' % topic.name
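            # publishers started from the GUI are expected to run as nodes with this prefix; collect them to offer a stop link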
node_names = self.master_info.node_names
for n in node_names:
if n.startswith(topic_prefix):
topic_publisher.append(n)
if topic_publisher:
sekkyumu_topic_pub_stop_24 = nm.settings().icon_path('sekkyumu_topic_pub_stop_24.png')
text += ' <a href="topicstop://%s%s"><img src="%s" alt="stop"> [%d]</a>' % (self.mastername, topic.name, sekkyumu_topic_pub_stop_24, len(topic_publisher))
text += '<br><font size="+2"><b>%s</b></font>' % (name)
text += ' <font size="-1"><a href="copy://%s" style="color:gray;">copy</a></font>' % (topic.name)
text += '<br><span style="color:gray;">ns: </span><b>%s%s</b>' % (ns, sep)
text += '<br>'
if nm.settings().transpose_pub_sub_descr:
text += self._create_html_list('<br>Subscriber:', topic.subscriberNodes, 'NODE')
text += self._create_html_list('<br>Publisher:', topic.publisherNodes, 'NODE')
else:
text += self._create_html_list('<br>Publisher:', topic.publisherNodes, 'NODE')
text += self._create_html_list('<br>Subscriber:', topic.subscriberNodes, 'NODE')
text += '<br>'
text += '<b><u>Type:</u></b> %s' % self._href_from_msgtype(topic.type)
text += '<dl>'
try:
mclass = roslib.message.get_message_class(topic.type)
if mclass is not None:
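                    # ROS message classes expose their field names in __slots__ and the matching type strings in _slot_types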
for f in mclass.__slots__:
idx = mclass.__slots__.index(f)
idtype = mclass._slot_types[idx]
# base_type = roslib.msgs.base_msg_type(idtype)
# primitive = "unknown"
# if base_type in roslib.msgs.PRIMITIVE_TYPES:
# primitive = "primitive"
# else:
# try:
# primitive =roslib.message.get_message_class(base_type)
# # primitive = "class", list_msg_class.__slots__
# except ValueError:
# pass
text += '%s: <span style="color:gray;">%s</span><br>' % (f, idtype)
text += '<br>'
constants = {}
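                    # message constants appear as public class attributes of primitive type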
for m in dir(mclass):
if not m.startswith('_'):
if type(getattr(mclass, m)) in [str, int, bool, float]:
constants[m] = getattr(mclass, m)
if constants:
text += '<b><u>Constants:</u></b><br>'
for n in sorted(constants.keys()):
text += '%s: <span style="color:gray;">%s</span><br>' % (n, constants[n])
except ValueError:
pass
text += '</dl>'
return text
def _href_from_msgtype(self, msg_type):
result = msg_type
if msg_type:
result = '<a href="http://ros.org/doc/api/%s.html">%s</a>' % (msg_type.replace('/', '/html/msg/'), msg_type)
return result
def on_service_selection_changed(self, selected, deselected, force_emit=False, service_name=''):
'''
        Updates the buttons, creates a description and emits L{description_signal} to
        show the description of the selected service.
'''
if selected is not None:
            # workaround to avoid double updates after a click on an item
self.__last_selection = time.time()
if service_name and self.master_info is not None:
# get service by name
selectedServices = [self.master_info.getService("%s" % service_name)]
if selectedServices[0] is None:
return
else:
# get service by selected items
selectedServices = self.servicesFromIndexes(self.ui.servicesView.selectionModel().selectedIndexes())
self.ui.callServiceButton.setEnabled(len(selectedServices) > 0)
if not self._is_current_tab_name('tabServices'):
return
if len(selectedServices) == 1:
text = ''
service = selectedServices[0]
ns, sep, name = service.name.rpartition(rospy.names.SEP)
# text = '<font size="+1"><b><span style="color:gray;">%s%s</span><b>%s</b></font><br>' % (ns, sep, name)
sekkyumu_call_service_24 = nm.settings().icon_path('sekkyumu_call_service_24.png')
text += '<a href="servicecall://%s%s" title="call service"><img src="%s" alt="call"></a>' % (self.mastername, service.name, sekkyumu_call_service_24)
text += '<br><font size="+2"><b>%s</b></font>' % (name)
text += ' <font size="-1"><a href="copy://%s" style="color:gray;">copy</a></font>' % (service.name)
text += '<br><span style="color:gray;">ns: </span><b>%s%s</b>' % (ns, sep)
text += '<dl>'
text += '<dt><b>URI</b>: %s</dt>' % service.uri
text += '<dt><b>ORG.MASTERURI</b>: %s</dt>' % service.masteruri
text += self._create_html_list('<br>Provider:', service.serviceProvider, 'NODE')
if service.masteruri != self.masteruri:
text += '<dt><font color="#339900"><b>synchronized</b></font></dt>'
text += '</dl>'
try:
service_class = service.get_service_class(nm.is_local(get_hostname(service.uri)))
text += '<h4>%s</h4>' % self._href_from_svrtype(service_class._type)
text += '<b><u>Request:</u></b>'
text += '<dl><dt>%s</dt></dl>' % service_class._request_class.__slots__
text += '<b><u>Response:</u></b>'
text += '<dl><dt>%s</dt></dl>' % service_class._response_class.__slots__
except Exception:
text += '<h4><font color=red>Unknown service type</font></h4>'
if service.isLocal:
text += '<font color=red>Unable to communicate with service, is provider node running?</font>'
else:
text += '<font color=red>Try to refresh <b>all</b> hosts. Is provider node running?</font>'
info_text = '<div>%s</div>' % text
self._is_current_tab_name('tabServices')
if (self._is_current_tab_name('tabServices') and self.__last_info_text != info_text) or force_emit:
self.__last_info_text = info_text
self.description_signal.emit('Service - Info', info_text, True if selected or deselected or force_emit else False)
def _href_from_svrtype(self, srv_type):
result = srv_type
if srv_type:
result = '<a href="http://ros.org/doc/api/%s.html">%s</a>' % (srv_type.replace('/', '/html/srv/'), srv_type)
return result
def on_parameter_selection_changed(self, selected, deselected):
selectedParameter = self.parameterFromIndexes(self.ui.parameterView.selectionModel().selectedIndexes())
self.ui.deleteParameterButton.setEnabled(len(selectedParameter) > 0)
self.ui.saveParameterButton.setEnabled(len(selectedParameter) > 0)
self.ui.transferParameterButton.setEnabled(len(selectedParameter) > 0)
def hostsFromIndexes(self, indexes, recursive=True):
result = []
for index in indexes:
if index.column() == 0:
model_index = self.node_proxy_model.mapToSource(index)
item = self.node_tree_model.itemFromIndex(model_index)
if item is not None:
if isinstance(item, HostItem):
result.append(item)
return result
def groupsFromIndexes(self, indexes, recursive=True):
result = []
for index in indexes:
if index.column() == 0 and index.parent().isValid():
model_index = self.node_proxy_model.mapToSource(index)
item = self.node_tree_model.itemFromIndex(model_index)
if item is not None:
if isinstance(item, GroupItem):
result.append(item)
return result
def nodesFromIndexes(self, indexes, recursive=True):
result = []
regex = QRegExp(self.ui.nodeFilterInput.text())
for index in indexes:
if index.column() == 0:
model_index = self.node_proxy_model.mapToSource(index)
item = self.node_tree_model.itemFromIndex(model_index)
res = self._nodesFromItems(item, recursive)
for r in res:
if r not in result and (not regex.pattern() or regex.indexIn(r.name) != -1):
result.append(r)
return result
def _nodesFromItems(self, item, recursive):
result = []
if item is not None:
if isinstance(item, (GroupItem, HostItem)):
if recursive:
for j in range(item.rowCount()):
result[len(result):] = self._nodesFromItems(item.child(j), recursive)
elif isinstance(item, NodeItem):
if item not in result:
result.append(item)
return result
def topicsFromIndexes(self, indexes, recursive=True):
result = []
for index in indexes:
model_index = self.topic_proxyModel.mapToSource(index)
item = self.topic_model.itemFromIndex(model_index)
if item is not None:
if isinstance(item, TopicItem):
result.append(item.topic)
elif recursive and isinstance(item, TopicGroupItem):
for titem in item.get_topic_items():
result.append(titem.topic)
return result
def servicesFromIndexes(self, indexes):
result = []
for index in indexes:
model_index = self.service_proxyModel.mapToSource(index)
item = self.service_model.itemFromIndex(model_index)
if item is not None:
if isinstance(item, ServiceItem):
result.append(item.service)
elif isinstance(item, ServiceGroupItem):
for sitem in item.get_service_items():
result.append(sitem.service)
return result
def parameterFromIndexes(self, indexes):
result = []
for index in indexes:
model_index = self.parameter_proxyModel.mapToSource(index)
item = self.parameter_model.itemFromIndex(model_index)
if item is not None and isinstance(item, ParameterValueItem):
result.append((item.name, item.value))
return result
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# %%%%%%%%%%%%% Handling of the button activities %%%%%%%%
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
def on_start_clicked(self):
'''
        Starts the selected nodes. If more than one configuration is available for
        a node, the selection dialog will be shown.
'''
cursor = self.cursor()
self.ui.startButton.setEnabled(False)
self.setCursor(Qt.WaitCursor)
try:
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
self.start_nodes(selectedNodes)
finally:
self.setCursor(cursor)
self.ui.startButton.setEnabled(True)
def on_start_alt_clicked(self):
'''
Starts the selected nodes with additional options.
'''
cursor = self.cursor()
self.setCursor(Qt.WaitCursor)
try:
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
self.start_nodes(selectedNodes, force=True, use_adv_cfg=True)
finally:
self.setCursor(cursor)
def start_node(self, node, force, config, force_host=None, logging=None, cmd_prefix='', path=''):
if node is None:
raise DetailedError("Start error", 'None is not valid node name!')
if node.pid is None or force:
# start the node using launch configuration
if config is None:
raise DetailedError("Start error",
'Error while start %s:\nNo configuration found!' % node.name)
try:
reload_global_param = False
if not self.__configs[config].global_param_done:
reload_global_param = True
self.__configs[config].global_param_done = True
loglevel = ''
logformat = ''
if logging is not None:
if not logging.is_default('console_format'):
logformat = logging.console_format
if not logging.is_default('loglevel'):
loglevel = logging.loglevel
if self._has_nmd:
_result = nm.nmd().launch.start_node(node.name, config, self.masteruri, reload_global_param=reload_global_param,
loglevel=loglevel, logformat=logformat, path=path, cmd_prefix=cmd_prefix)
else:
rospy.logwarn("no running daemon found, start '%s' via SSH" % node.name)
nm.starter().bc_run_node(node.name, config, self.masteruri, reload_global_param=reload_global_param,
loglevel=loglevel, logformat=logformat)
except socket.error as se:
rospy.logwarn("Error while start '%s': %s\n\n Start canceled!", node.name, utf8(se))
raise DetailedError("Start error",
'Error while start %s\n\nStart canceled!' % node.name,
'%s' % utf8(se))
except nm.InteractionNeededError as _:
raise
except nm.BinarySelectionRequest as bsr:
raise nm.InteractionNeededError(bsr, self.start_node, {'node': node, 'force': force, 'config': config, 'force_host': force_host, 'logging': logging, 'cmd_prefix': cmd_prefix, 'path': path})
except (exceptions.StartException, nm.StartException) as e:
rospy.logwarn("Error while start '%s': %s" % (node.name, utf8(e)))
lines = utf8(e).splitlines()
last_line = lines[-1]
for line in lines:
if line:
last_line = line
raise DetailedError("Start error", 'Error while start %s:\n%s' % (node.name, last_line), '%s' % utf8(e))
except Exception as e:
print(type(e))
print(traceback.format_exc(3))
rospy.logwarn("Error while start '%s': %s" % (node.name, utf8(e)))
raise DetailedError("Start error", 'Error while start %s' % node.name, '%s' % utf8(e))
def start_nodes(self, nodes, force=False, force_host=None, use_adv_cfg=False, check_nodelets=True):
'''
        Internal method to start a list of nodes
        :param nodes: the list of nodes to start
        :type nodes: list(:class:`fkie_node_manager.node_tree_model.NodeItem`)
        :param bool force: force the start of the node, even if it is already running.
        :param str force_host: force the start of the node at the specified host.
'''
cfg_choices = dict()
cfg_nodes = dict()
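        # cfg_choices caches the user's selection per identical set of available
        # configurations, so the selection dialog is shown only once per set;
        # cfg_nodes maps each node name to the configuration used to start it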
# has_launch_files = False
for node in nodes:
            # do not start the node if it is in the ignore list and multiple nodes are selected
if (node.pid is None or (node.pid is not None and force)) and not node.is_ghost:
# test for duplicate nodes
if node.uri is None and node.has_running:
                    ret = MessageBox.question(self, 'Question', ''.join(['Some nodes, e.g. ', node.name, ' are already running on another host. If you start this node the other node will be terminated.\n Do you want to proceed?']), buttons=MessageBox.Yes | MessageBox.No)
if ret == MessageBox.No:
return
# determine the used configuration
if node.next_start_cfg is not None:
lcfg = node.next_start_cfg
cfg_nodes[node.name] = lcfg
node.launched_cfg = lcfg
node.next_start_cfg = None
else:
choices = self._get_cfg_choises(node)
ch_keys = list(choices.keys())
if ch_keys:
ch_keys.sort()
choises_str = utf8(ch_keys)
if choises_str not in list(cfg_choices.keys()):
choice, ok = self._get_cfg_userchoice(choices, node.name)
if choice is not None:
cfg_choices[choises_str] = choices[choice]
cfg_nodes[node.name] = choices[choice]
node.launched_cfg = choices[choice]
elif ok:
MessageBox.warning(self, "Start error",
'Error while start %s:\nNo configuration selected!' % node.name)
else:
break
else:
cfg_nodes[node.name] = cfg_choices[choises_str]
node.launched_cfg = cfg_choices[choises_str]
# get the advanced configuration
logging = None
diag_canceled = False
cmd_prefix = ''
if use_adv_cfg:
log_params = {'Level': {':type': 'string', ':value': nm.settings().logging.get_alternatives('loglevel')},
# 'Level (roscpp)': ('string', nm.settings().logging.get_alternatives('loglevel_roscpp')),
# 'Level (super)': ('string', nm.settings().logging.get_alternatives('loglevel_superdebug')),
'Format': {':type': 'string', ':value': nm.settings().logging.get_alternatives('console_format')}
}
params = {'Prefix': {':type': 'string',
':value': ['', 'gdb -ex run --args', 'valgrind', 'python -m pdb'],
':hint': 'Custom command prefix. It will be prepended before launch prefix.'
},
'Logging': log_params}
dia = ParameterDialog(params, store_geometry="adv_cfg_dialog")
dia.setFilterVisible(False)
dia.setWindowTitle('Start with parameters')
dia.setFocusField('Level')
diag_canceled = not dia.exec_()
if not diag_canceled:
try:
params = dia.getKeywords()
nm.settings().logging.loglevel = params['Logging']['Level']
# nm.settings().logging.loglevel_roscpp = params['Logging']['Level (roscpp)']
# nm.settings().logging.loglevel_superdebug = params['Logging']['Level (super)']
nm.settings().logging.console_format = params['Logging']['Format']
nm.settings().store_logging()
logging = nm.settings().logging
cmd_prefix = params['Prefix']
except Exception as e:
diag_canceled = True
MessageBox.warning(self, "Get advanced start parameter",
'Error while parse parameter',
utf8(e))
if not diag_canceled:
# check for nodelets
if check_nodelets:
pass
# self._check_for_nodelets(nodes)
all2start = set()
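            # names of nodes that are already queued (including associated nodes),
            # used to avoid starting the same node twice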
# put into the queue and start
for node in nodes:
if node.name in cfg_nodes and not node.name in all2start:
# remove node from question
self.message_frame.hide_question([MessageFrame.TYPE_BINARY], MessageData(node))
# add associated nodes to start
associated2start = self._get_associated_nodes([node.name], ignore=all2start)
all2start |= associated2start
found_nodes = self._get_nodes_by_name(list(associated2start))
for anode in found_nodes:
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'start %s' % anode.name,
self.start_node,
{'node': anode.node_info,
'force': force,
'config': cfg_nodes[node.node_info.name],
'force_host': force_host
})
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'start %s' % node.node_info.name,
self.start_node,
{'node': node.node_info,
'force': force,
'config': cfg_nodes[node.node_info.name],
'force_host': force_host,
'logging': logging,
'cmd_prefix': cmd_prefix
})
self._start_queue(self._progress_queue)
def _check_for_nodelets(self, nodes):
self._restart_nodelets = {}
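        # maps a configuration name to the nodelets / nodelet managers that
        # should be (re)started together with the selected nodes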
nodenames = [n.name for n in nodes]
nodelet_mngr = ''
nlmngr = ''
for node in nodes:
try:
cfg_name = node.launched_cfg
if isinstance(node.launched_cfg, LaunchConfig):
cfg_name = node.launched_cfg.Filename
nodelets = self._get_nodelets(node.name, cfg_name)
                if nodelets:
r_nn = []
for nn in nodelets:
if nn not in nodenames:
r_nn.append(nn)
if cfg_name not in self._restart_nodelets:
self._restart_nodelets[cfg_name] = []
self._restart_nodelets[cfg_name].append(nn)
if self._restart_nodelets:
nlmngr = node.name
else:
nodelet_mngr = self._get_nodelet_manager(node.name, cfg_name)
if nodelet_mngr:
if nodelet_mngr not in nodenames:
if cfg_name not in self._restart_nodelets:
self._restart_nodelets[cfg_name] = []
self._restart_nodelets[cfg_name].append(nodelet_mngr)
nodelets = self._get_nodelets(nodelet_mngr, cfg_name)
r_nn = []
for nn in nodelets:
if nn not in nodenames:
r_nn.append(nn)
self._restart_nodelets[cfg_name].append(nn)
except Exception as err:
rospy.logwarn("Error while test for nodelets: %s" % utf8(err))
if nm.settings().check_for_nodelets_at_start:
if nodelet_mngr and nodelet_mngr not in nodenames:
self.message_frame.show_question(MessageFrame.TYPE_NODELET, "Nodelet manager '%s' not in current list. (Re)Start nodelet manager and all nodelets?" % nodelet_mngr, MessageData(self._restart_nodelets))
elif self._restart_nodelets:
self.message_frame.show_question(MessageFrame.TYPE_NODELET, "Not all nodelets of manager '%s' are in the start list. (Re)Start these?" % nlmngr, MessageData(self._restart_nodelets))
def start_nodes_by_name(self, nodes, cfg, force=False, check_nodelets=True):
'''
Start nodes given in a list by their names.
:param nodes: a list with full node names
:type nodes: list(str)
'''
result = []
config = cfg
if isinstance(cfg, LaunchConfig):
config = cfg.launchname
if self.master_info is not None:
for n in nodes:
node_items = self.getNode(n)
if node_items:
node_item = node_items[0]
node_item.next_start_cfg = config
elif config:
node_info = NodeInfo(n, self.masteruri)
node_item = NodeItem(node_info)
node_item.next_start_cfg = config
if node_item is not None:
result.append(node_item)
self.start_nodes(result, force, check_nodelets=check_nodelets)
def start_nodes_after_load_cfg(self, cfg_name, nodes, force=False):
'''
Start nodes after the given configuration is loaded and applied to the model.
        :param cfg_name: the name of the configuration
:type cfg_name: str
:param nodes: the list of node names
:type nodes: list(str)
'''
if cfg_name not in self._start_nodes_after_load_cfg:
self._start_nodes_after_load_cfg[cfg_name] = set(nodes)
else:
self._start_nodes_after_load_cfg[cfg_name].update(set(nodes))
def start_nodes_after_load_cfg_clear(self):
'''
        Clears the list of nodes which should be started after a launch file is loaded.
'''
self._start_nodes_after_load_cfg = dict()
def on_force_start_nodes(self, reset_global_param=False):
'''
        Starts the selected nodes (even if they are already running). If more than one
        configuration is available for a node, the selection dialog will be shown.
'''
cursor = self.cursor()
self.ui.startButton.setEnabled(False)
self.setCursor(Qt.WaitCursor)
try:
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
self.stop_nodes(selectedNodes)
if reset_global_param:
# reset config to load global parameter
for node in selectedNodes:
for cfg in node.cfgs:
if cfg in self.launchfiles:
self.reload_global_parameter_at_next_start(cfg)
self.start_nodes(selectedNodes, True)
finally:
self.setCursor(cursor)
self.ui.startButton.setEnabled(True)
def on_start_nodes_at_host(self):
'''
        Starts the selected nodes on another host.
        :TODO: remove this method or adapt to new ParameterDialog
'''
cursor = self.cursor()
self.ui.startButton.setEnabled(False)
params = {'Host': {':type': 'string', ':value': 'localhost'}}
dia = ParameterDialog(params, store_geometry="start_node_at_host_dialog")
dia.setFilterVisible(False)
dia.setWindowTitle('Start node on...')
dia.setFocusField('host')
if dia.exec_():
try:
params = dia.getKeywords()
host = params['Host']
self.setCursor(Qt.WaitCursor)
try:
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
self.start_nodes(selectedNodes, True, host)
finally:
self.setCursor(cursor)
except Exception as e:
MessageBox.warning(self, "Start error",
'Error while parse parameter',
utf8(e))
self.ui.startButton.setEnabled(True)
def _get_cfg_choises(self, node, ignore_defaults=False):
result = {}
for c in node.cfgs:
if c and not isinstance(c, tuple):
# TODO: create name
result[c] = c
# if not isinstance(c, tuple):
# launch = self.launchfiles[c]
# result[''.join([utf8(launch.LaunchName), ' [', utf8(launch.PackageName), ']'])] = self.launchfiles[c]
# elif not ignore_defaults:
# result[' '.join(['[default]', c[0]])] = roslib.names.ns_join(c[0], 'run')
return result
def _get_cfg_userchoice(self, choices, nodename):
value = None
ok = False
# Open selection
if len(choices) == 1:
value = list(choices.keys())[0]
ok = True
elif len(choices) > 0:
items, ok = SelectDialog.getValue('Configuration selection', 'Select configuration to launch <b>%s</b>' % nodename, list(choices.keys()), True, store_geometry='cfg_select')
if items:
value = items[0]
return value, ok
def on_stop_clicked(self):
'''
        Stops the selected and running nodes. If a node can't be stopped using its
        RPC interface, it will be unregistered from the ROS master using the master's
        RPC interface.
'''
key_mod = QApplication.keyboardModifiers()
if (key_mod & Qt.ShiftModifier or key_mod & Qt.ControlModifier):
self.ui.stopButton.showMenu()
else:
cursor = self.cursor()
self.setCursor(Qt.WaitCursor)
try:
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
self.stop_nodes(selectedNodes)
finally:
self.setCursor(cursor)
def stop_node(self, node, force=False):
if node is not None and node.uri is not None and (not self._is_in_ignore_list(node.name) or force):
success = False
try:
rospy.loginfo("Stop node '%s'[%s]", utf8(node.name), utf8(node.uri))
socket.setdefaulttimeout(10)
p = xmlrpcclient.ServerProxy(node.uri)
(code, statusMessage, ignore) = p.shutdown(rospy.get_name(), '[node manager] request from %s' % self.mastername)
if code == 1:
success = True
else:
rospy.logwarn("Error while shutdown node '%s': %s", utf8(node.name), utf8(statusMessage))
except Exception as e:
err_msg = utf8(e)
if ' 111' in err_msg:
                    # this error occurs sometimes after shutting down a node
rospy.logdebug("Error while stop node '%s': %s", utf8(node.name), utf8(e))
else:
rospy.logwarn("Error while stop node '%s': %s", utf8(node.name), utf8(e))
finally:
socket.setdefaulttimeout(None)
if not success:
if node.pid and node.name != '/node_manager_daemon':
rospy.loginfo("Try to kill process %d of the node: %s", node.pid, utf8(node.name))
                # wait kill_on_stop milliseconds before killing the process, if set
if hasattr(node, 'kill_on_stop') and isinstance(node.kill_on_stop, (int, float)):
time.sleep(float(node.kill_on_stop) / 1000.0)
nm.nmd().monitor.kill_process(node.pid, nmdurl.nmduri(node.masteruri))
elif isinstance(node, NodeItem) and node.is_ghost:
# since for ghost nodes no info is available, emit a signal to handle the
# stop message in other master_view_proxy
self.stop_nodes_signal.emit(node.masteruri, [node.name])
return True
def stop_nodes(self, nodes, force=False):
'''
        Internal method to stop a list of nodes
        :param nodes: the list of nodes to stop
:type nodes: list(:class:`fkie_master_discovery.NodeInfo` <http://docs.ros.org/kinetic/api/fkie_master_discovery/html/modules.html#fkie_master_discovery.master_info.NodeInfo>)
'''
# put into the queue and start the queue handling
for node in nodes:
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'stop %s' % node.name,
self.stop_node,
{'node': node, 'force': (len(nodes) == 1) or force})
self._start_queue(self._progress_queue)
# add associated nodes to stop
associated2stop = self._get_associated_nodes([node.name for node in nodes])
found_nodes = self._get_nodes_by_name(list(associated2stop))
for node in found_nodes:
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'stop %s' % node.name,
self.stop_node,
{'node': node, 'force': (len(nodes) == 1) or force})
self._start_queue(self._progress_queue)
def stop_nodes_by_name(self, nodes, force=False, ignore=[], only_local=True):
'''
Stop nodes given in a list by their names.
:param nodes: a list with full node names
:type nodes: list(str)
'''
found_nodes = self._get_nodes_by_name(nodes, ignore, only_local)
self.stop_nodes(found_nodes, force)
def _get_nodes_by_name(self, nodes, ignore=[], only_local=True):
found_nodes = []
if self.master_info is not None:
req_nodes = []
for n in nodes:
if n not in ignore:
req_nodes.append(n)
found_nodes = self.node_tree_model.get_node_items_by_name(req_nodes, only_local)
return found_nodes
def _get_associated_nodes(self, nodenames, config='', ignore=set()):
result = set()
ignore_all = set(nodenames) | ignore
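        # recursively collect all transitively associated nodes; ignore_all
        # prevents revisiting already handled nodes and breaks association cycles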
with self._associations_lock:
for nodename in nodenames:
if config:
if config in self._associations:
associations = self._associations[config]
associated_nodes = set(associations[nodename])
result |= associated_nodes - ignore_all
result |= self._get_associated_nodes(list(result), config, ignore=ignore_all)
else:
for _cfg, associations in self._associations.items():
if nodename in associations:
associated_nodes = set(associations[nodename])
new_nodes = associated_nodes - ignore_all
ignore_all |= new_nodes
result |= new_nodes
result |= self._get_associated_nodes(list(new_nodes), config, ignore_all)
return result
def kill_node(self, node, force=False):
if node is not None and node.uri is not None and (not self._is_in_ignore_list(node.name) or force):
pid = node.pid
if pid is None:
# try to get the process id of the node
try:
socket.setdefaulttimeout(10)
rpc_node = xmlrpcclient.ServerProxy(node.uri)
_, _, pid = rpc_node.getPid(rospy.get_name()) # _:=code, msg
except Exception:
pass
finally:
socket.setdefaulttimeout(None)
# kill the node
if pid is not None:
try:
if self._has_nmd:
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'kill %s (%s)' % (node.name, utf8(pid)),
nm.nmd().monitor.kill_process,
{'pid': pid, 'grpc_url': self._grpc_from_node(node)})
else:
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'kill %s (%s)' % (node.name, utf8(pid)),
nm.starter().kill,
{'host': self.getHostFromNode(node),
'pid': pid,
'auto_pw_request': False,
'user': self.current_user
})
self._start_queue(self._progress_queue)
except Exception as e:
rospy.logwarn("Error while kill the node %s: %s", utf8(node.name), utf8(e))
raise DetailedError("Kill error",
''.join(['Error while kill the node ', node.name]),
utf8(e))
return True
def killall_roscore(self):
host = get_hostname(self.masteruri)
if host:
try:
if not nm.is_local(self.mastername):
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'killall roscore on %s' % host,
nm.starter().killall_roscore,
{'host': host, 'user': self.current_user})
self._start_queue(self._progress_queue)
else:
nm.starter().killall_roscore(host, self.current_user)
except Exception as e:
rospy.logwarn("Error while killall roscore on %s: %s" % (host, utf8(e)))
raise DetailedError("Killall roscore error",
'Error while killall roscore',
'%s' % utf8(e))
return True
def on_kill_pid(self, pid):
ret = MessageBox.question(self, "Kill Process", "You are going to kill process with ID %d\nContinue?" % pid, buttons=MessageBox.Ok | MessageBox.Cancel)
ret = (ret == MessageBox.Ok)
if ret:
host = get_hostname(self.masteruri)
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'kill %d' % pid,
nm.starter().kill,
{'host': host,
'pid': pid
})
self._start_queue(self._progress_queue)
def on_kill_nodes(self):
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
        # put into the queue and start the queue handling
for node in selectedNodes:
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'kill %s' % node.name,
self.kill_node,
{'node': node, 'force': (len(selectedNodes) == 1)})
self._start_queue(self._progress_queue)
def unregister_node(self, node, force=False):
if node is not None and node.uri is not None and (not self._is_in_ignore_list(node.name) or force):
# stop the node?
# try:
# p = xmlrpclib.ServerProxy(node.uri)
# p.shutdown(rospy.get_name(), ''.join(['[node manager] request from ', self.hostname]))
# except Exception, e:
# rospy.logwarn("Error while stop node '%s': %s", utf8(node.name), utf8(e))
# self.ui.stopButton.setEnabled(False)
# unregister all entries of the node from ROS master
try:
socket.setdefaulttimeout(10)
master = xmlrpcclient.ServerProxy(node.masteruri)
master_multi = xmlrpcclient.MultiCall(master)
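                # collect all unregister calls and send them to the ROS master
                # in a single multicall request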
# master_multi.deleteParam(node.name, node.name)
for p in node.published:
rospy.loginfo("unregister publisher '%s' [%s] from ROS master: %s", p, node.name, node.masteruri)
master_multi.unregisterPublisher(node.name, p, node.uri)
for t in node.subscribed:
rospy.loginfo("unregister subscriber '%s' [%s] from ROS master: %s", t, node.name, node.masteruri)
master_multi.unregisterSubscriber(node.name, t, node.uri)
if self.master_state is not None:
for s in node.services:
rospy.loginfo("unregister service '%s' [%s] from ROS master: %s", s, node.name, node.masteruri)
service = self.master_info.getService(s)
if not (service is None):
master_multi.unregisterService(node.name, s, service.uri)
r = master_multi()
for code, msg, _ in r:
if code != 1:
rospy.logwarn("unregistration failed: %s", msg)
except Exception as e:
rospy.logwarn("Error while unregister node %s: %s", utf8(node.name), utf8(e))
raise DetailedError("Unregister error",
''.join(['Error while Unregister node ', node.name]),
utf8(e))
finally:
socket.setdefaulttimeout(None)
return True
def on_unregister_nodes(self):
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
        # put into the queue and start the queue handling
for node in selectedNodes:
if node.pid is None or len(selectedNodes) == 1:
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'unregister node %s' % node.name,
self.unregister_node,
{'node': node, 'force': (len(selectedNodes) == 1)})
self._start_queue(self._progress_queue)
def on_stop_context_toggled(self, state):
menu = QMenu(self)
self.killAct = QAction("&Kill Node", self, shortcut=QKeySequence.New, statusTip="Kill selected node", triggered=self.kill_nodes)
self.unregAct = QAction("&Unregister Nodes...", self, shortcut=QKeySequence.Open, statusTip="Open an existing file", triggered=self.unreg_nodes)
menu.addAction(self.killAct)
menu.addAction(self.unregAct)
menu.exec_(self.ui.stopContextButton.pos())
def getHostFromNode(self, node):
'''
        If the node is running, the hostname from the node URI will be returned. Otherwise
        it tries to get the host from the launch configuration. If the configuration
        contains no machine assignment for this node, the host of the ROS master URI
        will be used.
:param node:
:type node: :class:`fkie_master_discovery.NodeInfo` <http://docs.ros.org/kinetic/api/fkie_master_discovery/html/modules.html#fkie_master_discovery.master_info.NodeInfo>
'''
if node.uri is not None:
return get_hostname(node.uri)
# get hostname from host item where the node is located
host = node.host
if host:
return host
# try to get it from the configuration,
# TODO: get it from node manager daemon?
for c in node.cfgs:
if isstring(c):
launch_config = self.__configs[c]
if node.name in launch_config.nodes:
url, _path = nmdurl.split(c, with_scheme=True)
return get_hostname(url)
# if item is not None and item.machine_name and not item.machine_name == 'localhost':
# return launch_config.Roscfg.machines[item.machine_name].address
# return the host of the assigned ROS master
return get_hostname(node.masteruri)
def _grpc_from_node(self, node):
'''
        If the node is running, the gRPC URL of the masteruri of the node will be returned.
        Otherwise it tries to get the gRPC URL from the launch configuration, or the
        ROS master URI will be used.
:param node:
:type node: :class:`fkie_master_discovery.NodeInfo` <http://docs.ros.org/kinetic/api/fkie_master_discovery/html/modules.html#fkie_master_discovery.master_info.NodeInfo>
'''
# get hostname from host item where the node is located
host = node.host
if host:
return nmdurl.nmduri('http://%s:%d' % (host, get_port(self.masteruri)))
if node.masteruri is not None:
return nmdurl.nmduri(node.masteruri)
# try to get it from the configuration,
# TODO: get it from node manager daemon?
for c in node.cfgs:
if isstring(c):
launch_config = self.__configs[c]
if node.name in launch_config.nodes:
url, _path = nmdurl.split(c, with_scheme=True)
return url
return nmdurl.nmduri(self.masteruri)
def on_io_clicked(self, activated=False):
'''
Shows IO of the selected nodes.
'''
# key_mod = QApplication.keyboardModifiers()
# use_mod = key_mod & Qt.ShiftModifier or key_mod & Qt.ControlModifier
# if (key_mod & Qt.ShiftModifier or key_mod & Qt.ControlModifier):
# self.ui.ioButton.showMenu()
# else:
cursor = self.cursor()
self.setCursor(Qt.WaitCursor)
try:
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
if selectedNodes:
ret = True
if len(selectedNodes) > 5:
ret = MessageBox.question(self, "Show IO", "You are going to open the IO of " + utf8(len(selectedNodes)) + " nodes at once\nContinue?", buttons=MessageBox.Ok | MessageBox.Cancel)
ret = (ret == MessageBox.Ok)
if ret:
queue = self._progress_queue_prio
                    # use the normal queue if it does not already contain many processes
if self._progress_queue.count() < 5:
queue = self._progress_queue
key_mod = QApplication.keyboardModifiers()
use_log_widget = nm.settings().use_internal_log_widget
if activated and (key_mod & Qt.ShiftModifier):
# show ROS log if shift or control was pressed while activating
if use_log_widget or key_mod & Qt.ControlModifier:
for node in selectedNodes:
self.main_window.open_screen_dock(self.masteruri, screen_name='', nodename=node.name, user=self.current_user)
else:
for node in selectedNodes:
queue.add2queue(utf8(uuid.uuid4()),
'show log of %s' % node.name,
nm.starter().openLog,
{'nodename' : node.name,
'host': self.getHostFromNode(node),
'user': self.current_user,
'only_screen': False
})
else:
for node in selectedNodes:
queue.add2queue(utf8(uuid.uuid4()),
'show IO of %s' % node.name,
nm.screen().open_screen,
{'node': node.name,
'grpc_url': self._grpc_from_node(node),
'auto_item_request': False,
'use_log_widget': activated and (use_log_widget or key_mod & Qt.ControlModifier),
'user': self.current_user,
'pw': None,
'items': [],
'use_nmd': self._has_nmd
})
self._start_queue(queue)
else:
self.on_show_all_screens()
finally:
self.setCursor(cursor)
def _on_no_screen_error(self, nodename, host):
msg = nm.NoScreenOpenLogRequest(nodename, host).msg()
rospy.loginfo('%s' % msg)
muris = nm.nameres().masterurisbyaddr(host)
for muri in muris:
if muri == self.masteruri:
nodes = self.node_tree_model.get_tree_node(nodename, muri)
for node in nodes:
node.has_screen = False
if nm.settings().show_noscreen_error:
self.info_frame.show_info(MessageFrame.TYPE_NOSCREEN, 'No screens found for:', MessageData('', [nodename]))
def on_kill_screens(self):
'''
        Kills the selected screens, if any are available.
'''
cursor = self.cursor()
self.setCursor(Qt.WaitCursor)
try:
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
for node in selectedNodes:
if not self._is_in_ignore_list(node.name):
self._progress_queue.add2queue(utf8(uuid.uuid4()),
"kill screen of %s" % node.name,
nm.screen().kill_screens,
{'node': node.name,
'grpc_url': self._grpc_from_node(node),
'auto_ok_request': False,
'user': self.current_user
})
self._start_queue(self._progress_queue)
finally:
self.setCursor(cursor)
def on_show_all_screens(self):
'''
Shows all available screens.
'''
cursor = self.cursor()
self.setCursor(Qt.WaitCursor)
try:
grpc_url = nmdurl.nmduri(self.masteruri)
sel_screen = []
try:
screens = nm.nmd().screen.get_all_screens(grpc_url)
sel_screen, _ok = SelectDialog.getValue('Open screen', '', list(screens.keys()), False, False, self, store_geometry='open_screen')
except Exception as e:
rospy.logwarn("Error while get screen list: %s", utf8(e))
MessageBox.warning(self, "Screen list error",
"Error while get screen list from '%s'" % grpc_url,
utf8(e))
host = get_hostname(self.masteruri)
for screen in sel_screen:
try:
if not nm.screen().open_screen_terminal(self.masteruri, screen, screen, False, self.current_user):
pass
except Exception as e:
rospy.logwarn("Error while show IO for %s: %s", utf8(screen), utf8(e))
MessageBox.warning(self, "Show IO error",
"Error while show IO '%s' on '%s'" % (screen, host),
utf8(e))
finally:
self.setCursor(cursor)
def on_multiple_screens(self, grpc_url, screens):
muri = nmdurl.masteruri(grpc_url)
self.node_tree_model.clear_multiple_screens(muri)
        for node_name, _screens in screens.items():
            nodes = self.node_tree_model.get_tree_node(node_name, muri)
            for node in nodes:
                node.has_multiple_screens = True
def on_log_clicked(self):
'''
Shows log files of the selected nodes.
'''
try:
only_screen = True
key_mod = QApplication.keyboardModifiers()
if (key_mod & Qt.ShiftModifier or key_mod & Qt.ControlModifier):
only_screen = False
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
ret = True
if len(selectedNodes) > 5:
ret = MessageBox.question(self, "Show Log", "You are going to open the logs of " + utf8(len(selectedNodes)) + " nodes at once\nContinue?", buttons=MessageBox.Ok | MessageBox.Cancel)
ret = (ret == MessageBox.Ok)
if ret:
for node in selectedNodes:
self._progress_queue_prio.add2queue(utf8(uuid.uuid4()),
'show log of %s' % node.name,
nm.starter().openLog,
{'nodename' : node.name,
'host': self.getHostFromNode(node),
'user': self.current_user,
'only_screen': only_screen
})
self._start_queue(self._progress_queue_prio)
except Exception as e:
print(traceback.format_exc(3))
rospy.logwarn("Error while show log: %s", utf8(e))
MessageBox.warning(self, "Show log error",
'Error while show Log',
utf8(e))
def show_log(self, nodename, host, roslog=True):
self._progress_queue_prio.add2queue(utf8(uuid.uuid4()),
'show log of %s' % nodename,
nm.starter().openLog,
{'nodename' : nodename,
'host': host,
'user': self.current_user,
'only_screen': not roslog,
'only_roslog': roslog
})
self._start_queue(self._progress_queue_prio)
def on_log_path_copy(self):
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
nodenames = []
for n in selectedNodes:
nodenames.append(n.name)
try:
host = get_hostname(self.masteruri)
path_on_host = nm.starter().get_log_path(host, nodenames, True)
QApplication.clipboard().setText(''.join([getpass.getuser() if self.is_local else self.current_user, '@', host, ':', path_on_host]))
except Exception as e:
MessageBox.warning(self, "Get log path",
'Error while get log path',
utf8(e))
def on_log_delete_clicked(self):
'''
Deletes log files of the selected nodes.
'''
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
for node in selectedNodes:
self._progress_queue_prio.add2queue(utf8(uuid.uuid4()),
"delete Log of '%s'" % node.name,
nm.starter().delete_log,
{'nodename': node.name,
'grpc_uri': self._grpc_from_node(node),
'auto_pw_request': False,
'user': self.current_user
})
self._start_queue(self._progress_queue_prio)
def on_dynamic_config_clicked(self):
'''
Opens the dynamic configuration dialogs for selected nodes.
'''
if self.master_info is not None:
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
for n in selectedNodes:
try:
nodes = sorted([srv_name[:-len('/set_parameters')] for srv_name, srv in self.master_info.services.items() if (srv_name.endswith('/set_parameters') and n.name in srv.serviceProvider)])
items = []
if len(nodes) == 1:
items = nodes
elif len(nodes) > 1:
items, _ = SelectDialog.getValue('Dynamic configuration selection', '', [i for i in nodes], store_geometry='dynamic_cfg')
if items is None:
items = []
if len(items) > 3:
                        ret = MessageBox.question(self, 'Start dynamic reconfigure', 'It will start %s dynamic reconfigure nodes?\n\n Are you sure?' % utf8(len(items)), buttons=MessageBox.Yes | MessageBox.No)
if ret != MessageBox.Yes:
return
for node in items:
env = dict(os.environ)
env["ROS_MASTER_URI"] = utf8(self.master_info.masteruri)
rospy.loginfo("Start dynamic reconfiguration for '%s'" % node)
_ = SupervisedPopen(['rosrun', 'fkie_node_manager', 'dynamic_reconfigure', node, '__ns:=dynamic_reconfigure'], env=env, object_id=node, description='Start dynamic reconfiguration for %s failed' % node)
except Exception as e:
rospy.logwarn("Start dynamic reconfiguration for '%s' failed: %s" % (n.name, utf8(e)))
MessageBox.warning(self, "Start dynamic reconfiguration error",
'Start dynamic reconfiguration for %s failed!' % n.name,
utf8(e))
def on_edit_config_clicked(self):
        '''
        Opens the launch configuration of the selected nodes in the XML editor.
        '''
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
for node in selectedNodes:
choices = self._get_cfg_choises(node, True)
choice, ok = self._get_cfg_userchoice(choices, node.name)
config = choices[choice] if choices and choice else ''
if ok and config:
self.request_xml_editor.emit(config, 'name="%s"' % os.path.basename(node.name))
def on_edit_rosparam_clicked(self):
        '''
        Opens a dialog to edit the ROS parameters of the selected nodes.
        '''
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
for node in selectedNodes:
# set the parameter in the ROS parameter server
try:
inputDia = MasterParameterDialog(node.masteruri if node.masteruri is not None else self.masteruri, ''.join([node.name, roslib.names.SEP]), parent=self, store_geometry="edit_param_dialog")
inputDia.setWindowTitle('%s - %s' % (os.path.basename(node.name), "parameter"))
if node.has_launch_cfgs(node.cfgs):
inputDia.add_warning("The changes may not have any effect, because the launch file was also loaded and the parameter in the launch file will be reloaded on restart of the ROS node.")
inputDia.show()
except Exception:
rospy.logwarn("Error on retrieve parameter for %s: %s", utf8(node.name), traceback.format_exc(1))
def on_close_clicked(self):
'''
Opens a dialog to select configurations to close or stop all nodes
(with roscore) or shutdown the host.
'''
choices = dict()
for grpc_path, _ in self.__configs.items():
try:
package = utf8(package_name(grpc_path)[0])
choices['%s [%s]' % (os.path.basename(grpc_path), package)] = grpc_path
except ValueError as val_err:
rospy.logwarn(val_err)
cfg_items = list(choices.keys())
cfg_items.sort()
res = SelectDialog.getValue('Close/Stop/Shutdown', '',
cfg_items, False, False,
self, checkitem1='stop ROS',
checkitem2='shutdown host',
store_geometry='close_cfg')
cfgs, _, stop_nodes, shutdown = res[0], res[1], res[2], res[3]
# close configurations
for config in cfgs:
self._close_cfg(choices[config])
if stop_nodes:
self._on_stop_kill_roscore = True
# stop all nodes, system nodes at the end
ignore_nodes = [rospy.get_name(), '/master_discovery', '/rosout']
self.stop_nodes_by_name(self.get_nodes_runningIfLocal(), True, ignore_nodes)
if shutdown:
self.poweroff()
else:
self.stop_nodes_by_name(['/master_discovery'], True)
self.stop_nodes_by_name(['/node_manager'], True)
elif shutdown:
self.poweroff()
self.updateButtons()
self.update_robot_icon()
def poweroff(self):
try:
if nm.is_local(self.mastername):
ret = MessageBox.warning(self, "ROS Node Manager",
"Do you really want to shutdown localhost?",
buttons=MessageBox.Ok | MessageBox.Cancel)
if ret == MessageBox.Cancel:
return
self._on_stop_poweroff = True
            # on shutdown stop only the /master_discovery node to remove it from lists
# in other remote nodes
self.stop_nodes_by_name(['/master_discovery'], True)
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'poweroff `%s`' % self.mastername,
nm.starter().poweroff,
{'host': '%s' % self.mastername})
self._start_queue(self._progress_queue)
except (Exception, nm.StartException) as emsg:
rospy.logwarn("Error while poweroff %s: %s", self.mastername, utf8(emsg))
MessageBox.warning(self, "Run error",
'Error while poweroff %s' % self.mastername,
'%s' % utf8(emsg))
def _close_cfg(self, cfg):
try:
self.remove_cfg_from_model(cfg)
nm.nmd().launch.unload_launch(cfg, self.masteruri)
del self.__configs[cfg]
nm.nmd().launch.get_nodes_threaded(cfg)
except exceptions.ResourceNotFound:
del self.__configs[cfg]
nm.nmd().launch.get_nodes_threaded(cfg)
except Exception:
rospy.logwarn(traceback.format_exc())
def on_topic_echo_clicked(self, topics=[]):
'''
Shows the output of the topic in a terminal.
'''
self._show_topic_output(False, use_ssh=False, topics=topics)
def on_topic_hz_clicked(self):
'''
Shows the hz of the topic in a terminal.
'''
self._show_topic_output(True)
def on_topic_hz_ssh_clicked(self):
'''
Shows the hz of the topic using ssh.
'''
self._show_topic_output(True, use_ssh=True)
def on_topic_pub_clicked(self):
selectedTopics = self.topicsFromIndexes(self.ui.topicsView.selectionModel().selectedIndexes())
if len(selectedTopics) > 0:
for topic in selectedTopics:
if not self._start_publisher(topic.name, topic.type):
break
else: # create a new topic
# fill the input fields
            # determine the list of all available message types
msg_types = []
for ppath, pname in nm.nmd().file.get_packages(nmdurl.nmduri(self.masteruri)).items():
#:TODO: get message types from remote nmduri
_guri, lpath = nmdurl.split(ppath, with_scheme=False)
import rosmsg
for f in rosmsg._list_types('%s/msg' % lpath, 'msg', rosmsg.MODE_MSG):
msg_types.append("%s/%s" % (pname, f))
msg_types.sort()
fields = {'Type': {':type': 'string', ':value': msg_types}, 'Name': {':type': 'string', ':value': ['']}}
# create a dialog
dia = ParameterDialog(fields, parent=self, store_geometry="topic_pub_dialog")
dia.setWindowTitle('Publish to topic')
dia.setFilterVisible(False)
dia.setFocusField('Name')
if dia.exec_():
params = dia.getKeywords()
try:
if params['Name'] and params['Type']:
try:
self._start_publisher(params['Name'], params['Type'])
except Exception as e:
print(traceback.format_exc(1))
rospy.logwarn("Publish topic '%s' failed: %s", utf8(params['Name']), utf8(e))
MessageBox.warning(self, "Publish topic error",
''.join(['Publish topic ', params['Name'], ' failed!']),
utf8(e))
else:
MessageBox.warning(self, "Invalid name or type",
"Can't publish to topic '%s' with type '%s'!" % (params['Name'], params['Type']))
except (KeyError, ValueError) as e:
MessageBox.warning(self, "Warning",
'Error while add a parameter to the ROS parameter server',
utf8(e))
def start_publisher(self, topic_name, republish=False):
'''
        Starts a publisher to the given topic.
'''
if self.master_info is not None:
topic = self.master_info.getTopic("%s" % topic_name)
if topic is not None:
self._start_publisher(topic.name, topic.type, republish)
else:
rospy.logwarn("Error while start publisher, topic not found: %s" % topic_name)
def _start_publisher(self, topic_name, topic_type, republish=False):
try:
topic_name = roslib.names.ns_join(roslib.names.SEP, topic_name)
mclass = roslib.message.get_message_class(topic_type)
if mclass is None:
MessageBox.warning(self, "Publish error",
'Error while publish to %s' % topic_name,
'invalid message type: %s\nIf this is a valid message type, perhaps you need to run "rosmake"' % topic_type)
return
slots = mclass.__slots__
types = mclass._slot_types
default_topic_values = {}
rate_values = ['once', 'latch', '1']
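            # supported publish rates: 'latch' (default), 'once' for a single
            # message, or a numeric rate in Hz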
if republish and topic_name in self.__republish_params:
default_topic_values = self.__republish_params[topic_name][topic_type]
rate_values = self.__republish_params[topic_name]['! Publish rate']
args = ServiceDialog._params_from_slots(slots, types, default_topic_values)
p = {'! Publish rate': {':type': 'string', ':value': rate_values}, topic_type: args}
dia = ParameterDialog(p, store_geometry="start_publisher_dialog")
dia.setWindowTitle('Publish to %s' % topic_name)
dia.showLoadSaveButtons()
dia.setFocusField('! Publish rate')
if dia.exec_():
params = dia.getKeywords()
# store params for republish
self.__republish_params[topic_name] = params
rate = params['! Publish rate']
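                # translate the selected rate into rostopic pub options:
                # latched (default), --once, or -r <hz>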
opt_str = ''
opt_name_suf = '__latch_'
if rate == 'latch':
opt_str = ''
elif rate == 'once' or rate == '-1':
opt_str = '--once'
opt_name_suf = '__once_'
else:
try:
i = 0
try:
i = int(rate)
except Exception:
i = float(rate)
if i > 0:
opt_str = ''.join(['-r ', rate])
opt_name_suf = '__%sHz_' % (utf8(rate).replace('.', '_'))
except Exception:
pass
# remove empty lists
topic_params = dict()
if topic_type in params:
topic_params = self._rem_empty_lists(params[topic_type])
pub_cmd = 'pub %s %s "%s" %s' % (topic_name, topic_type, str(topic_params), opt_str)
rospy.logdebug("rostopic parameter: %s" % pub_cmd)
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'start publisher for %s' % topic_name,
nm.starter().runNodeWithoutConfig,
{'host': nm.nameres().address(self.masteruri),
'package': 'rostopic',
'binary': 'rostopic',
'name': 'rostopic_pub%s%s%s' % (topic_name, opt_name_suf, str(rospy.Time.now())),
'args': [pub_cmd],
'masteruri': self.masteruri,
'use_nmd': True,
'auto_pw_request': False,
'user': self.current_user
})
self._start_queue(self._progress_queue)
return True
else:
return False
except Exception as e:
rospy.logwarn("Publish topic '%s' failed: %s", utf8(topic_name), utf8(e))
MessageBox.warning(self, "Publish topic error",
''.join(['Publish topic ', topic_name, ' failed!']),
utf8(e))
print(utf8(traceback.format_exc(1)))
return False
def _rem_empty_lists(self, param_dict):
result = dict()
for key, value in param_dict.items():
if isinstance(value, dict):
result[key] = self._rem_empty_lists(value)
elif not (isinstance(value, list) and not value):
result[key] = value
return result
def on_topic_pub_stop_clicked(self, topic_name=''):
topic_names = []
if topic_name:
topic_names.append(topic_name)
else:
selectedTopics = self.topicsFromIndexes(self.ui.topicsView.selectionModel().selectedIndexes())
topic_names = ['%s' % topic.name for topic in selectedTopics]
if self.master_info is not None:
nodes2stop = []
for topic in topic_names:
topic_prefix = '/rostopic_pub%s_' % topic
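                # publisher nodes started by _start_publisher are named with this
                # prefix, so they can be found and stopped here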
node_names = self.master_info.node_names
for n in node_names:
if n.startswith(topic_prefix):
nodes2stop.append(n)
self.stop_nodes_by_name(nodes2stop)
def _show_topic_output(self, show_hz_only, use_ssh=False, topics=[]):
'''
Shows the output of the topic in a terminal.
'''
selected_topics = topics
if not selected_topics:
selected_topics = self.topicsFromIndexes(self.ui.topicsView.selectionModel().selectedIndexes())
ret = True
if len(selected_topics) > 5:
ret = MessageBox.question(self, "Show echo", "You are going to open the echo of " + utf8(len(selected_topics)) + " topics at once\nContinue?", buttons=MessageBox.Ok | MessageBox.Cancel)
ret = (ret == MessageBox.Ok)
if ret:
for topic in selected_topics:
self._add_topic_output2queue(topic, show_hz_only, use_ssh)
def show_topic_output(self, topic_name, show_hz_only, use_ssh=False):
'''
Shows the topic output in a new window.
'''
if self.master_info is not None:
topic = self.master_info.getTopic("%s" % topic_name)
if topic is not None:
self._add_topic_output2queue(topic, show_hz_only, use_ssh)
else:
rospy.logwarn("topic not found: %s" % topic_name)
def _add_topic_output2queue(self, topic, show_hz_only, use_ssh=False):
try:
namespace = rospy.names.namespace(topic.name)
nodename = os.path.basename(topic.name)
namespace = '/echo_%s%s%s%s' % ('hz_' if show_hz_only else '', 'ssh_' if use_ssh else '', utf8(get_hostname(self.masteruri)), namespace)
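            # the namespace of the echo node encodes the mode (hz/ssh) and the host,
            # to keep the node names of multiple echo dialogs unique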
args = []
# subscription parameter
args.append("--echo %s %s %s %s" % (topic.name, topic.type, '--hz' if show_hz_only else '', '--ssh' if use_ssh else ''))
args.append("__ns:=%s" % namespace)
self._progress_queue.add2queue(utf8(uuid.uuid4()),
                                           'start subscriber for %s' % topic.name,
nm.starter().runNodeWithoutConfig,
{'host': 'localhost',
'package': 'fkie_node_manager',
'binary': 'node_manager',
'name': nodename,
'args': args,
'masteruri': self.masteruri,
'use_nmd': False,
'auto_pw_request': False,
'user': self.current_user
})
self._start_queue(self._progress_queue)
self.__echo_topics_dialogs.add(rospy.names.ns_join(namespace, nodename))
except Exception as e:
rospy.logwarn("Echo topic '%s' failed: %s" % (topic.name, utf8(e)))
MessageBox.warning(self, "Echo of topic error",
'Echo of topic %s failed!' % topic.name,
'%s' % utf8(e))
def on_service_call_clicked(self, services=[]):
'''
        Opens a call dialog for each selected service.
'''
selected_services = services
if not selected_services:
selected_services = self.servicesFromIndexes(self.ui.servicesView.selectionModel().selectedIndexes())
try:
for service in selected_services:
param = ServiceDialog(service, self)
param.show()
except Exception as e:
rospy.logwarn("Call service '%s' failed: %s" % (service.name, utf8(e)))
MessageBox.warning(self, "Call service error",
'Call service %s failed!' % service.name,
'%s' % utf8(e))
def service_call(self, service_name):
service = self.master_info.getService(utf8(service_name))
if service is not None:
try:
param = ServiceDialog(service, self)
param.show()
except Exception as e:
rospy.logwarn("Call service '%s' failed: %s" % (service.name, utf8(e)))
MessageBox.warning(self, "Call service error",
'Call service %s failed!' % service.name,
'%s' % utf8(e))
def _restore_expand_state(self, tree_view, proxy_model):
'''
Expand the first item and all selected items.
'''
tree_view.collapseAll()
for selected in tree_view.selectionModel().selectedIndexes():
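            # expand all ancestors of the selected index, so the selection
            # becomes visible again after collapseAll()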
index = selected
while index is not None and index.isValid():
item = proxy_model.sourceModel().itemFromIndex(index)
tree_view.setExpanded(index, True)
index = index.parent()
        # expand the root items. On sync the NodesView contains also other hosts; in this case only the local host will be expanded.
for root_idx in range(proxy_model.sourceModel().rowCount()):
source_index = proxy_model.sourceModel().index(root_idx, 0)
item = proxy_model.sourceModel().itemFromIndex(source_index)
if type(item) in [HostItem] and not item._local:
continue
mapped_index = proxy_model.mapFromSource(source_index)
tree_view.setExpanded(mapped_index, True)
def on_node_filter_changed(self, text):
'''
Filter the displayed nodes
'''
self.node_proxy_model.setFilterRegExp(QRegExp(text, Qt.CaseInsensitive, QRegExp.Wildcard))
if text:
self.ui.nodeTreeView.expandAll()
else:
self._restore_expand_state(self.ui.nodeTreeView, self.node_proxy_model)
def on_topic_filter_changed(self, text):
'''
Filter the displayed topics
'''
self.topic_proxyModel.setFilterRegExp(QRegExp(text, Qt.CaseInsensitive, QRegExp.Wildcard))
if text:
self.ui.topicsView.expandAll()
else:
self._restore_expand_state(self.ui.topicsView, self.topic_proxyModel)
def on_service_filter_changed(self, text):
'''
Filter the displayed services
'''
self.service_proxyModel.setFilterRegExp(QRegExp(text, Qt.CaseInsensitive, QRegExp.Wildcard))
if text:
self.ui.servicesView.expandAll()
else:
self._restore_expand_state(self.ui.servicesView, self.service_proxyModel)
def on_parameter_filter_changed(self, text):
'''
Filter the displayed parameter
'''
self.parameter_proxyModel.setFilterRegExp(QRegExp(text, Qt.CaseInsensitive, QRegExp.Wildcard))
def on_get_parameter_clicked(self):
'''
Requests parameter list from the ROS parameter server.
'''
self.parameterHandler.requestParameterList(self.masteruri)
def on_add_parameter_clicked(self):
'''
Adds a parameter to the ROS parameter server.
'''
selectedParameter = self.parameterFromIndexes(self.ui.parameterView.selectionModel().selectedIndexes())
ns = '/'
if selectedParameter:
ns = roslib.names.namespace(selectedParameter[0][0])
fields = {'name': {':value': ns}, 'type': {':type': 'string', ':value': ['string', 'int', 'float', 'bool', 'list']}, 'value': {':value': ''}}
newparamDia = ParameterDialog(fields, parent=self, store_geometry="add_parameter_dialog")
newparamDia.setWindowTitle('Add new parameter')
newparamDia.setFilterVisible(False)
newparamDia.accepted.connect(self._on_add_parameter_accepted)
newparamDia.setFocusField('name')
newparamDia.show()
newparamDia.raise_()
newparamDia.activateWindow()
def _on_add_parameter_accepted(self):
if isinstance(self.sender(), ParameterDialog):
params = self.sender().getKeywords()
try:
if params['type'] == 'int':
value = int(params['value'])
elif params['type'] == 'float':
value = float(params['value'])
elif params['type'] == 'bool':
value = bool(params['value'].lower() in ("yes", "true", "t", "1"))
elif params['type'] == 'list':
try:
value = [ruamel.yaml.load(params['value'], Loader=ruamel.yaml.Loader)]
                        # if there is no YAML, load() will return an
                        # empty string. We want an empty list instead
                        # for our representation of empty.
if value is None:
value = []
except ruamel.yaml.MarkedYAMLError as e:
MessageBox.warning(self, self.tr("Warning"), "yaml error: %s" % utf8(e), buttons=MessageBox.Ok)
return
else:
value = params['value']
self.parameterHandler.deliverParameter(self.masteruri, {params['name']: value})
self.parameterHandler.requestParameterList(self.masteruri)
self.sender().close()
except (KeyError, ValueError) as e:
MessageBox.warning(self, "Warning",
'Error while add a parameter to the ROS parameter server',
utf8(e))
def on_delete_parameter_clicked(self):
'''
Deletes the parameter from the ROS parameter server.
'''
selectedParameter = self.parameterFromIndexes(self.ui.parameterView.selectionModel().selectedIndexes())
try:
socket.setdefaulttimeout(15)
name = rospy.get_name()
master = xmlrpcclient.ServerProxy(self.masteruri)
master_multi = xmlrpcclient.MultiCall(master)
for (key, _) in selectedParameter: # _ := value
master_multi.deleteParam(name, key)
r = master_multi()
for code, msg, parameter in r:
if code != 1:
rospy.logwarn("Error on delete parameter '%s': %s", parameter, msg)
except Exception:
rospy.logwarn("Error on delete parameter: %s", utf8(traceback.format_exc(1)))
MessageBox.warning(self, "Warning",
'Error while delete a parameter to the ROS parameter server',
utf8(traceback.format_exc(1)))
else:
self.on_get_parameter_clicked()
finally:
socket.setdefaulttimeout(None)
def on_save_parameter_clicked(self):
'''
Stores selected parameter to a file.
'''
selectedParameter = self.parameterFromIndexes(self.ui.parameterView.selectionModel().selectedIndexes())
if selectedParameter:
# (fileName, filter)
(fileName, _) = QFileDialog.getSaveFileName(self,
"Save parameter",
nm.settings().current_dialog_path,
"YAML files (*.yaml);;All files (*)")
if fileName:
nm.settings().current_dialog_path = os.path.dirname(fileName)
try:
with open(fileName, 'w+') as f:
values = dict()
# convert ROS namespaces of parameters to YAML namespaces
for (key, value) in selectedParameter:
keys = key.strip(rospy.names.SEP).split(rospy.names.SEP)
curr_v = values
for k in keys:
if k in curr_v:
curr_v = curr_v[k]
elif k != keys[-1]:
curr_v[k] = dict()
curr_v = curr_v[k]
else:
curr_v[k] = value
buf = ruamel.yaml.compat.StringIO()
ruamel.yaml.dump(values, buf, Dumper=ruamel.yaml.RoundTripDumper)
f.write(buf.getvalue())
except Exception as e:
print(utf8(traceback.format_exc(1)))
MessageBox.warning(self, "Save parameter Error",
'Error while save parameter',
utf8(e))
def on_transfer_parameter_clicked(self):
'''
Copy selected parameter to local ROS-Master.
'''
selectedParameter = self.parameterFromIndexes(self.ui.parameterView.selectionModel().selectedIndexes())
if selectedParameter:
try:
params = {}
for (key, value) in selectedParameter:
params[key] = value
if params:
dia_params = {'master': {':value': masteruri_from_ros()}}
dia = ParameterDialog(dia_params, store_geometry="transfer_param_dialog")
dia.setFilterVisible(False)
dia.setWindowTitle('Copy parameter')
dia.setFocusField('master')
if dia.exec_():
dia_params = dia.getKeywords()
url = dia_params['master']
rospy.loginfo("Copy %d parameter to %s" % (len(params), url))
self.parameterHandler.deliverParameter(url, params)
except Exception as e:
MessageBox.warning(self, "Copy parameter Error",
'Error while transfer parameter',
utf8(e))
def _replaceDoubleSlash(self, liste):
'''
        Replaces escaped newlines ("\\n") with real newlines in all strings of a (nested) list;
        used to avoid adding a \\ for each \ in the strings of a list.
'''
if liste and isinstance(liste, list):
result = []
for l in liste:
val = l
if isstring(l):
val = l.replace("\\n", "\n")
# result.append("".join([val]))
elif isinstance(l, list):
val = self._replaceDoubleSlash(l)
result.append(val)
return result
return liste
def _on_parameter_item_changed(self, item):
'''
        Adds changed parameter values to the ROS parameter server.
'''
if isinstance(item, ParameterValueItem):
try:
if isinstance(item.value, bool):
value = bool(item.text().lower() in ("yes", "true", "t", "1"))
elif isinstance(item.value, int):
value = int(item.text())
elif isinstance(item.value, float):
value = float(item.text())
elif isinstance(item.value, list):
try:
value = ruamel.yaml.load(item.text(), Loader=ruamel.yaml.Loader)
                        # if there is no YAML, load() will return an
                        # empty string. We want an empty list instead
                        # for our representation of empty.
if value is None:
value = []
value = self._replaceDoubleSlash(value)
except ruamel.yaml.MarkedYAMLError as e:
MessageBox.warning(self, self.tr("Warning"), "yaml error: %s" % utf8(e), buttons=MessageBox.Ok)
item.setText(utf8(item.value))
return
else:
value = item.text()
self.parameterHandler.deliverParameter(self.masteruri, {item.name: value})
item.value = value
except ValueError as e:
MessageBox.warning(self, "Warning",
'Error while add changes to the ROS parameter server',
utf8(e))
item.setText(item.value)
def _on_param_list(self, masteruri, code, msg, params):
'''
:param str masteruri: The URI of the ROS parameter server
:param int code: The return code of the request. If not 1, the message is set and the list can be ignored.
:param str msg: The message of the result.
        :param params: The list of parameter names.
:type params: list(str)
'''
if code == 1:
params.sort()
self.parameterHandler.requestParameterValues(masteruri, params)
def _on_param_values(self, masteruri, code, msg, params):
'''
:param str masteruri: The URI of the ROS parameter server
:param int code: The return code of the request. If not 1, the message is set and the list can be ignored.
:param str msg: The message of the result.
        :param params: The dictionary of parameter names and request results.
:type params: dict(paramName : (code, statusMessage, parameterValue))
'''
if code == 1:
result = {}
for p, (code_n, _, val) in params.items(): # _ := msg_n
if code_n == 1:
result[p] = val
else:
result[p] = ''
if p == '/use_sim_time':
self.__use_sim_time = (code_n == 1 and val)
self.parameter_model.update_model_data(result)
else:
rospy.logwarn("Error on retrieve parameter from %s: %s", utf8(masteruri), utf8(msg))
def _on_delivered_values(self, masteruri, code, msg, params):
'''
:param str masteruri: The URI of the ROS parameter server
:param int code: The return code of the request. If not 1, the message is set and the list can be ignored.
:param str msg: The message of the result.
        :param params: The dictionary of parameter names and request results.
:type params: dict(paramName : (code, statusMessage, parameterValue))
'''
errmsg = ''
if code == 1:
for p, (code_n, msg, _) in params.items(): # _ := value
if code_n != 1:
errmsg = '%s: %s\n%s' % (p, errmsg, msg)
else:
errmsg = msg if msg else 'Unknown error on set parameter'
if errmsg:
MessageBox.warning(self, "Warning",
'Error while delivering parameter to the ROS parameter server',
utf8(errmsg))
def _on_sim_param_values(self, masteruri, code, msg, params):
'''
:param str masteruri: The URI of the ROS parameter server
:param int code: The return code of the request. If not 1, the message is set and the list can be ignored.
:param str msg: The message of the result.
        :param params: The dictionary of parameter names and request results.
:type params: dict(paramName : (code, statusMessage, parameterValue))
'''
robot_icon_found = False
if code == 1:
for p, (code_n, _, val) in params.items(): # _ := msg_n
if p == '/use_sim_time':
self.__use_sim_time = (code_n == 1 and val)
elif p == '/robot_icon':
robot_icon_found = True
self.__current_parameter_robot_icon = val if code_n == 1 else ''
self.update_robot_icon()
elif p.startswith('/roslaunch/uris'):
if code_n == 1:
for _, value in val.items():
self.launch_server_handler.updateLaunchServerInfo(value)
elif p == "/run_id":
if self.__run_id != val:
self.__run_id = val
# # TODO: you have to launch global parameter
# for _, launch_cfg in self.__configs.items():
# try:
# launch_cfg.global_param_done.remove(masteruri)
# except ValueError:
# pass
else:
rospy.logwarn("Error on retrieve sim parameter value from %s: %s", utf8(masteruri), utf8(msg))
if not robot_icon_found:
self.__current_parameter_robot_icon = ''
self.update_robot_icon()
def _get_nm_masteruri(self):
'''
Requests the ROS master URI from the ROS master through the RPC interface and
        returns it. The '_nm_materuri' attribute will be set to the requested value.
:return: ROS master URI
:rtype: str or None
'''
if not hasattr(self, '_nm_materuri') or self._nm_materuri is None:
masteruri = masteruri_from_ros()
master = xmlrpcclient.ServerProxy(masteruri)
            _, _, self._nm_materuri = master.getUri(rospy.get_name())  # result: code, message, self._nm_materuri
return self._nm_materuri
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# %%%%%%%%%%%%% Nodelet handling %%%%%%%%
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
def _on_question_cancel(self, questionid, data):
pass
def _on_question_ok(self, questionid, data):
if questionid == MessageFrame.TYPE_NODELET:
try:
for cfgs, nodes in data.data.items():
self.stop_nodes_by_name(nodes)
self.start_nodes_by_name(nodes, cfgs, force=True, check_nodelets=False)
except Exception as err:
rospy.logwarn("Error while start nodelets: %s" % utf8(err))
elif questionid == MessageFrame.TYPE_LAUNCH_FILE:
try:
self.launchfiles = data.data
except Exception as err:
rospy.logwarn("Error while reload launch file %s: %s" % (data.data, utf8(err)))
MessageBox.warning(self, "Loading launch file", data.data, '%s' % utf8(err))
elif questionid == MessageFrame.TYPE_TRANSFER:
try:
nmd_uri = nmdurl.nmduri(self.masteruri)
username = self.current_user
self.main_window.launch_dock.progress_queue.add2queue(utf8(uuid.uuid4()),
'transfer %s to %s' % (data.data, nmd_uri),
nm.starter().transfer_file_nmd,
{'grpc_url': nmd_uri,
'path': data.data,
'auto_pw_request': True,
'user': username
})
self.main_window.launch_dock.progress_queue.start()
except Exception as err:
rospy.logwarn("Error while transfer changed files %s: %s" % (data.data, utf8(err)))
MessageBox.warning(self, "Loading launch file", data.data, '%s' % utf8(err))
elif questionid == MessageFrame.TYPE_NMD:
try:
# start node manager daemon if not already running
host_addr = nm.nameres().address(self.masteruri)
rospy.loginfo("start node manager daemon for %s", self.masteruri)
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'start node_manager_daemon for %s' % host_addr,
nm.starter().runNodeWithoutConfig,
{'host': host_addr,
'package': 'fkie_node_manager_daemon',
'binary': 'node_manager_daemon',
'name': 'node_manager_daemon',
'args': [],
'masteruri': self.masteruri,
'use_nmd': False,
'auto_pw_request': False,
'user': self.current_user
})
self._start_queue(self._progress_queue)
except Exception as err:
rospy.logwarn("Error while start node manager daemon on %s: %s" % (self.masteruri, utf8(err)))
MessageBox.warning(self, "Start node manager daemon", self.masteruri, '%s' % utf8(err))
elif questionid == MessageFrame.TYPE_NMD_RESTART:
try:
# start node manager daemon if not already running
rospy.loginfo("stop node manager daemon for %s", self.masteruri)
self.stop_nodes_by_name(['node_manager_daemon'])
host_addr = nm.nameres().address(self.masteruri)
rospy.loginfo("start node manager daemon for %s", self.masteruri)
self._progress_queue.add2queue(utf8(uuid.uuid4()),
'start node_manager_daemon for %s' % host_addr,
nm.starter().runNodeWithoutConfig,
{'host': host_addr,
'package': 'fkie_node_manager_daemon',
'binary': 'node_manager_daemon',
'name': 'node_manager_daemon',
'args': [],
'masteruri': self.masteruri,
'use_nmd': False,
'auto_pw_request': False,
'user': self.current_user
})
self._start_queue(self._progress_queue)
except Exception as err:
rospy.logwarn("Error while start node manager daemon on %s: %s" % (self.masteruri, utf8(err)))
MessageBox.warning(self, "Start node manager daemon", self.masteruri, '%s' % utf8(err))
elif questionid == MessageFrame.TYPE_BINARY:
try:
self.stop_nodes_by_name([node.name for node in data.data_list])
for node in data.data_list:
if node.next_start_cfg:
self.start_node(node, force=True, config=node.next_start_cfg)
else:
self.start_nodes([node], force=True)
try:
del self._changed_binaries[node.name]
except KeyError:
pass
except Exception as err:
rospy.logwarn("Error while restart nodes %s: %s" % (data.data, utf8(err)))
MessageBox.warning(self, "Restart nodes", data.data, '%s' % utf8(err))
elif questionid == MessageFrame.TYPE_NODE_CFG:
try:
nodes, cfg = data.data
self.stop_nodes_by_name(nodes)
self.start_nodes_by_name(nodes, cfg, force=True)
except Exception as err:
rospy.logwarn("Error while restart nodes %s: %s" % (str(nodes), utf8(err)))
MessageBox.warning(self, "Restart nodes", str(nodes), '%s' % utf8(err))
def _on_info_ok(self, questionid, data):
pass
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# %%%%%%%%%%%%% Shortcuts handling %%%%%%%%
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
def focus_filter_line(self):
if self._is_current_tab_name('tabNodes'):
self.ui.nodeFilterInput.setFocus(Qt.ActiveWindowFocusReason)
elif self._is_current_tab_name('tabTopics'):
self.ui.topicFilterInput.setFocus(Qt.ActiveWindowFocusReason)
elif self._is_current_tab_name('tabServices'):
self.ui.serviceFilterInput.setFocus(Qt.ActiveWindowFocusReason)
elif self._is_current_tab_name('tabParameter'):
self.ui.parameterFilterInput.setFocus(Qt.ActiveWindowFocusReason)
def select_host_block(self, index):
'''
        Selects all nodes of a host with the given index
:param int index: the index of the host in the tree model
'''
root = self.ui.nodeTreeView.model().index(index, 0)
if not root.isValid():
return
self.ui.nodeTreeView.expand(root)
# firstChild = root.child(0, 0)
last_row_index = len(self.node_tree_model.header) - 1
# lastChild = root.child(0, last_row_index)
i = 0
selection = QItemSelection()
while root.child(i, 0).isValid():
index = root.child(i, 0)
model_index = self.node_proxy_model.mapToSource(index)
item = self.node_tree_model.itemFromIndex(model_index)
if item is not None and not self._is_in_ignore_list(item.name):
selection.append(QItemSelectionRange(index, root.child(i, last_row_index)))
i = i + 1
# selection = QItemSelection(firstChild, lastChild)
self.ui.nodeTreeView.selectionModel().select(selection, QItemSelectionModel.ClearAndSelect)
def _is_in_ignore_list(self, name):
for i in self._stop_ignores:
if name.endswith(i):
return True
return False
def on_shortcut1_activated(self):
self.select_host_block(0)
def on_shortcut2_activated(self):
self.select_host_block(1)
def on_shortcut3_activated(self):
self.select_host_block(2)
def on_shortcut4_activated(self):
self.select_host_block(3)
def on_shortcut5_activated(self):
self.select_host_block(4)
def on_shortcut_collapse_all(self):
self.ui.nodeTreeView.selectionModel().clearSelection()
self.ui.nodeTreeView.collapseAll()
def on_copy_c_pressed(self):
result = ''
if self.ui.nodeTreeView.hasFocus():
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
for node in selectedNodes:
try:
result = '%s %s' % (result, node.name)
except Exception:
pass
elif self.ui.topicsView.hasFocus():
selectedTopics = self.topicsFromIndexes(self.ui.topicsView.selectionModel().selectedIndexes())
for topic in selectedTopics:
try:
result = '%s %s' % (result, topic.name)
except Exception:
pass
elif self.ui.servicesView.hasFocus():
selectedServices = self.servicesFromIndexes(self.ui.servicesView.selectionModel().selectedIndexes())
for service in selectedServices:
try:
result = '%s %s' % (result, service.name)
except Exception:
pass
elif self.ui.parameterView.hasFocus():
selectedParameter = self.parameterFromIndexes(self.ui.parameterView.selectionModel().selectedIndexes())
for (name, _value) in selectedParameter:
try:
result = '%s %s' % (result, name)
except Exception:
pass
QApplication.clipboard().setText(result.strip())
def on_copy_x_pressed(self):
result = ''
if self.ui.nodeTreeView.hasFocus():
selectedNodes = self.nodesFromIndexes(self.ui.nodeTreeView.selectionModel().selectedIndexes())
for node in selectedNodes:
try:
result = '%s %s' % (result, node.pid)
except Exception:
pass
elif self.ui.topicsView.hasFocus():
selectedTopics = self.topicsFromIndexes(self.ui.topicsView.selectionModel().selectedIndexes())
for topic in selectedTopics:
try:
result = '%s %s' % (result, topic.type)
except Exception:
pass
elif self.ui.servicesView.hasFocus():
selectedServices = self.servicesFromIndexes(self.ui.servicesView.selectionModel().selectedIndexes())
for service in selectedServices:
try:
result = '%s %s' % (result, service.type)
except Exception:
pass
elif self.ui.parameterView.hasFocus():
selectedParameter = self.parameterFromIndexes(self.ui.parameterView.selectionModel().selectedIndexes())
for (_, value) in selectedParameter:
try:
result = '%s %s' % (result, value)
except Exception:
pass
QApplication.clipboard().setText(result.strip())
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
# %%%%%%%%%%%%% Filter handling %%%%%%%%%%%
# %%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%%
class NodesSortFilterProxyModel(QSortFilterProxyModel):
def filterAcceptsRow(self, sourceRow, sourceParent):
'''
Perform filtering on column 0 (Name)
'''
if not self.filterRegExp().pattern():
return True
if (self.filterAcceptsRowItself(sourceRow, sourceParent)):
return True
        # # accept if any of the parents is accepted on its own merits
# parent = sourceParent
# while (parent.isValid()):
# if (self.filterAcceptsRowItself(parent.row(), parent.parent())):
# return True
# parent = parent.parent()
        # accept if any of the children is accepted on its own merits
if (self.hasAcceptedChildren(sourceRow, sourceParent)):
return True
return False
def hasAcceptedChildren(self, sourceRow, sourceParent):
index = self.sourceModel().index(sourceRow, 0, sourceParent)
if not index.isValid():
return False
# check if there are children
childCount = index.model().rowCount(index)
if childCount == 0:
return False
for i in range(childCount):
if (self.filterAcceptsRowItself(i, index)):
return True
# recursive call -> NOTICE that this is depth-first searching, you're probably better off with breadth first search...
if (self.hasAcceptedChildren(i, index)):
return True
return False
def filterAcceptsRowItself(self, sourceRow, sourceParent):
index0 = self.sourceModel().index(sourceRow, 0, sourceParent)
item = self.sourceModel().data(index0)
if item is not None:
# skip groups
if '{' not in item:
regex = self.filterRegExp()
return (regex.indexIn(self.sourceModel().data(index0)) != -1)
return False
class TopicsSortFilterProxyModel(QSortFilterProxyModel):
def filterAcceptsRow(self, sourceRow, sourceParent):
'''
Perform filtering on columns 0 and 3 (Name, Type)
'''
result = True
index0 = self.sourceModel().index(sourceRow, 0, sourceParent)
regex = self.filterRegExp()
item = self.sourceModel().itemFromIndex(index0)
if type(item) == TopicItem:
result = (regex.indexIn(item.topic.name) != -1 or regex.indexIn(item.topic_type_str) != -1)
elif type(item) == TopicGroupItem:
result = True
if regex.indexIn(item.name) != -1:
result = True
else:
sitems = item.get_topic_items()
for sitem in sitems:
result = (regex.indexIn(sitem.topic.name) != -1 or regex.indexIn(sitem.topic_type_str) != -1)
if result:
break
return result
class ServicesSortFilterProxyModel(QSortFilterProxyModel):
def filterAcceptsRow(self, sourceRow, sourceParent):
'''
Perform filtering on columns 0 and 1 (Name, Type)
'''
index0 = self.sourceModel().index(sourceRow, 0, sourceParent)
regex = self.filterRegExp()
item = self.sourceModel().itemFromIndex(index0)
if type(item) == ServiceItem:
return (regex.indexIn(item.service.name) != -1 or regex.indexIn(item.service_type_str) != -1)
elif type(item) == ServiceGroupItem:
if regex.indexIn(item.name) != -1:
return True
grp_res = True
sitems = item.get_service_items()
for sitem in sitems:
res = (regex.indexIn(sitem.service.name) != -1 or regex.indexIn(sitem.service_type_str) != -1)
if res:
return True
grp_res = res
return grp_res
return True
class ParameterSortFilterProxyModel(QSortFilterProxyModel):
def filterAcceptsRow(self, sourceRow, sourceParent):
'''
Perform filtering on columns 0 and 1 (Name, Value)
'''
index0 = self.sourceModel().index(sourceRow, 0, sourceParent)
# index1 = self.sourceModel().index(sourceRow, 1, sourceParent)
index2 = self.sourceModel().index(sourceRow, 2, sourceParent)
regex = self.filterRegExp()
return (regex.indexIn(self.sourceModel().data(index0, ParameterNameItem.NAME_ROLE)) != -1 or
regex.indexIn(self.sourceModel().data(index2, ParameterValueItem.VALUE_ROLE)) != -1)
| []
| []
| []
| [] | [] | python | 0 | 0 | |
wienerschnitzelgemeinschaft/src/Christof/models/GAPNet/5crop_1024/train1_ur.py | import os, sys
#os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]="1"
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import skimage.io
from skimage.transform import resize
from imgaug import augmenters as iaa
from tqdm import tqdm
import PIL
from PIL import Image
import cv2
from sklearn.utils import class_weight, shuffle
from ml_stratifiers import MultilabelStratifiedKFold
import warnings
warnings.filterwarnings("ignore")
from classification_models.resnet.models import ResNet18
import albumentations as A
MODEL_PATH = 'Christof/models/GAPNet/5crop_1024/'
# a) added batchnorm and cut out one Dense 256 layer
# b) a) + added 16 size layer to GAP
SIZE = 512
# Load dataset info
tile = 'ur'
exp_suffix = f'_{tile}'
path_to_train = f'Christof/assets/train_rgb_1024_9crop/{tile}/'
data = pd.read_csv('Christof/assets/train.csv')
normal_aug = A.Compose([#A.Rotate((0,30),p=0.75),
A.RandomRotate90(p=1),
A.HorizontalFlip(p=0.5),
#A.RandomBrightness(0.05),
#A.RandomContrast(0.05),
A.IAAAffine(translate_percent=10,rotate=45,shear=10, scale=(0.9,1.1)),
#A.RandomAffine(degrees=45, translate=(0.1,0.1), shear=10, scale=(0.9,1.1))
A.Normalize(mean=(0.08165012, 0.0530909 , 0.05298166), std=(0.12806622 ,0.08622692, 0.13038702),
max_pixel_value=255.)
])
normal_aug_ext = A.Compose([#A.Rotate((0,30),p=0.75),
A.RandomRotate90(p=1),
A.HorizontalFlip(p=0.5),
#A.RandomBrightness(0.05),
#A.RandomContrast(0.05),
A.IAAAffine(translate_percent=10,rotate=45,shear=10, scale=(0.9,1.1)),
#A.RandomAffine(degrees=45, translate=(0.1,0.1), shear=10, scale=(0.9,1.1))
A.Normalize(mean=(0.11843426, 0.06886751, 0.06541236), std=(0.16149608, 0.0987589 , 0.16087747),
max_pixel_value=255.)
])
val_aug = A.Compose([A.HorizontalFlip(p=0.5),
A.Normalize(mean=(0.08165012, 0.0530909 , 0.05298166), std=(0.12806622 ,0.08622692, 0.13038702),
max_pixel_value=255.)])
from torchvision import transforms
eps = 0.004
desired = {
0: 0.36239782,
1: 0.043841336,
2: 0.075268817,
3: 0.059322034,
4: 0.075268817,
5: 0.075268817,
6: 0.043841336,
7: 0.075268817,
8: eps,
9: eps,
10: eps,
11: 0.043841336,
12: 0.043841336,
13: 0.014198783,
14: 0.043841336,
15: eps,
16: 0.028806584,
17: 0.014198783,
18: 0.028806584,
19: 0.059322034,
20: eps,
21: 0.126126126,
22: 0.028806584,
23: 0.075268817,
24: eps,
25: 0.222493888,
26: 0.028806584,
27: eps
}
sampling_weights = [ 2.6473, 35.0588 , 8.2069 , 19.3439 , 16.0145 , 13.3245 , 32.8644,
10.607 , 551.3 , 501.1818 , 787.5714 , 25.8523 , 39.0301, 51.644,
30.0846 ,1470.1333 , 62.8262, 190.1034 , 39.3084 , 23.2126 , 170.9457
, 8.2592, 33.2609 , 9.6889 , 92.2678 , 4.19 , 99.3333 ,3150.2857]
sample_weights_ext = [ 2.6728, 41.1617 , 10.3068 , 42.4172 , 22.9729 , 21.9808 , 26.8267
, 11.5358 , 474.8659 , 486.7375 , 492.8987 , 66.963 , 50.2763 , 82.7609,
45.0683, 1854.2381, 100.3582 , 319.1721 , 76.5762 , 33.424 , 272.3007,
7.3664 , 39.4319 , 10.239 , 734.6981 , 2.548 , 196.6616 , 638.3443]
train_dataset_info = []
for name, labels in zip(data['Id'], data['Target'].str.split(' ')):
path = os.path.join(path_to_train, name)
labs = np.array([int(label) for label in labels])
bucket_ind = np.argmin([desired[l] for l in labs])
bucket = labs[bucket_ind]
weight = sampling_weights[bucket]
train_dataset_info.append({
'path': path,
'labels': labs,
'weight':weight})
train_dataset_info = np.array(train_dataset_info)
data_ext1 = pd.read_csv('Christof/assets/train_ext1.csv')
path_to_train_ext1 = f'Christof/assets/ext_tomomi_rgb_1024_9crop/{tile}/'
train_dataset_info_ext1 = []
for name, labels in zip(data_ext1['Id'], data_ext1['Target'].str.split(' ')):
path = os.path.join(path_to_train_ext1, name[:-5])
labs = np.array([int(label) for label in labels])
bucket_ind = np.argmin([desired[l] for l in labs])
bucket = labs[bucket_ind]
weight = sample_weights_ext[bucket]
train_dataset_info_ext1.append({
'path':path,
'labels': labs,
'weight':weight})
train_dataset_info_ext1 = np.array(train_dataset_info_ext1)
counts = np.zeros(28)
for item in train_dataset_info:
for l in item['labels']:
counts[l] = counts[l] + 1
counts = counts / len(train_dataset_info)
rare_classes = np.where(counts < 0.005)
#rare_dataset_info = np.array([item for item in train_dataset_info if np.isin(item['labels'], rare_classes).any()])
#train_dataset_info = rare_dataset_info
from torch.utils.data.sampler import WeightedRandomSampler
from classification_models.resnet import preprocess_input
class data_generator:
@staticmethod
def create_train(dataset_info, batch_size, shape, augument=None, weighted_sample = True):
assert shape[2] == 3
if weighted_sample:
p = np.array([item['weight'] for item in dataset_info])
p = p/np.sum(p)
else:
p = None
while True:
#dataset_info = shuffle(dataset_info)
for start in range(0, len(dataset_info), batch_size):
#end = min(start + batch_size, len(dataset_info))
batch_images = []
X_train_batch = np.random.choice(dataset_info,batch_size,p=p)
batch_labels = np.zeros((len(X_train_batch), 28))
for i in range(len(X_train_batch)):
image = data_generator.load_image(X_train_batch[i]['path'], shape)
#image = preprocess_input(image)
#rare = np.isin(X_train_batch[i]['labels'], rare_classes).any()
if augument:
image = data_generator.augment(augument,image)
batch_images.append(image)
batch_labels[i][X_train_batch[i]['labels']] = 1
yield np.array(batch_images, np.float32), batch_labels
@staticmethod
def load_image(path, shape):
image = cv2.imread(path + '.png', cv2.IMREAD_UNCHANGED)
return image
@staticmethod
def augment(aug,image):
image_aug = aug(image=image)['image']
return image_aug
from keras.preprocessing.image import ImageDataGenerator
from keras.models import Sequential, load_model
from keras.layers import Activation, Dropout, Flatten, Dense, GlobalAveragePooling2D, Concatenate, Input, Conv2D
from keras.applications.inception_v3 import InceptionV3
from keras.callbacks import ModelCheckpoint
from keras import metrics
from keras.optimizers import Adam
from keras import backend as K
import keras
from keras.models import Model
from keras.layers import Layer, InputSpec
from keras import initializers
from keras.constraints import Constraint
import keras.backend as K
from keras.layers import Reshape, Permute, multiply
def squeeze_excite_block(input, ratio=16):
init = input
channel_axis = 1 if K.image_data_format() == "channels_first" else -1
filters = init._keras_shape[channel_axis]
se_shape = (1, 1, filters)
se = GlobalAveragePooling2D()(init)
se = Reshape(se_shape)(se)
se = Dense(filters // ratio, activation='relu', kernel_initializer='he_normal', use_bias=False)(se)
se = Dense(filters, activation='sigmoid', kernel_initializer='he_normal', use_bias=False)(se)
if K.image_data_format() == 'channels_first':
se = Permute((3, 1, 2))(se)
x = multiply([init, se])
return x
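# Descriptive note (added, not in the original script): squeeze_excite_block is a squeeze-and-excitation gate.
# GlobalAveragePooling2D "squeezes" each feature map to a single value, the two Dense layers produce per-channel
# weights in (0, 1), and multiply() rescales the input channels by those weights.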
def encoder(backbone):
c0 = backbone.get_layer('relu0').output
c1 = backbone.get_layer('stage2_unit1_relu1').get_output_at(0) # 128
c2 = backbone.get_layer('stage3_unit1_relu1').output # 63
c3 = backbone.get_layer('stage4_unit1_relu1').output # 32
enc_out = backbone.get_layer('relu1').output # 16
#enc_out = backbone.output # 8
short_cuts = [c0,c1,c2,c3]
return enc_out, short_cuts
from keras.layers import BatchNormalization
def create_model(input_shape, n_out):
input_tensor = Input(shape=(SIZE, SIZE, 3))
#bn = BatchNormalization()(input_tensor)
#conv = Conv2D(3,(3,3),padding='same',activation='relu')(bn)
base_model = ResNet18(include_top=False,
weights='imagenet',
input_shape=(SIZE, SIZE, 3),input_tensor=input_tensor)
enc_out, short_cuts = encoder(base_model)
x0 = GlobalAveragePooling2D()(squeeze_excite_block(enc_out))
x1 = GlobalAveragePooling2D()(squeeze_excite_block(short_cuts[0]))
x2 = GlobalAveragePooling2D()(squeeze_excite_block(short_cuts[1]))
x3 = GlobalAveragePooling2D()(squeeze_excite_block(short_cuts[2]))
x4 = GlobalAveragePooling2D()(squeeze_excite_block(short_cuts[3]))
x = Concatenate()([x0,x1,x2,x3,x4])
x = BatchNormalization()(x)
x = Dropout(0.5)(x)
x = Dense(256, activation='relu')(x)
#x = BatchNormalization()(x)
#x = Dropout(0.5)(x)
#x = Dense(256, activation='relu')(x)
x = BatchNormalization()(x)
x = Dropout(0.5)(x)
output = Dense(n_out, activation='sigmoid')(x)
model = Model(input_tensor, output)
# transfer imagenet weights
#res_img = ResNet34(include_top=False, weights='imagenet', input_shape=(SIZE, SIZE, 3))
#offset = 2
#for i, l in enumerate(base_model.layers[offset+1:]):
# l.set_weights(res_img.layers[i + 1].get_weights())
return model
# create callbacks list
from keras.callbacks import ModelCheckpoint, LearningRateScheduler, EarlyStopping, ReduceLROnPlateau, TensorBoard
from keras_callbacks import F1Metric
#from keras_metrics import f1, f1_02
#from keras_losses import f1_loss
epochs = [20,150]
batch_size = 32
# split data into train, valid
mskf = MultilabelStratifiedKFold(n_splits=5,shuffle=True,random_state=18)
y = np.zeros((len(train_dataset_info), 28))
for i in range(len(train_dataset_info)):
y[i][train_dataset_info[i]['labels']] = 1
mskf.get_n_splits(train_dataset_info, y)
kf = mskf.split(train_dataset_info, y)
fold_id = 1
for f in range(fold_id):
train_indexes, valid_indexes = next(kf)
train_indexes, valid_indexes = next(kf)
train_generator_orig = data_generator.create_train(train_dataset_info[train_indexes],
batch_size, (SIZE, SIZE, 3), augument=normal_aug)
train_generator_ext1 = data_generator.create_train(train_dataset_info_ext1,
batch_size, (SIZE, SIZE, 3), augument=normal_aug_ext)
import random
def gen():
while True:
x = random.random()
if x > 0.5:
batch = next(train_generator_orig)
else:
batch = next(train_generator_ext1)
yield batch
train_generator = gen()
validation_generator = data_generator.create_train(train_dataset_info[valid_indexes],
batch_size, (SIZE, SIZE, 3), augument=val_aug, weighted_sample=False)
checkpoint = ModelCheckpoint(MODEL_PATH + 'model_loss{}.h5'.format(exp_suffix), monitor='val_loss', verbose=1,
save_best_only=True, mode='min', save_weights_only=True)
tensorboard = TensorBoard(MODEL_PATH + 'logs{}'.format(fold_id) + '{}'.format(exp_suffix) + '/')
# reduceLROnPlat = ReduceLROnPlateau(monitor='val_loss', factor=0.1, patience=3,
# verbose=1, mode='auto', epsilon=0.0001)
# early = EarlyStopping(monitor="val_loss",
# mode="min",
# patience=6)
#f1_metric = F1Metric(validation_generator2,2*len(valid_indexes)//batch_size,batch_size,28) #2 times val because of val_aug
nb_epochs = epochs[0]
nb_cycles = 1
init_lr = 0.0005
def _cosine_anneal_schedule(t):
cos_inner = np.pi * (t % (nb_epochs // nb_cycles))
cos_inner /= nb_epochs// nb_cycles
cos_out = np.cos(cos_inner) + 1
return float(init_lr / 2 * cos_out)
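# Illustrative note (added, not in the original script): with init_lr = 0.0005, nb_epochs = 20 and nb_cycles = 1,
# the half-cosine schedule above starts at ~5.0e-4, reaches 2.5e-4 at epoch 10 and decays to roughly 3e-6 by
# epoch 19, i.e. one smooth anneal from the peak learning rate towards zero over the 20-epoch run.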
lr_schedule = LearningRateScheduler(_cosine_anneal_schedule,verbose=True)
callbacks_list = [lr_schedule, tensorboard]
# warm up model
model = create_model(
input_shape=(SIZE, SIZE, 3),
n_out=28)
POS_WEIGHT = 10 # multiplier for positive targets, needs to be tuned
import tensorflow as tf
import keras.backend.tensorflow_backend as tfb
def weighted_binary_crossentropy(target, output):
"""
Weighted binary crossentropy between an output tensor
and a target tensor. POS_WEIGHT is used as a multiplier
for the positive targets.
Combination of the following functions:
* keras.losses.binary_crossentropy
* keras.backend.tensorflow_backend.binary_crossentropy
* tf.nn.weighted_cross_entropy_with_logits
"""
# transform back to logits
_epsilon = tfb._to_tensor(tfb.epsilon(), output.dtype.base_dtype)
#_epsilon = K.epsilon()
output = tf.clip_by_value(output, _epsilon, 1 - _epsilon)
output = tf.log(output / (1 - output))
# compute weighted loss
loss = tf.nn.weighted_cross_entropy_with_logits(targets=target,
logits=output,
pos_weight=POS_WEIGHT)
return tf.reduce_mean(loss, axis=-1)
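# Illustrative note (added, not in the original script): with pos_weight = POS_WEIGHT = 10, the positive-target term
# of the cross entropy is multiplied by 10, so missing a true label costs roughly ten times as much as predicting a
# spurious one, which counteracts the heavy label imbalance in this multilabel dataset.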
import tensorflow as tf
from tensorflow.python.framework import ops
from functools import reduce
def binaryRound(x):
"""
Rounds a tensor whose values are in [0,1] to a tensor with values in {0, 1},
using the straight through estimator for the gradient.
"""
g = tf.get_default_graph()
with ops.name_scope("BinaryRound") as name:
with g.gradient_override_map({"Round": "Identity"}):
return tf.round(x, name=name)
# For Tensorflow v0.11 and below use:
#with g.gradient_override_map({"Floor": "Identity"}):
# return tf.round(x, name=name)
def brian_f1(y_true, y_pred):
y_pred = binaryRound(y_pred)
tp = K.sum(K.cast(y_true*y_pred, 'float'), axis=0)
tn = K.sum(K.cast((1-y_true)*(1-y_pred), 'float'), axis=0)
fp = K.sum(K.cast((1-y_true)*y_pred, 'float'), axis=0)
fn = K.sum(K.cast(y_true*(1-y_pred), 'float'), axis=0)
p = tp / (tp + fp + K.epsilon())
r = tp / (tp + fn + K.epsilon())
f1 = 2*p*r / (p+r+K.epsilon())
f1 = tf.where(tf.is_nan(f1), tf.zeros_like(f1), f1)
return K.mean(f1)
def brian_f1_loss(y_true, y_pred):
return 1- brian_f1(y_true, y_pred)
def custom_loss(y_true, y_pred):
return 4*weighted_binary_crossentropy(y_true,y_pred) - K.log(brian_f1(y_true,y_pred))
# train all layers
from keras.metrics import binary_accuracy
model.compile(loss=custom_loss,
optimizer=Adam(lr=5e-4),
metrics=[binary_accuracy,brian_f1])
model.fit_generator(
train_generator,
steps_per_epoch=np.ceil(float(2*len(train_indexes)) / float(batch_size)),
#validation_data=validation_generator,
#validation_steps=2*np.ceil(float(len(valid_indexes)) / float(batch_size)),
epochs=epochs[0],
verbose=1,
callbacks=callbacks_list)
model.save_weights(MODEL_PATH + 'model_loss{}{}.h5'.format(fold_id,exp_suffix))
model.load_weights(MODEL_PATH + 'model_loss{}{}.h5'.format(fold_id,exp_suffix))
submit = pd.read_csv('Christof/assets/sample_submission.csv')
tta = 8
draw_predict = np.zeros((len(submit['Id']), 28))
for i, name in tqdm(enumerate(submit['Id'])):
path = os.path.join(f'Christof/assets/test_rgb_1024_9crop/{tile}/', name)
image = data_generator.load_image(path, (SIZE, SIZE, 3))
images = [data_generator.augment(normal_aug, image) for _ in range(tta)]
tta_predicts = model.predict(np.array(images))
draw_predict[i] = np.median(tta_predicts,axis = 0)
np.save(MODEL_PATH + f'pred{fold_id}{exp_suffix}.npy',draw_predict)
# custom thresholds to match lb proportions
thresholds = np.linspace(0.95, 0.05, 101)
pred = draw_predict.copy()
for j in tqdm(range(pred.shape[1])):
for t in thresholds:
pred[:, j] = (draw_predict[:, j] > t).astype(int)
prop = np.mean(pred[:, j])
if prop >= desired[j]: break
print(j, '%3.2f' % t, '%6.4f' % desired[j], '%6.4f' % prop, j, )
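# Illustrative note (added, not in the original script): for each class the threshold is lowered from 0.95 in steps
# of ~0.009 until the fraction of positive test predictions reaches the class proportion in `desired`; e.g. a rare
# class with desired ~0.004 keeps dropping its threshold until at least ~0.4% of the rows are predicted positive.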
print(pred[:5].astype(int))
label_predict = [np.arange(28)[score_predict == 1] for score_predict in pred]
str_predict_label = [' '.join(str(l) for l in lp) for lp in label_predict]
submit['Predicted'] = str_predict_label
# np.save('draw_predict_InceptionV3.npy', score_predict)
submit.to_csv(MODEL_PATH + 'submission_loss{}_lb_dist_adjusted_8tta.csv'.format(exp_suffix), index=False)
from Christof.utils import f1_sub
best_sub = pd.read_csv('ens18.csv')
f1_sub(best_sub,submit)
best_sub = pd.read_csv('ens56d.csv')
f1_sub(best_sub,submit)
# submit2 = pd.read_csv('Christof/models/GAPNet/11/submission_loss_0_lb_dist_adjusted_8tta.csv')
# f1_sub(best_sub,submit2)
#
# submit2 = pd.read_csv('Christof/models/GAPNet/11_tests_on_clr/submission_loss_1in20_0005_2c_lb_dist_adjusted_8tta.csv')
# f1_sub(best_sub,submit2)
#
# submit2 = pd.read_csv('Christof/models/GAPNet/11_tests_on_clr/submission_loss_1in20_0005_lb_dist_adjusted_8tta.csv')
# f1_sub(best_sub,submit2) | []
| []
| [
"CUDA_DEVICE_ORDER",
"CUDA_VISIBLE_DEVICES"
]
| [] | ["CUDA_DEVICE_ORDER", "CUDA_VISIBLE_DEVICES"] | python | 2 | 0 | |
documents/questions/tests/Atlassian/Result.java | import java.io.*;
import java.math.*;
import java.security.*;
import java.text.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.function.*;
import java.util.regex.*;
import java.util.stream.*;
import static java.util.stream.Collectors.joining;
import static java.util.stream.Collectors.toList;
class Result {
/*
* Complete the 'mergePalindromes' function below.
*
* The function is expected to return a STRING.
     * The function accepts the following parameters:
* 1. STRING s1
* 2. STRING s2
*/
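    // Illustrative trace (added, not part of the original stub): for s1 = "aab", s2 = "b" the letter counts give
    // one "aa" pair from s1 plus a cross "bb" pair from the leftover b's, so left becomes "ab", right becomes "ba"
    // and the method returns the palindrome "abba".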
public static String mergePalindromes(String s1, String s2) {
char[] c1 = s1.toCharArray();
char[] c2 = s2.toCharArray();
int n1 = c1.length, n2 = c2.length;
int[] x1 = new int[26];
int[] x2 = new int[26];
int[] x3 = new int[26];
for (int i = 0; i < n1; i++) {
x1[c1[i] - 'a'] += 1;
}
for (int i = 0; i < n2; i++) {
x2[c2[i] - 'a'] += 1;
}
for (int i = 0; i < 26; i++) {
int count1 = x1[i], count2 = x2[i];
x3[i] += (count1 - count1 % 2);
x1[i] = count1 % 2;
x3[i] += (count2 - count2 % 2);
x2[i] = count2 % 2;
}
boolean found = false;
for (int j = 0; j < 26; j++) {
if ((x1[j] > 0) && (x2[j] > 0)) {
x3[j] += 2;
found = true;
break;
}
}
StringBuilder left = new StringBuilder();
StringBuilder leftCopy = new StringBuilder();
for (int i = 0; i < 26; i++) {
for (int j = 0; j < (x3[i] / 2); j++) {
left.append((char) ('a' + i));
leftCopy.append((char) ('a' + i));
}
}
StringBuilder right = leftCopy.reverse();
if (found) {
System.out.println("even");
return left.append(right).toString();
}
for (int i = 0; i < 26; i++) {
if ((x1[i] > 0) || (x2[i] > 0)) {
left.append((char)('a' + i));
System.out.println("odd");
return left.append(right).toString();
}
}
return left.append(right).toString();
}
}
public class Solution {
public static void main(String[] args) throws IOException {
BufferedReader bufferedReader = new BufferedReader(new InputStreamReader(System.in));
BufferedWriter bufferedWriter = new BufferedWriter(new FileWriter(System.getenv("OUTPUT_PATH")));
String s1 = bufferedReader.readLine();
String s2 = bufferedReader.readLine();
String result = Result.mergePalindromes(s1, s2);
bufferedWriter.write(result);
bufferedWriter.newLine();
bufferedReader.close();
bufferedWriter.close();
}
}
| [
"\"OUTPUT_PATH\""
]
| []
| [
"OUTPUT_PATH"
]
| [] | ["OUTPUT_PATH"] | java | 1 | 0 | |
rest/config.go | // Copyright (c) 2013 Couchbase, Inc.
// Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file
// except in compliance with the License. You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
// Unless required by applicable law or agreed to in writing, software distributed under the
// License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
// either express or implied. See the License for the specific language governing permissions
// and limitations under the License.
package rest
import (
"bytes"
"crypto/tls"
"crypto/x509"
"errors"
"flag"
"fmt"
"io"
"io/ioutil"
"log"
"net/http"
"net/url"
"os"
"os/signal"
"path/filepath"
"runtime"
"strings"
"syscall"
"github.com/couchbase/sync_gateway/auth"
"github.com/couchbase/sync_gateway/base"
"github.com/couchbase/sync_gateway/db"
"github.com/hashicorp/go-multierror"
pkgerrors "github.com/pkg/errors"
// Register profiling handlers (see Go docs)
_ "net/http/pprof"
)
var (
DefaultInterface = ":4984"
DefaultAdminInterface = "127.0.0.1:4985" // Only accessible on localhost!
DefaultMetricsInterface = "127.0.0.1:4986" // Only accessible on localhost!
DefaultServer = "walrus:"
DefaultMinimumTLSVersionConst = tls.VersionTLS12
// The value of defaultLogFilePath is populated by -defaultLogFilePath in ParseCommandLine()
defaultLogFilePath string
// The value of disablePersistentConfig is populated by -disable_persistent_config in ParseCommandLine()
disablePersistentConfig bool
)
const (
eeOnlyWarningMsg = "EE only configuration option %s=%v - Reverting to default value for CE: %v"
minValueErrorMsg = "minimum value for %s is: %v"
rangeValueErrorMsg = "valid range for %s is: %s"
// Default value of ServerConfig.MaxIncomingConnections
DefaultMaxIncomingConnections = 0
// Default value of ServerConfig.MaxFileDescriptors
DefaultMaxFileDescriptors uint64 = 5000
// Default number of index replicas
DefaultNumIndexReplicas = uint(1)
)
// JSON object that defines the server configuration.
type ServerConfig struct {
TLSMinVersion *string `json:"tls_minimum_version,omitempty"` // Set TLS Version
Interface *string `json:",omitempty"` // Interface to bind REST API to, default ":4984"
SSLCert *string `json:",omitempty"` // Path to SSL cert file, or nil
SSLKey *string `json:",omitempty"` // Path to SSL private key file, or nil
ServerReadTimeout *int `json:",omitempty"` // maximum duration.Second before timing out read of the HTTP(S) request
ServerWriteTimeout *int `json:",omitempty"` // maximum duration.Second before timing out write of the HTTP(S) response
ReadHeaderTimeout *int `json:",omitempty"` // The amount of time allowed to read request headers.
IdleTimeout *int `json:",omitempty"` // The maximum amount of time to wait for the next request when keep-alives are enabled.
AdminInterface *string `json:",omitempty"` // Interface to bind admin API to, default "localhost:4985"
AdminUI *string `json:",omitempty"` // Path to Admin HTML page, if omitted uses bundled HTML
ProfileInterface *string `json:",omitempty"` // Interface to bind Go profile API to (no default)
ConfigServer *string `json:",omitempty"` // URL of config server (for dynamic db discovery)
Facebook *FacebookConfig `json:",omitempty"` // Configuration for Facebook validation
Google *GoogleConfig `json:",omitempty"` // Configuration for Google validation
CORS *CORSConfig `json:",omitempty"` // Configuration for allowing CORS
DeprecatedLog []string `json:"log,omitempty"` // Log keywords to enable
DeprecatedLogFilePath *string `json:"logFilePath,omitempty"` // Path to log file, if missing write to stderr
Logging *base.LoggingConfig `json:",omitempty"` // Configuration for logging with optional log file rotation
Pretty bool `json:",omitempty"` // Pretty-print JSON responses?
DeploymentID *string `json:",omitempty"` // Optional customer/deployment ID for stats reporting
StatsReportInterval *float64 `json:",omitempty"` // Optional stats report interval (0 to disable)
CouchbaseKeepaliveInterval *int `json:",omitempty"` // TCP keep-alive interval between SG and Couchbase server
SlowQueryWarningThreshold *int `json:",omitempty"` // Log warnings if N1QL queries take this many ms
MaxIncomingConnections *int `json:",omitempty"` // Max # of incoming HTTP connections to accept
MaxFileDescriptors *uint64 `json:",omitempty"` // Max # of open file descriptors (RLIMIT_NOFILE)
CompressResponses *bool `json:",omitempty"` // If false, disables compression of HTTP responses
Databases DbConfigMap `json:",omitempty"` // Pre-configured databases, mapped by name
Replications []*ReplicateV1Config `json:",omitempty"` // sg-replicate replication definitions
MaxHeartbeat uint64 `json:",omitempty"` // Max heartbeat value for _changes request (seconds)
ClusterConfig *ClusterConfig `json:"cluster_config,omitempty"` // Bucket and other config related to CBGT
Unsupported *UnsupportedServerConfig `json:"unsupported,omitempty"` // Config for unsupported features
ReplicatorCompression *int `json:"replicator_compression,omitempty"` // BLIP data compression level (0-9)
BcryptCost int `json:"bcrypt_cost,omitempty"` // bcrypt cost to use for password hashes - Default: bcrypt.DefaultCost
MetricsInterface *string `json:"metricsInterface,omitempty"` // Interface to bind metrics to. If not set then metrics isn't accessible
HideProductVersion bool `json:"hide_product_version,omitempty"` // Determines whether product versions removed from Server headers and REST API responses. This setting does not apply to the Admin REST API.
}
// Bucket configuration elements - used by db, index
type BucketConfig struct {
Server *string `json:"server,omitempty"` // Couchbase server URL
DeprecatedPool *string `json:"pool,omitempty"` // Couchbase pool name - This is now deprecated and forced to be "default"
Bucket *string `json:"bucket,omitempty"` // Bucket name
Username string `json:"username,omitempty"` // Username for authenticating to server
Password string `json:"password,omitempty"` // Password for authenticating to server
CertPath string `json:"certpath,omitempty"` // Cert path (public key) for X.509 bucket auth
KeyPath string `json:"keypath,omitempty"` // Key path (private key) for X.509 bucket auth
CACertPath string `json:"cacertpath,omitempty"` // Root CA cert path for X.509 bucket auth
KvTLSPort int `json:"kv_tls_port,omitempty"` // Memcached TLS port, if not default (11207)
}
func (bc *BucketConfig) MakeBucketSpec() base.BucketSpec {
server := "http://localhost:8091"
bucketName := ""
tlsPort := 11207
if bc.Server != nil {
server = *bc.Server
}
if bc.Bucket != nil {
bucketName = *bc.Bucket
}
if bc.KvTLSPort != 0 {
tlsPort = bc.KvTLSPort
}
return base.BucketSpec{
Server: server,
BucketName: bucketName,
Keypath: bc.KeyPath,
Certpath: bc.CertPath,
CACertPath: bc.CACertPath,
KvTLSPort: tlsPort,
Auth: bc,
}
}
// Implementation of AuthHandler interface for BucketConfig
func (bucketConfig *BucketConfig) GetCredentials() (username string, password string, bucketname string) {
return base.TransformBucketCredentials(bucketConfig.Username, bucketConfig.Password, *bucketConfig.Bucket)
}
type ClusterConfig struct {
BucketConfig
DataDir string `json:"data_dir,omitempty"`
HeartbeatIntervalSeconds *uint16 `json:"heartbeat_interval_seconds,omitempty"`
}
func (c ClusterConfig) CBGTEnabled() bool {
// if we have a non-empty server field, then assume CBGT is enabled.
return c.Server != nil && *c.Server != ""
}
// JSON object that defines a database configuration within the ServerConfig.
type DbConfig struct {
BucketConfig
Name string `json:"name,omitempty"` // Database name in REST API (stored as key in JSON)
Sync *string `json:"sync,omitempty"` // Sync function defines which users can see which data
Users map[string]*db.PrincipalConfig `json:"users,omitempty"` // Initial user accounts
Roles map[string]*db.PrincipalConfig `json:"roles,omitempty"` // Initial roles
RevsLimit *uint32 `json:"revs_limit,omitempty"` // Max depth a document's revision tree can grow to
AutoImport interface{} `json:"import_docs,omitempty"` // Whether to automatically import Couchbase Server docs into SG. Xattrs must be enabled. true or "continuous" both enable this.
ImportPartitions *uint16 `json:"import_partitions,omitempty"` // Number of partitions for import sharding. Impacts the total DCP concurrency for import
ImportFilter *string `json:"import_filter,omitempty"` // Filter function (import)
ImportBackupOldRev bool `json:"import_backup_old_rev"` // Whether import should attempt to create a temporary backup of the previous revision body, when available.
EventHandlers *EventHandlerConfig `json:"event_handlers,omitempty"` // Event handlers (webhook)
FeedType string `json:"feed_type,omitempty"` // Feed type - "DCP" or "TAP"; defaults based on Couchbase server version
AllowEmptyPassword bool `json:"allow_empty_password,omitempty"` // Allow empty passwords? Defaults to false
CacheConfig *CacheConfig `json:"cache,omitempty"` // Cache settings
DeprecatedRevCacheSize *uint32 `json:"rev_cache_size,omitempty"` // Maximum number of revisions to store in the revision cache (deprecated, CBG-356)
StartOffline bool `json:"offline,omitempty"` // start the DB in the offline state, defaults to false
Unsupported db.UnsupportedOptions `json:"unsupported,omitempty"` // Config for unsupported features
Deprecated DeprecatedOptions `json:"deprecated,omitempty"` // Config for Deprecated features
OIDCConfig *auth.OIDCOptions `json:"oidc,omitempty"` // Config properties for OpenID Connect authentication
OldRevExpirySeconds *uint32 `json:"old_rev_expiry_seconds,omitempty"` // The number of seconds before old revs are removed from CBS bucket
ViewQueryTimeoutSecs *uint32 `json:"view_query_timeout_secs,omitempty"` // The view query timeout in seconds
LocalDocExpirySecs *uint32 `json:"local_doc_expiry_secs,omitempty"` // The _local doc expiry time in seconds
EnableXattrs *bool `json:"enable_shared_bucket_access,omitempty"` // Whether to use extended attributes to store _sync metadata
SecureCookieOverride *bool `json:"session_cookie_secure,omitempty"` // Override cookie secure flag
SessionCookieName string `json:"session_cookie_name"` // Custom per-database session cookie name
SessionCookieHTTPOnly bool `json:"session_cookie_http_only"` // HTTP only cookies
AllowConflicts *bool `json:"allow_conflicts,omitempty"` // False forbids creating conflicts
NumIndexReplicas *uint `json:"num_index_replicas"` // Number of GSI index replicas used for core indexes
UseViews bool `json:"use_views"` // Force use of views instead of GSI
SendWWWAuthenticateHeader *bool `json:"send_www_authenticate_header,omitempty"` // If false, disables setting of 'WWW-Authenticate' header in 401 responses
BucketOpTimeoutMs *uint32 `json:"bucket_op_timeout_ms,omitempty"` // How long bucket ops should block returning "operation timed out". If nil, uses GoCB default. GoCB buckets only.
DeltaSync *DeltaSyncConfig `json:"delta_sync,omitempty"` // Config for delta sync
CompactIntervalDays *float32 `json:"compact_interval_days,omitempty"` // Interval between scheduled compaction runs (in days) - 0 means don't run
SGReplicateEnabled *bool `json:"sgreplicate_enabled,omitempty"` // When false, node will not be assigned replications
SGReplicateWebsocketPingInterval *int `json:"sgreplicate_websocket_heartbeat_secs,omitempty"` // If set, uses this duration as a custom heartbeat interval for websocket ping frames
Replications map[string]*db.ReplicationConfig `json:"replications,omitempty"` // sg-replicate replication definitions
ServeInsecureAttachmentTypes bool `json:"serve_insecure_attachment_types,omitempty"` // Attachment content type will bypass the content-disposition handling, default false
QueryPaginationLimit *int `json:"query_pagination_limit,omitempty"` // Query limit to be used during pagination of large queries
UserXattrKey string `json:"user_xattr_key,omitempty"` // Key of user xattr that will be accessible from the Sync Function. If empty the feature will be disabled.
}
type DeltaSyncConfig struct {
Enabled *bool `json:"enabled,omitempty"` // Whether delta sync is enabled (requires EE)
RevMaxAgeSeconds *uint32 `json:"rev_max_age_seconds,omitempty"` // The number of seconds deltas for old revs are available for
}
type DeprecatedOptions struct {
}
type DbConfigMap map[string]*DbConfig
type ReplConfigMap map[string]*ReplicateV1Config
type FacebookConfig struct {
Register bool // If true, server will register new user accounts
}
type GoogleConfig struct {
Register bool // If true, server will register new user accounts
AppClientID []string `json:"app_client_id"` // list of enabled client ids
}
type CORSConfig struct {
Origin []string // List of allowed origins, use ["*"] to allow access from everywhere
LoginOrigin []string // List of allowed login origins
Headers []string // List of allowed headers
MaxAge int // Maximum age of the CORS Options request
}
type EventHandlerConfig struct {
MaxEventProc uint `json:"max_processes,omitempty"` // Max concurrent event handling goroutines
WaitForProcess string `json:"wait_for_process,omitempty"` // Max wait time when event queue is full (ms)
DocumentChanged []*EventConfig `json:"document_changed,omitempty"` // Document changed
DBStateChanged []*EventConfig `json:"db_state_changed,omitempty"` // DB state change
}
type EventConfig struct {
HandlerType string `json:"handler"` // Handler type
Url string `json:"url,omitempty"` // Url (webhook)
Filter string `json:"filter,omitempty"` // Filter function (webhook)
Timeout *uint64 `json:"timeout,omitempty"` // Timeout (webhook)
Options map[string]interface{} `json:"options,omitempty"` // Options can be specified per-handler, and are specific to each type.
}
type CacheConfig struct {
RevCacheConfig *RevCacheConfig `json:"rev_cache"` // Revision Cache Config Settings
ChannelCacheConfig *ChannelCacheConfig `json:"channel_cache"` // Channel Cache Config Settings
DeprecatedCacheConfig
}
// ***************************************************************
// Kept around for CBG-356 backwards compatibility
// ***************************************************************
type DeprecatedCacheConfig struct {
DeprecatedCachePendingSeqMaxWait *uint32 `json:"max_wait_pending,omitempty"` // Max wait for pending sequence before skipping
DeprecatedCachePendingSeqMaxNum *int `json:"max_num_pending,omitempty"` // Max number of pending sequences before skipping
DeprecatedCacheSkippedSeqMaxWait *uint32 `json:"max_wait_skipped,omitempty"` // Max wait for skipped sequence before abandoning
DeprecatedEnableStarChannel *bool `json:"enable_star_channel,omitempty"` // Enable star channel
DeprecatedChannelCacheMaxLength *int `json:"channel_cache_max_length,omitempty"` // Maximum number of entries maintained in cache per channel
DeprecatedChannelCacheMinLength *int `json:"channel_cache_min_length,omitempty"` // Minimum number of entries maintained in cache per channel
DeprecatedChannelCacheAge *int `json:"channel_cache_expiry,omitempty"` // Time (seconds) to keep entries in cache beyond the minimum retained
}
type RevCacheConfig struct {
Size *uint32 `json:"size,omitempty"` // Maximum number of revisions to store in the revision cache
ShardCount *uint16 `json:"shard_count,omitempty"` // Number of shards the rev cache should be split into
}
type ChannelCacheConfig struct {
MaxNumber *int `json:"max_number,omitempty"` // Maximum number of channel caches which will exist at any one point
HighWatermarkPercent *int `json:"compact_high_watermark_pct,omitempty"` // High watermark for channel cache eviction (percent)
LowWatermarkPercent *int `json:"compact_low_watermark_pct,omitempty"` // Low watermark for channel cache eviction (percent)
MaxWaitPending *uint32 `json:"max_wait_pending,omitempty"` // Max wait for pending sequence before skipping
MaxNumPending *int `json:"max_num_pending,omitempty"` // Max number of pending sequences before skipping
MaxWaitSkipped *uint32 `json:"max_wait_skipped,omitempty"` // Max wait for skipped sequence before abandoning
EnableStarChannel *bool `json:"enable_star_channel,omitempty"` // Enable star channel
MaxLength *int `json:"max_length,omitempty"` // Maximum number of entries maintained in cache per channel
MinLength *int `json:"min_length,omitempty"` // Minimum number of entries maintained in cache per channel
ExpirySeconds *int `json:"expiry_seconds,omitempty"` // Time (seconds) to keep entries in cache beyond the minimum retained
DeprecatedQueryLimit *int `json:"query_limit,omitempty"` // Limit used for channel queries, if not specified by client DEPRECATED in favour of db.QueryPaginationLimit
}
type UnsupportedServerConfig struct {
Http2Config *Http2Config `json:"http2,omitempty"` // Config settings for HTTP2
StatsLogFrequencySecs *uint `json:"stats_log_freq_secs,omitempty"` // How often should stats be written to stats logs
UseStdlibJSON *bool `json:"use_stdlib_json,omitempty"` // Bypass the jsoniter package and use Go's stdlib instead
}
type Http2Config struct {
Enabled *bool `json:"enabled,omitempty"` // Whether HTTP2 support is enabled
}
func GetTLSVersionFromString(stringV *string) uint16 {
if stringV != nil {
switch *stringV {
case "tlsv1":
return tls.VersionTLS10
case "tlsv1.1":
return tls.VersionTLS11
case "tlsv1.2":
return tls.VersionTLS12
case "tlsv1.3":
return tls.VersionTLS13
}
}
return uint16(DefaultMinimumTLSVersionConst)
}
func (dbConfig *DbConfig) setup(name string) error {
dbConfig.Name = name
if dbConfig.Bucket == nil {
dbConfig.Bucket = &dbConfig.Name
}
if dbConfig.Server == nil {
dbConfig.Server = &DefaultServer
}
url, err := url.Parse(*dbConfig.Server)
if err != nil {
return err
}
if url.User != nil {
// Remove credentials from URL and put them into the DbConfig.Username and .Password:
if dbConfig.Username == "" {
dbConfig.Username = url.User.Username()
}
if dbConfig.Password == "" {
if password, exists := url.User.Password(); exists {
dbConfig.Password = password
}
}
url.User = nil
urlStr := url.String()
dbConfig.Server = &urlStr
}
// Load Sync Function.
if dbConfig.Sync != nil {
sync, err := loadJavaScript(*dbConfig.Sync, dbConfig.Unsupported.RemoteConfigTlsSkipVerify)
if err != nil {
return &JavaScriptLoadError{
JSLoadType: SyncFunction,
Path: *dbConfig.Sync,
Err: err,
}
}
dbConfig.Sync = &sync
}
// Load Import Filter Function.
if dbConfig.ImportFilter != nil {
importFilter, err := loadJavaScript(*dbConfig.ImportFilter, dbConfig.Unsupported.RemoteConfigTlsSkipVerify)
if err != nil {
return &JavaScriptLoadError{
JSLoadType: ImportFilter,
Path: *dbConfig.ImportFilter,
Err: err,
}
}
dbConfig.ImportFilter = &importFilter
}
// Load Conflict Resolution Function.
for _, rc := range dbConfig.Replications {
if rc.ConflictResolutionFn != "" {
conflictResolutionFn, err := loadJavaScript(rc.ConflictResolutionFn, dbConfig.Unsupported.RemoteConfigTlsSkipVerify)
if err != nil {
return &JavaScriptLoadError{
JSLoadType: ConflictResolver,
Path: rc.ConflictResolutionFn,
Err: err,
}
}
rc.ConflictResolutionFn = conflictResolutionFn
}
}
return nil
}
// loadJavaScript loads the JavaScript source from an external file or an HTTP/HTTPS endpoint.
// If the specified path is neither a valid file nor a URI, it returns the input path
// as-is with the assumption that it is an inline JavaScript source. Returns error if there is
// any failure in reading the JavaScript file or URI.
func loadJavaScript(path string, insecureSkipVerify bool) (js string, err error) {
rc, err := readFromPath(path, insecureSkipVerify)
if errors.Is(err, ErrPathNotFound) {
		// readFromPath reports ErrPathNotFound when the path is neither a file nor a URL,
		// so treat the given path as inline JavaScript source and return it as-is.
return path, nil
}
if err != nil {
if !insecureSkipVerify {
var unkAuthErr x509.UnknownAuthorityError
if errors.As(err, &unkAuthErr) {
return "", fmt.Errorf("%w. TLS certificate failed verification. TLS verification "+
"can be disabled using the unsupported \"remote_config_tls_skip_verify\" option", err)
}
return "", err
}
return "", err
}
defer func() { _ = rc.Close() }()
src, err := ioutil.ReadAll(rc)
if err != nil {
return "", err
}
return string(src), nil
}
// JSLoadType represents a specific JavaScript load type.
// It is used to uniquely identify any potential errors during JavaScript load.
type JSLoadType int
const (
SyncFunction JSLoadType = iota // Sync Function JavaScript load.
ImportFilter // Import filter JavaScript load.
ConflictResolver // Conflict Resolver JavaScript load.
WebhookFilter // Webhook filter JavaScript load.
jsLoadTypeCount // Number of JSLoadType constants.
)
// jsLoadTypes represents the list of different possible JSLoadType.
var jsLoadTypes = []string{"SyncFunction", "ImportFilter", "ConflictResolver", "WebhookFilter"}
// String returns the string representation of a specific JSLoadType.
func (t JSLoadType) String() string {
	if int(t) >= len(jsLoadTypes) {
return fmt.Sprintf("JSLoadType(%d)", t)
}
return jsLoadTypes[t]
}
// JavaScriptLoadError is returned if there is any failure in loading JavaScript
// source from an external file or URL (HTTP/HTTPS endpoint).
type JavaScriptLoadError struct {
JSLoadType JSLoadType // A specific JavaScript load type.
Path string // Path of the JavaScript source.
Err error // Underlying error.
}
// Error returns string representation of the JavaScriptLoadError.
func (e *JavaScriptLoadError) Error() string {
return fmt.Sprintf("Error loading JavaScript (%s) from %q, Err: %v", e.JSLoadType, e.Path, e.Err)
}
// ErrPathNotFound means that the specified path or URL (HTTP/HTTPS endpoint)
// doesn't exist to construct a ReadCloser to read the bytes later on.
var ErrPathNotFound = errors.New("path not found")
// readFromPath creates a ReadCloser from the given path. The path must be either a valid file
// or an HTTP/HTTPS endpoint. Returns an error if there is any failure in building ReadCloser.
func readFromPath(path string, insecureSkipVerify bool) (rc io.ReadCloser, err error) {
messageFormat := "Loading content from [%s] ..."
if strings.HasPrefix(path, "http://") || strings.HasPrefix(path, "https://") {
base.Infof(base.KeyAll, messageFormat, path)
client := base.GetHttpClient(insecureSkipVerify)
resp, err := client.Get(path)
if err != nil {
return nil, err
} else if resp.StatusCode >= 300 {
_ = resp.Body.Close()
return nil, base.HTTPErrorf(resp.StatusCode, http.StatusText(resp.StatusCode))
}
rc = resp.Body
} else if base.FileExists(path) {
base.Infof(base.KeyAll, messageFormat, path)
rc, err = os.Open(path)
if err != nil {
return nil, err
}
} else {
return nil, ErrPathNotFound
}
return rc, nil
}
func (dbConfig *DbConfig) AutoImportEnabled() (bool, error) {
if dbConfig.AutoImport == nil {
return base.DefaultAutoImport, nil
}
if b, ok := dbConfig.AutoImport.(bool); ok {
return b, nil
}
str, ok := dbConfig.AutoImport.(string)
if ok && str == "continuous" {
base.Warnf(`Using deprecated config value for "import_docs": "continuous". Use "import_docs": true instead.`)
return true, nil
}
return false, fmt.Errorf("Unrecognized value for import_docs: %#v. Valid values are true and false.", dbConfig.AutoImport)
}
func (dbConfig *DbConfig) validate() error {
return dbConfig.validateVersion(base.IsEnterpriseEdition())
}
func (dbConfig *DbConfig) validateVersion(isEnterpriseEdition bool) (errorMessages error) {
// Make sure a non-zero compact_interval_days config is within the valid range
if val := dbConfig.CompactIntervalDays; val != nil && *val != 0 &&
(*val < db.CompactIntervalMinDays || *val > db.CompactIntervalMaxDays) {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(rangeValueErrorMsg, "compact_interval_days",
fmt.Sprintf("%g-%g", db.CompactIntervalMinDays, db.CompactIntervalMaxDays)))
}
if dbConfig.CacheConfig != nil {
if dbConfig.CacheConfig.ChannelCacheConfig != nil {
// EE: channel cache
if !isEnterpriseEdition {
if val := dbConfig.CacheConfig.ChannelCacheConfig.MaxNumber; val != nil {
base.Warnf(eeOnlyWarningMsg, "cache.channel_cache.max_number", *val, db.DefaultChannelCacheMaxNumber)
dbConfig.CacheConfig.ChannelCacheConfig.MaxNumber = nil
}
if val := dbConfig.CacheConfig.ChannelCacheConfig.HighWatermarkPercent; val != nil {
base.Warnf(eeOnlyWarningMsg, "cache.channel_cache.compact_high_watermark_pct", *val, db.DefaultCompactHighWatermarkPercent)
dbConfig.CacheConfig.ChannelCacheConfig.HighWatermarkPercent = nil
}
if val := dbConfig.CacheConfig.ChannelCacheConfig.LowWatermarkPercent; val != nil {
base.Warnf(eeOnlyWarningMsg, "cache.channel_cache.compact_low_watermark_pct", *val, db.DefaultCompactLowWatermarkPercent)
dbConfig.CacheConfig.ChannelCacheConfig.LowWatermarkPercent = nil
}
}
if dbConfig.CacheConfig.ChannelCacheConfig.MaxNumPending != nil && *dbConfig.CacheConfig.ChannelCacheConfig.MaxNumPending < 1 {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(minValueErrorMsg, "cache.channel_cache.max_num_pending", 1))
}
if dbConfig.CacheConfig.ChannelCacheConfig.MaxWaitPending != nil && *dbConfig.CacheConfig.ChannelCacheConfig.MaxWaitPending < 1 {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(minValueErrorMsg, "cache.channel_cache.max_wait_pending", 1))
}
if dbConfig.CacheConfig.ChannelCacheConfig.MaxWaitSkipped != nil && *dbConfig.CacheConfig.ChannelCacheConfig.MaxWaitSkipped < 1 {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(minValueErrorMsg, "cache.channel_cache.max_wait_skipped", 1))
}
if dbConfig.CacheConfig.ChannelCacheConfig.MaxLength != nil && *dbConfig.CacheConfig.ChannelCacheConfig.MaxLength < 1 {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(minValueErrorMsg, "cache.channel_cache.max_length", 1))
}
if dbConfig.CacheConfig.ChannelCacheConfig.MinLength != nil && *dbConfig.CacheConfig.ChannelCacheConfig.MinLength < 1 {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(minValueErrorMsg, "cache.channel_cache.min_length", 1))
}
if dbConfig.CacheConfig.ChannelCacheConfig.ExpirySeconds != nil && *dbConfig.CacheConfig.ChannelCacheConfig.ExpirySeconds < 1 {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(minValueErrorMsg, "cache.channel_cache.expiry_seconds", 1))
}
if dbConfig.CacheConfig.ChannelCacheConfig.MaxNumber != nil && *dbConfig.CacheConfig.ChannelCacheConfig.MaxNumber < db.MinimumChannelCacheMaxNumber {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(minValueErrorMsg, "cache.channel_cache.max_number", db.MinimumChannelCacheMaxNumber))
}
// Compact watermark validation
hwm := db.DefaultCompactHighWatermarkPercent
lwm := db.DefaultCompactLowWatermarkPercent
if dbConfig.CacheConfig.ChannelCacheConfig.HighWatermarkPercent != nil {
if *dbConfig.CacheConfig.ChannelCacheConfig.HighWatermarkPercent < 1 || *dbConfig.CacheConfig.ChannelCacheConfig.HighWatermarkPercent > 100 {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(rangeValueErrorMsg, "cache.channel_cache.compact_high_watermark_pct", "0-100"))
}
hwm = *dbConfig.CacheConfig.ChannelCacheConfig.HighWatermarkPercent
}
if dbConfig.CacheConfig.ChannelCacheConfig.LowWatermarkPercent != nil {
if *dbConfig.CacheConfig.ChannelCacheConfig.LowWatermarkPercent < 1 || *dbConfig.CacheConfig.ChannelCacheConfig.LowWatermarkPercent > 100 {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(rangeValueErrorMsg, "cache.channel_cache.compact_low_watermark_pct", "0-100"))
}
lwm = *dbConfig.CacheConfig.ChannelCacheConfig.LowWatermarkPercent
}
if lwm >= hwm {
errorMessages = multierror.Append(errorMessages, fmt.Errorf("cache.channel_cache.compact_high_watermark_pct (%v) must be greater than cache.channel_cache.compact_low_watermark_pct (%v)", hwm, lwm))
}
}
if dbConfig.CacheConfig.RevCacheConfig != nil {
// EE: disable revcache
revCacheSize := dbConfig.CacheConfig.RevCacheConfig.Size
if !isEnterpriseEdition && revCacheSize != nil && *revCacheSize == 0 {
base.Warnf(eeOnlyWarningMsg, "cache.rev_cache.size", *revCacheSize, db.DefaultRevisionCacheSize)
dbConfig.CacheConfig.RevCacheConfig.Size = nil
}
if dbConfig.CacheConfig.RevCacheConfig.ShardCount != nil {
if *dbConfig.CacheConfig.RevCacheConfig.ShardCount < 1 {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(minValueErrorMsg, "cache.rev_cache.shard_count", 1))
}
}
}
}
// EE: delta sync
if !isEnterpriseEdition && dbConfig.DeltaSync != nil && dbConfig.DeltaSync.Enabled != nil {
base.Warnf(eeOnlyWarningMsg, "delta_sync.enabled", *dbConfig.DeltaSync.Enabled, false)
dbConfig.DeltaSync.Enabled = nil
}
// Import validation
autoImportEnabled, err := dbConfig.AutoImportEnabled()
if err != nil {
errorMessages = multierror.Append(errorMessages, err)
}
if dbConfig.FeedType == base.TapFeedType && autoImportEnabled == true {
errorMessages = multierror.Append(errorMessages, fmt.Errorf("Invalid configuration for Sync Gw. TAP feed type can not be used with auto-import"))
}
if dbConfig.AutoImport != nil && autoImportEnabled && !dbConfig.UseXattrs() {
errorMessages = multierror.Append(errorMessages, fmt.Errorf("Invalid configuration - import_docs enabled, but enable_shared_bucket_access not enabled"))
}
if dbConfig.ImportPartitions != nil {
if !isEnterpriseEdition {
base.Warnf(eeOnlyWarningMsg, "import_partitions", *dbConfig.ImportPartitions, nil)
dbConfig.ImportPartitions = nil
} else if !dbConfig.UseXattrs() {
errorMessages = multierror.Append(errorMessages, fmt.Errorf("Invalid configuration - import_partitions set, but enable_shared_bucket_access not enabled"))
} else if !autoImportEnabled {
errorMessages = multierror.Append(errorMessages, fmt.Errorf("Invalid configuration - import_partitions set, but import_docs disabled"))
} else if *dbConfig.ImportPartitions < 1 || *dbConfig.ImportPartitions > 1024 {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(rangeValueErrorMsg, "import_partitions", "1-1024"))
}
}
if dbConfig.DeprecatedPool != nil {
base.Warnf(`"pool" config option is not supported. The pool will be set to "default". The option should be removed from config file.`)
}
return errorMessages
}
func (dbConfig *DbConfig) validateSgDbConfig() (errorMessages error) {
if err := dbConfig.validate(); err != nil {
errorMessages = multierror.Append(errorMessages, err)
}
return errorMessages
}
// deprecatedConfigCacheFallback checks for deprecated cache config options and returns a warning for each one that is
// set. If a deprecated option is set and its replacement is not, the replacement is populated from the deprecated
// value; if both are set, the warning is still returned but the new value takes precedence.
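// For example, if the deprecated "rev_cache_size" option is set while "cache.rev_cache.size" is not, the old value is
// copied into the new option and a deprecation warning is returned for "rev_cache_size".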
func (dbConfig *DbConfig) deprecatedConfigCacheFallback() (warnings []string) {
warningMsgFmt := "Using deprecated config option: %q. Use %q instead."
if dbConfig.CacheConfig == nil {
dbConfig.CacheConfig = &CacheConfig{}
}
if dbConfig.CacheConfig.RevCacheConfig == nil {
dbConfig.CacheConfig.RevCacheConfig = &RevCacheConfig{}
}
if dbConfig.CacheConfig.ChannelCacheConfig == nil {
dbConfig.CacheConfig.ChannelCacheConfig = &ChannelCacheConfig{}
}
if dbConfig.DeprecatedRevCacheSize != nil {
if dbConfig.CacheConfig.RevCacheConfig.Size == nil {
dbConfig.CacheConfig.RevCacheConfig.Size = dbConfig.DeprecatedRevCacheSize
}
warnings = append(warnings, fmt.Sprintf(warningMsgFmt, "rev_cache_size", "cache.rev_cache.size"))
}
if dbConfig.CacheConfig.DeprecatedCachePendingSeqMaxWait != nil {
if dbConfig.CacheConfig.ChannelCacheConfig.MaxWaitPending == nil {
dbConfig.CacheConfig.ChannelCacheConfig.MaxWaitPending = dbConfig.CacheConfig.DeprecatedCachePendingSeqMaxWait
}
warnings = append(warnings, fmt.Sprintf(warningMsgFmt, "max_wait_pending", "cache.channel_cache.max_wait_pending"))
}
if dbConfig.CacheConfig.DeprecatedCachePendingSeqMaxNum != nil {
if dbConfig.CacheConfig.ChannelCacheConfig.MaxNumPending == nil {
dbConfig.CacheConfig.ChannelCacheConfig.MaxNumPending = dbConfig.CacheConfig.DeprecatedCachePendingSeqMaxNum
}
warnings = append(warnings, fmt.Sprintf(warningMsgFmt, "max_num_pending", "cache.channel_cache.max_num_pending"))
}
if dbConfig.CacheConfig.DeprecatedCacheSkippedSeqMaxWait != nil {
if dbConfig.CacheConfig.ChannelCacheConfig.MaxWaitSkipped == nil {
dbConfig.CacheConfig.ChannelCacheConfig.MaxWaitSkipped = dbConfig.CacheConfig.DeprecatedCacheSkippedSeqMaxWait
}
warnings = append(warnings, fmt.Sprintf(warningMsgFmt, "max_wait_skipped", "cache.channel_cache.max_wait_skipped"))
}
if dbConfig.CacheConfig.DeprecatedEnableStarChannel != nil {
if dbConfig.CacheConfig.ChannelCacheConfig.EnableStarChannel == nil {
dbConfig.CacheConfig.ChannelCacheConfig.EnableStarChannel = dbConfig.CacheConfig.DeprecatedEnableStarChannel
}
warnings = append(warnings, fmt.Sprintf(warningMsgFmt, "enable_star_channel", "cache.channel_cache.enable_star_channel"))
}
if dbConfig.CacheConfig.DeprecatedChannelCacheMaxLength != nil {
if dbConfig.CacheConfig.ChannelCacheConfig.MaxLength == nil {
dbConfig.CacheConfig.ChannelCacheConfig.MaxLength = dbConfig.CacheConfig.DeprecatedChannelCacheMaxLength
}
warnings = append(warnings, fmt.Sprintf(warningMsgFmt, "channel_cache_max_length", "cache.channel_cache.max_length"))
}
if dbConfig.CacheConfig.DeprecatedChannelCacheMinLength != nil {
if dbConfig.CacheConfig.ChannelCacheConfig.MinLength == nil {
dbConfig.CacheConfig.ChannelCacheConfig.MinLength = dbConfig.CacheConfig.DeprecatedChannelCacheMinLength
}
warnings = append(warnings, fmt.Sprintf(warningMsgFmt, "channel_cache_min_length", "cache.channel_cache.min_length"))
}
if dbConfig.CacheConfig.DeprecatedChannelCacheAge != nil {
if dbConfig.CacheConfig.ChannelCacheConfig.ExpirySeconds == nil {
dbConfig.CacheConfig.ChannelCacheConfig.ExpirySeconds = dbConfig.CacheConfig.DeprecatedChannelCacheAge
}
warnings = append(warnings, fmt.Sprintf(warningMsgFmt, "channel_cache_expiry", "cache.channel_cache.expiry_seconds"))
}
return warnings
}
// Implementation of AuthHandler interface for DbConfig
func (dbConfig *DbConfig) GetCredentials() (string, string, string) {
return base.TransformBucketCredentials(dbConfig.Username, dbConfig.Password, *dbConfig.Bucket)
}
func (dbConfig *DbConfig) ConflictsAllowed() *bool {
if dbConfig.AllowConflicts != nil {
return dbConfig.AllowConflicts
}
return base.BoolPtr(base.DefaultAllowConflicts)
}
func (dbConfig *DbConfig) UseXattrs() bool {
if dbConfig.EnableXattrs != nil {
return *dbConfig.EnableXattrs
}
return base.DefaultUseXattrs
}
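// Redacted returns a deep copy of the database config with passwords redacted and each replication config redacted.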
func (dbConfig *DbConfig) Redacted() (*DbConfig, error) {
var config DbConfig
err := base.DeepCopyInefficient(&config, dbConfig)
if err != nil {
return nil, err
}
config.Password = "xxxxx"
for i := range config.Users {
config.Users[i].Password = base.StringPtr("xxxxx")
}
	for i := range config.Replications {
config.Replications[i] = config.Replications[i].Redacted()
}
return &config, nil
}
// Implementation of AuthHandler interface for ClusterConfig
func (clusterConfig *ClusterConfig) GetCredentials() (string, string, string) {
return base.TransformBucketCredentials(clusterConfig.Username, clusterConfig.Password, *clusterConfig.Bucket)
}
// LoadServerConfig loads a ServerConfig from either a JSON file or from a URL
func LoadServerConfig(path string) (config *ServerConfig, err error) {
rc, err := readFromPath(path, false)
if err != nil {
return nil, err
}
defer func() { _ = rc.Close() }()
return readServerConfig(rc)
}
// readServerConfig returns a validated ServerConfig from an io.Reader
func readServerConfig(r io.Reader) (config *ServerConfig, err error) {
err = decodeAndSanitiseConfig(r, &config)
return config, err
}
// decodeAndSanitiseConfig will sanitise a ServerConfig or dbConfig from an io.Reader and unmarshal it into the given config parameter.
func decodeAndSanitiseConfig(r io.Reader, config interface{}) (err error) {
b, err := ioutil.ReadAll(r)
if err != nil {
return err
}
// Expand environment variables.
b, err = expandEnv(b)
if err != nil {
return err
}
b = base.ConvertBackQuotedStrings(b)
d := base.JSONDecoder(bytes.NewBuffer(b))
d.DisallowUnknownFields()
err = d.Decode(config)
return base.WrapJSONUnknownFieldErr(err)
}
func (config *ServerConfig) setupAndValidateDatabases() (errs error) {
if config == nil {
return nil
}
for name, dbConfig := range config.Databases {
if err := dbConfig.setup(name); err != nil {
return err
}
if errs = dbConfig.validateSgDbConfig(); errs != nil {
return errs
}
}
return nil
}
// expandEnv replaces $var or ${var} in config according to the values of the
// current environment variables. The replacement is case-sensitive. References
// to undefined variables will result in an error. A default value can
// be given by using the form ${var:-default value}.
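// For example, "${SG_DB_PASSWORD:-pass123}" expands to the value of the SG_DB_PASSWORD environment variable when it
// is set, and to "pass123" otherwise; "${SG_DB_PASSWORD}" with no default is an error if the variable is undefined.
// (SG_DB_PASSWORD and pass123 are illustrative names/values, not ones required by the config.)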
func expandEnv(config []byte) (value []byte, errs error) {
return []byte(os.Expand(string(config), func(key string) string {
if key == "$" {
base.Debugf(base.KeyAll, "Skipping environment variable expansion: %s", key)
return key
}
val, err := envDefaultExpansion(key, os.Getenv)
if err != nil {
errs = multierror.Append(errs, err)
}
return val
})), errs
}
// ErrEnvVarUndefined is returned when a specified variable can’t be resolved from
// the system environment and no default value is supplied in the configuration.
type ErrEnvVarUndefined struct {
key string // Environment variable identifier.
}
func (e ErrEnvVarUndefined) Error() string {
return fmt.Sprintf("undefined environment variable '${%s}' is specified in the config without default value", e.key)
}
// envDefaultExpansion implements the ${foo:-bar} parameter expansion from
// https://pubs.opengroup.org/onlinepubs/009695399/utilities/xcu_chap02.html#tag_02_06_02
func envDefaultExpansion(key string, getEnvFn func(string) string) (value string, err error) {
kvPair := strings.SplitN(key, ":-", 2)
key = kvPair[0]
value = getEnvFn(key)
if value == "" && len(kvPair) == 2 {
// Set value to the default.
value = kvPair[1]
base.Debugf(base.KeyAll, "Replacing config environment variable '${%s}' with "+
"default value specified", key)
} else if value == "" && len(kvPair) != 2 {
return "", ErrEnvVarUndefined{key: key}
} else {
base.Debugf(base.KeyAll, "Replacing config environment variable '${%s}'", key)
}
return value, nil
}
// validate validates the given server config and returns all invalid options as a combined error
func (config *ServerConfig) validate() (errorMessages error) {
if config.Unsupported != nil && config.Unsupported.StatsLogFrequencySecs != nil {
if *config.Unsupported.StatsLogFrequencySecs == 0 {
// explicitly disabled
} else if *config.Unsupported.StatsLogFrequencySecs < 10 {
errorMessages = multierror.Append(errorMessages, fmt.Errorf(minValueErrorMsg,
"unsupported.stats_log_freq_secs", 10))
}
}
return errorMessages
}
// SetupAndValidateLogging sets up and validates the logging configuration,
// falling back to deprecated logging options where necessary.
func (config *ServerConfig) SetupAndValidateLogging() (err error) {
if config.Logging == nil {
config.Logging = &base.LoggingConfig{}
}
// populate values from deprecated logging config options if not set
config.deprecatedConfigLoggingFallback()
base.SetRedaction(config.Logging.RedactionLevel)
err = config.Logging.Init(defaultLogFilePath)
if err != nil {
return err
}
if config.Logging.DeprecatedDefaultLog == nil {
config.Logging.DeprecatedDefaultLog = &base.LogAppenderConfig{}
}
return nil
}
// deprecatedConfigLoggingFallback parses the ServerConfig and falls back to older
// logging config options for backwards compatibility,
// logging a warning for each deprecated option that is used.
func (config *ServerConfig) deprecatedConfigLoggingFallback() {
warningMsgFmt := "Using deprecated config option: %q. Use %q instead."
if config.Logging.DeprecatedDefaultLog != nil {
// Fall back to the old logging.["default"].LogFilePath option
if config.Logging.LogFilePath == "" && config.Logging.DeprecatedDefaultLog.LogFilePath != nil {
base.Warnf(warningMsgFmt, `logging.["default"].LogFilePath`, "logging.log_file_path")
// Set the new LogFilePath to be the directory containing the old logfile, instead of the full path.
// SGCollect relies on this path to pick up the standard and rotated log files.
info, err := os.Stat(*config.Logging.DeprecatedDefaultLog.LogFilePath)
if err == nil && info.IsDir() {
config.Logging.LogFilePath = *config.Logging.DeprecatedDefaultLog.LogFilePath
} else {
config.Logging.LogFilePath = filepath.Dir(*config.Logging.DeprecatedDefaultLog.LogFilePath)
base.Infof(base.KeyAll, "Using %v as log file path (parent directory of deprecated logging."+
"[\"default\"].LogFilePath)", config.Logging.LogFilePath)
}
}
// Fall back to the old logging.["default"].LogKeys option
if len(config.Logging.Console.LogKeys) == 0 && len(config.Logging.DeprecatedDefaultLog.LogKeys) > 0 {
base.Warnf(warningMsgFmt, `logging.["default"].LogKeys`, "logging.console.log_keys")
config.Logging.Console.LogKeys = config.Logging.DeprecatedDefaultLog.LogKeys
}
// Fall back to the old logging.["default"].LogLevel option
if config.Logging.Console.LogLevel == nil && config.Logging.DeprecatedDefaultLog.LogLevel != 0 {
base.Warnf(warningMsgFmt, `logging.["default"].LogLevel`, "logging.console.log_level")
config.Logging.Console.LogLevel = base.ToLogLevel(config.Logging.DeprecatedDefaultLog.LogLevel)
}
}
// Fall back to the old LogFilePath option
if config.Logging.LogFilePath == "" && config.DeprecatedLogFilePath != nil {
base.Warnf(warningMsgFmt, "logFilePath", "logging.log_file_path")
config.Logging.LogFilePath = *config.DeprecatedLogFilePath
}
// Fall back to the old Log option
if config.Logging.Console.LogKeys == nil && len(config.DeprecatedLog) > 0 {
base.Warnf(warningMsgFmt, "log", "logging.console.log_keys")
config.Logging.Console.LogKeys = config.DeprecatedLog
}
}
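// MergeWith copies settings that are unset in this config from the other config. A database may only be defined in
// one of the merged configs; duplicates result in an error.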
func (self *ServerConfig) MergeWith(other *ServerConfig) error {
if self.Interface == nil {
self.Interface = other.Interface
}
if self.AdminInterface == nil {
self.AdminInterface = other.AdminInterface
}
if self.ProfileInterface == nil {
self.ProfileInterface = other.ProfileInterface
}
if self.ConfigServer == nil {
self.ConfigServer = other.ConfigServer
}
if self.DeploymentID == nil {
self.DeploymentID = other.DeploymentID
}
if self.Facebook == nil {
self.Facebook = other.Facebook
}
if self.CORS == nil {
self.CORS = other.CORS
}
for _, flag := range other.DeprecatedLog {
self.DeprecatedLog = append(self.DeprecatedLog, flag)
}
if self.Logging == nil {
self.Logging = other.Logging
}
if other.Pretty {
self.Pretty = true
}
for name, db := range other.Databases {
if self.Databases[name] != nil {
return base.RedactErrorf("Database %q already specified earlier", base.UD(name))
}
if self.Databases == nil {
self.Databases = make(DbConfigMap)
}
self.Databases[name] = db
}
return nil
}
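// Redacted returns a deep copy of the server config with database credentials redacted.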
func (sc *ServerConfig) Redacted() (*ServerConfig, error) {
var config ServerConfig
err := base.DeepCopyInefficient(&config, sc)
if err != nil {
return nil, err
}
for i := range config.Databases {
config.Databases[i], err = config.Databases[i].Redacted()
if err != nil {
return nil, err
}
}
return &config, nil
}
// Reads the command line flags and the optional config file.
func ParseCommandLine(args []string, handling flag.ErrorHandling) (*ServerConfig, error) {
flagSet := flag.NewFlagSet(args[0], handling)
// TODO: Change default to false when we're ready to enable 3.0/bootstrap/persistent config by default (once QE's existing tests are ready to handle it)
// TODO: Move to scoped variable when we have 2 code paths from ServerMain for 3.0 and legacy handling.
disablePersistentConfigFlag := flagSet.Bool("disable_persistent_config", true, "If set, disables persistent config and reads all configuration from a legacy config file.")
addr := flagSet.String("interface", DefaultInterface, "Address to bind to")
authAddr := flagSet.String("adminInterface", DefaultAdminInterface, "Address to bind admin interface to")
profAddr := flagSet.String("profileInterface", "", "Address to bind profile interface to")
configServer := flagSet.String("configServer", "", "URL of server that can return database configs")
deploymentID := flagSet.String("deploymentID", "", "Customer/project identifier for stats reporting")
couchbaseURL := flagSet.String("url", DefaultServer, "Address of Couchbase server")
dbName := flagSet.String("dbname", "", "Name of Couchbase Server database (defaults to name of bucket)")
pretty := flagSet.Bool("pretty", false, "Pretty-print JSON responses")
verbose := flagSet.Bool("verbose", false, "Log more info about requests")
logKeys := flagSet.String("log", "", "Log keys, comma separated")
logFilePath := flagSet.String("logFilePath", "", "Path to log files")
certpath := flagSet.String("certpath", "", "Client certificate path")
cacertpath := flagSet.String("cacertpath", "", "Root CA certificate path")
keypath := flagSet.String("keypath", "", "Client certificate key path")
// used by service scripts as a way to specify a per-distro defaultLogFilePath
defaultLogFilePathFlag := flagSet.String("defaultLogFilePath", "", "Path to log files, if not overridden by --logFilePath, or the config")
_ = flagSet.Parse(args[1:])
var config *ServerConfig
var err error
if defaultLogFilePathFlag != nil {
defaultLogFilePath = *defaultLogFilePathFlag
}
if disablePersistentConfigFlag != nil {
disablePersistentConfig = *disablePersistentConfigFlag
if disablePersistentConfig {
base.Warnf("Running in legacy config mode (disable_persistent_config=true)")
} else {
base.Infof(base.KeyAll, "Running in persistent config mode")
}
}
if flagSet.NArg() > 0 {
// Read the configuration file(s), if any:
for _, filename := range flagSet.Args() {
newConfig, newConfigErr := LoadServerConfig(filename)
if pkgerrors.Cause(newConfigErr) == base.ErrUnknownField {
// Delay returning this error so we can continue with other setup
err = pkgerrors.WithMessage(newConfigErr, fmt.Sprintf("Error reading config file %s", filename))
} else if newConfigErr != nil {
return config, pkgerrors.WithMessage(newConfigErr, fmt.Sprintf("Error reading config file %s", filename))
}
if config == nil {
config = newConfig
} else {
if err := config.MergeWith(newConfig); err != nil {
return config, pkgerrors.WithMessage(err, fmt.Sprintf("Error reading config file %s", filename))
}
}
}
// Override the config file with global settings from command line flags:
if *addr != DefaultInterface {
config.Interface = addr
}
if *authAddr != DefaultAdminInterface {
config.AdminInterface = authAddr
}
if *profAddr != "" {
config.ProfileInterface = profAddr
}
if *configServer != "" {
config.ConfigServer = configServer
}
if *deploymentID != "" {
config.DeploymentID = deploymentID
}
if *pretty {
config.Pretty = *pretty
}
// If the interfaces were not specified in either the config file or
// on the command line, set them to the default values
if config.Interface == nil {
config.Interface = &DefaultInterface
}
if config.AdminInterface == nil {
config.AdminInterface = &DefaultAdminInterface
}
if *logFilePath != "" {
config.Logging.LogFilePath = *logFilePath
}
if *logKeys != "" {
config.Logging.Console.LogKeys = strings.Split(*logKeys, ",")
}
// Log HTTP Responses if verbose is enabled.
if verbose != nil && *verbose {
config.Logging.Console.LogKeys = append(config.Logging.Console.LogKeys, "HTTP+")
}
} else {
// If no config file is given, create a default config, filled in from command line flags:
var defaultBucketName = "sync_gateway"
if *dbName == "" {
*dbName = defaultBucketName
}
// At this point the addr is either:
// - A value provided by the user, in which case we want to leave it as is
// - The default value (":4984"), which is actually _not_ the default value we
// want for this case, since we are enabling insecure mode. We want "localhost:4984" instead.
// See #708 for more details
if *addr == DefaultInterface {
*addr = "localhost:4984"
}
config = &ServerConfig{
Interface: addr,
AdminInterface: authAddr,
ProfileInterface: profAddr,
Pretty: *pretty,
ConfigServer: configServer,
Logging: &base.LoggingConfig{
Console: base.ConsoleLoggerConfig{
// Enable the logger only when log keys have explicitly been set on the command line
FileLoggerConfig: base.FileLoggerConfig{Enabled: base.BoolPtr(*logKeys != "")},
LogKeys: strings.Split(*logKeys, ","),
},
LogFilePath: *logFilePath,
},
Databases: map[string]*DbConfig{
*dbName: {
Name: *dbName,
BucketConfig: BucketConfig{
Server: couchbaseURL,
Bucket: &defaultBucketName,
CertPath: *certpath,
CACertPath: *cacertpath,
KeyPath: *keypath,
},
Users: map[string]*db.PrincipalConfig{
base.GuestUsername: {
Disabled: false,
ExplicitChannels: base.SetFromArray([]string{"*"}),
},
},
},
},
}
}
if config.MetricsInterface == nil {
config.MetricsInterface = &DefaultMetricsInterface
}
return config, err
}
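// SetMaxFileDescriptors applies the given file descriptor limit for the process, falling back to the default when unset.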
func SetMaxFileDescriptors(maxP *uint64) error {
maxFDs := DefaultMaxFileDescriptors
if maxP != nil {
maxFDs = *maxP
}
_, err := base.SetMaxFileDescriptors(maxFDs)
if err != nil {
base.Errorf("Error setting MaxFileDescriptors to %d: %v", maxFDs, err)
return err
}
return nil
}
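// Serve starts an HTTP(S) listener on the given address using the configured connection limit, TLS settings, and
// timeouts, and logs a fatal error if the server fails to start.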
func (config *ServerConfig) Serve(addr string, handler http.Handler) {
maxConns := DefaultMaxIncomingConnections
if config.MaxIncomingConnections != nil {
maxConns = *config.MaxIncomingConnections
}
http2Enabled := false
if config.Unsupported != nil && config.Unsupported.Http2Config != nil {
http2Enabled = *config.Unsupported.Http2Config.Enabled
}
tlsMinVersion := GetTLSVersionFromString(config.TLSMinVersion)
err := base.ListenAndServeHTTP(
addr,
maxConns,
config.SSLCert,
config.SSLKey,
handler,
config.ServerReadTimeout,
config.ServerWriteTimeout,
config.ReadHeaderTimeout,
config.IdleTimeout,
http2Enabled,
tlsMinVersion,
)
if err != nil {
base.Fatalf("Failed to start HTTP server on %s: %v", base.UD(addr), err)
}
}
// setupServerContext creates a new ServerContext given its configuration and performs the context validation.
func setupServerContext(config *ServerConfig) (*ServerContext, error) {
PrettyPrint = config.Pretty
base.Infof(base.KeyAll, "Logging: Console level: %v", base.ConsoleLogLevel())
base.Infof(base.KeyAll, "Logging: Console keys: %v", base.ConsoleLogKey().EnabledLogKeys())
base.Infof(base.KeyAll, "Logging: Redaction level: %s", config.Logging.RedactionLevel)
if os.Getenv("GOMAXPROCS") == "" && runtime.GOMAXPROCS(0) == 1 {
cpus := runtime.NumCPU()
if cpus > 1 {
runtime.GOMAXPROCS(cpus)
base.Infof(base.KeyAll, "Configured Go to use all %d CPUs; setenv GOMAXPROCS to override this", cpus)
}
}
_ = SetMaxFileDescriptors(config.MaxFileDescriptors)
// Use the stdlib JSON package, if configured to do so
if config.Unsupported != nil && config.Unsupported.UseStdlibJSON != nil && *config.Unsupported.UseStdlibJSON {
base.Infof(base.KeyAll, "Using the stdlib JSON package")
base.UseStdlibJSON = true
}
// Set global bcrypt cost if configured
if config.BcryptCost > 0 {
if err := auth.SetBcryptCost(config.BcryptCost); err != nil {
return nil, fmt.Errorf("configuration error: %v", err)
}
}
sc := NewServerContext(config)
for _, dbConfig := range config.Databases {
if _, err := sc.AddDatabaseFromConfig(dbConfig); err != nil {
return nil, fmt.Errorf("error opening database %s: %v", base.MD(dbConfig.Name), err)
}
}
_ = validateServerContext(sc)
return sc, nil
}
// startServer starts and runs the server with the given configuration. (This function never returns.)
func startServer(config *ServerConfig, sc *ServerContext) {
if config.ProfileInterface != nil {
//runtime.MemProfileRate = 10 * 1024
base.Infof(base.KeyAll, "Starting profile server on %s", base.UD(*config.ProfileInterface))
go func() {
_ = http.ListenAndServe(*config.ProfileInterface, nil)
}()
}
go sc.PostStartup()
base.Consolef(base.LevelInfo, base.KeyAll, "Starting metrics server on %s", *config.MetricsInterface)
go config.Serve(*config.MetricsInterface, CreateMetricHandler(sc))
base.Consolef(base.LevelInfo, base.KeyAll, "Starting admin server on %s", *config.AdminInterface)
go config.Serve(*config.AdminInterface, CreateAdminHandler(sc))
base.Consolef(base.LevelInfo, base.KeyAll, "Starting server on %s ...", *config.Interface)
config.Serve(*config.Interface, CreatePublicHandler(sc))
}
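// validateServerContext returns an error for each bucket that is shared by multiple databases, and logs a warning for each.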
func validateServerContext(sc *ServerContext) (errors error) {
bucketUUIDToDBContext := make(map[string][]*db.DatabaseContext, len(sc.databases_))
for _, dbContext := range sc.databases_ {
if uuid, err := dbContext.Bucket.UUID(); err == nil {
bucketUUIDToDBContext[uuid] = append(bucketUUIDToDBContext[uuid], dbContext)
}
}
sharedBuckets := sharedBuckets(bucketUUIDToDBContext)
for _, sharedBucket := range sharedBuckets {
sharedBucketError := &SharedBucketError{sharedBucket}
errors = multierror.Append(errors, sharedBucketError)
messageFormat := "Bucket %q is shared among databases %s. " +
"This may result in unexpected behaviour if security is not defined consistently."
base.Warnf(messageFormat, base.MD(sharedBucket.bucketName), base.MD(sharedBucket.dbNames))
}
return errors
}
type sharedBucket struct {
bucketName string
dbNames []string
}
type SharedBucketError struct {
sharedBucket sharedBucket
}
func (e *SharedBucketError) Error() string {
messageFormat := "Bucket %q is shared among databases %v. " +
"This may result in unexpected behaviour if security is not defined consistently."
return fmt.Sprintf(messageFormat, e.sharedBucket.bucketName, e.sharedBucket.dbNames)
}
func (e *SharedBucketError) GetSharedBucket() sharedBucket {
return e.sharedBucket
}
// Returns a list of buckets that are being shared by multiple databases.
func sharedBuckets(dbContextMap map[string][]*db.DatabaseContext) (sharedBuckets []sharedBucket) {
for _, dbContexts := range dbContextMap {
if len(dbContexts) > 1 {
var dbNames []string
for _, dbContext := range dbContexts {
dbNames = append(dbNames, dbContext.Name)
}
sharedBuckets = append(sharedBuckets, sharedBucket{dbContexts[0].Bucket.GetName(), dbNames})
}
}
return sharedBuckets
}
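// HandleSighup rotates the log files.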
func HandleSighup() {
for logger, err := range base.RotateLogfiles() {
if err != nil {
base.Warnf("Error rotating %v: %v", logger, err)
}
}
}
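// RegisterSignalHandler rotates log files on SIGHUP, and flushes log buffers before exiting on interrupt or kill signals.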
func RegisterSignalHandler() {
signalChannel := make(chan os.Signal, 1)
signal.Notify(signalChannel, syscall.SIGHUP, os.Interrupt, os.Kill)
go func() {
for sig := range signalChannel {
base.Infof(base.KeyAll, "Handling signal: %v", sig)
switch sig {
case syscall.SIGHUP:
HandleSighup()
case os.Interrupt, os.Kill:
// Ensure log buffers are flushed before exiting.
base.FlushLogBuffers()
os.Exit(130) // 130 == exit code 128 + 2 (interrupt)
}
}
}()
}
// setupServerConfig parses command-line flags, reads the optional configuration file,
// performs the config validation and database setup.
func setupServerConfig(args []string) (config *ServerConfig, err error) {
var unknownFieldsErr error
base.InitializeLoggers()
	// We can log the version here because the console logger was initialized early in init(), and the file loggers
	// buffer to memory until they are fully configured.
base.LogSyncGatewayVersion()
config, err = ParseCommandLine(args, flag.ExitOnError)
if pkgerrors.Cause(err) == base.ErrUnknownField {
unknownFieldsErr = err
} else if err != nil {
return nil, fmt.Errorf(err.Error())
}
// Logging config will now have been loaded from command line
// or from a sync_gateway config file so we can validate the
	// configuration and set up logging now
err = config.SetupAndValidateLogging()
if err != nil {
// If we didn't set up logging correctly, we *probably* can't log via normal means...
// as a best-effort, last-ditch attempt, we'll log to stderr as well.
log.Printf("[ERR] Error setting up logging: %v", err)
return nil, fmt.Errorf("error setting up logging: %v", err)
}
base.FlushLoggerBuffers()
// If we got an unknownFields error when reading the config
// log and exit now we've tried setting up the logging.
if unknownFieldsErr != nil {
return nil, fmt.Errorf(unknownFieldsErr.Error())
}
// Validation
var multiError *multierror.Error
multiError = multierror.Append(multiError, config.validate())
multiError = multierror.Append(multiError, config.setupAndValidateDatabases())
if multiError.ErrorOrNil() != nil {
base.Errorf("Error during config validation: %v", multiError)
return nil, fmt.Errorf("error(s) during config validation: %v", multiError)
}
return config, nil
}
// ServerMain is the main entry point of launching the Sync Gateway server; the main
// function directly calls this. It registers both signal and fatal panic handlers,
// does the initial setup and finally starts the server.
func ServerMain() {
RegisterSignalHandler()
defer base.FatalPanicHandler()
config, err := setupServerConfig(os.Args)
if err != nil {
base.Fatalf(err.Error())
}
ctx, err := setupServerContext(config)
if err != nil {
base.Fatalf(err.Error())
}
startServer(config, ctx)
}
| [
"\"GOMAXPROCS\""
]
| []
| [
"GOMAXPROCS"
]
| [] | ["GOMAXPROCS"] | go | 1 | 0 | |
sdk/go/azure/network/v20170301/getVirtualNetworkGatewayConnection.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20170301
import (
"github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
func LookupVirtualNetworkGatewayConnection(ctx *pulumi.Context, args *LookupVirtualNetworkGatewayConnectionArgs, opts ...pulumi.InvokeOption) (*LookupVirtualNetworkGatewayConnectionResult, error) {
var rv LookupVirtualNetworkGatewayConnectionResult
err := ctx.Invoke("azure-nextgen:network/v20170301:getVirtualNetworkGatewayConnection", args, &rv, opts...)
if err != nil {
return nil, err
}
return &rv, nil
}
type LookupVirtualNetworkGatewayConnectionArgs struct {
// The name of the resource group.
ResourceGroupName string `pulumi:"resourceGroupName"`
// The name of the virtual network gateway connection.
VirtualNetworkGatewayConnectionName string `pulumi:"virtualNetworkGatewayConnectionName"`
}
// A common class for general resource information
type LookupVirtualNetworkGatewayConnectionResult struct {
// The authorizationKey.
AuthorizationKey *string `pulumi:"authorizationKey"`
// Virtual network Gateway connection status. Possible values are 'Unknown', 'Connecting', 'Connected' and 'NotConnected'.
ConnectionStatus string `pulumi:"connectionStatus"`
	// Gateway connection type. Possible values are: 'IPsec', 'Vnet2Vnet', 'ExpressRoute', and 'VPNClient'.
ConnectionType string `pulumi:"connectionType"`
// The egress bytes transferred in this connection.
EgressBytesTransferred int `pulumi:"egressBytesTransferred"`
// EnableBgp flag
EnableBgp *bool `pulumi:"enableBgp"`
// Gets a unique read-only string that changes whenever the resource is updated.
Etag *string `pulumi:"etag"`
// The ingress bytes transferred in this connection.
IngressBytesTransferred int `pulumi:"ingressBytesTransferred"`
// The IPSec Policies to be considered by this connection.
IpsecPolicies []IpsecPolicyResponse `pulumi:"ipsecPolicies"`
// A common class for general resource information
LocalNetworkGateway2 *LocalNetworkGatewayResponse `pulumi:"localNetworkGateway2"`
// Resource location.
Location *string `pulumi:"location"`
// Resource name.
Name string `pulumi:"name"`
// The reference to peerings resource.
Peer *SubResourceResponse `pulumi:"peer"`
// The provisioning state of the VirtualNetworkGatewayConnection resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
ProvisioningState string `pulumi:"provisioningState"`
// The resource GUID property of the VirtualNetworkGatewayConnection resource.
ResourceGuid *string `pulumi:"resourceGuid"`
// The routing weight.
RoutingWeight *int `pulumi:"routingWeight"`
// The IPSec shared key.
SharedKey *string `pulumi:"sharedKey"`
// Resource tags.
Tags map[string]string `pulumi:"tags"`
// Collection of all tunnels' connection health status.
TunnelConnectionStatus []TunnelConnectionHealthResponse `pulumi:"tunnelConnectionStatus"`
// Resource type.
Type string `pulumi:"type"`
// Enable policy-based traffic selectors.
UsePolicyBasedTrafficSelectors *bool `pulumi:"usePolicyBasedTrafficSelectors"`
// A common class for general resource information
VirtualNetworkGateway1 VirtualNetworkGatewayResponse `pulumi:"virtualNetworkGateway1"`
// A common class for general resource information
VirtualNetworkGateway2 *VirtualNetworkGatewayResponse `pulumi:"virtualNetworkGateway2"`
}
| []
| []
| []
| [] | [] | go | null | null | null |
flink-instrumentation/workspace/flink-1.4.1-instrumented/flink-1.4.1/flink-yarn-tests/src/test/java/org/apache/flink/yarn/YARNSessionCapacitySchedulerITCase.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.yarn;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.GlobalConfiguration;
import org.apache.flink.configuration.JobManagerOptions;
import org.apache.flink.configuration.ResourceManagerOptions;
import org.apache.flink.runtime.client.JobClient;
import org.apache.flink.runtime.taskexecutor.TaskManagerServices;
import org.apache.flink.runtime.webmonitor.WebMonitorUtils;
import org.apache.flink.test.testdata.WordCountData;
import org.apache.flink.test.util.TestBaseUtils;
import org.apache.flink.yarn.cli.FlinkYarnSessionCli;
import org.apache.flink.yarn.configuration.YarnConfigOptions;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.JsonNode;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.flink.shaded.jackson2.com.fasterxml.jackson.databind.node.ArrayNode;
import org.apache.commons.io.FileUtils;
import org.apache.commons.lang3.StringUtils;
import org.apache.hadoop.security.UserGroupInformation;
import org.apache.hadoop.yarn.api.protocolrecords.StopContainersRequest;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.ContainerId;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.security.NMTokenIdentifier;
import org.apache.hadoop.yarn.server.nodemanager.NodeManager;
import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.ResourceScheduler;
import org.apache.hadoop.yarn.server.resourcemanager.scheduler.capacity.CapacityScheduler;
import org.apache.log4j.Level;
import org.junit.After;
import org.junit.Assert;
import org.junit.BeforeClass;
import org.junit.Test;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.io.File;
import java.io.FilenameFilter;
import java.io.IOException;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentMap;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import static org.apache.flink.yarn.UtilsTest.addTestAppender;
import static org.apache.flink.yarn.UtilsTest.checkForLogString;
/**
* This test starts a MiniYARNCluster with a CapacityScheduler.
 * It has, by default, a queue called "default". The configuration here adds another queue: "qa-team".
*/
public class YARNSessionCapacitySchedulerITCase extends YarnTestBase {
private static final Logger LOG = LoggerFactory.getLogger(YARNSessionCapacitySchedulerITCase.class);
@BeforeClass
public static void setup() {
YARN_CONFIGURATION.setClass(YarnConfiguration.RM_SCHEDULER, CapacityScheduler.class, ResourceScheduler.class);
YARN_CONFIGURATION.set("yarn.scheduler.capacity.root.queues", "default,qa-team");
YARN_CONFIGURATION.setInt("yarn.scheduler.capacity.root.default.capacity", 40);
YARN_CONFIGURATION.setInt("yarn.scheduler.capacity.root.qa-team.capacity", 60);
YARN_CONFIGURATION.set(YarnTestBase.TEST_CLUSTER_NAME_KEY, "flink-yarn-tests-capacityscheduler");
startYARNWithConfig(YARN_CONFIGURATION);
}
/**
* Test regular operation, including command line parameter parsing.
*/
@Test
public void testClientStartup() {
LOG.info("Starting testClientStartup()");
runWithArgs(new String[]{"-j", flinkUberjar.getAbsolutePath(), "-t", flinkLibFolder.getAbsolutePath(),
"-n", "1",
"-jm", "768",
"-tm", "1024", "-qu", "qa-team"},
"Number of connected TaskManagers changed to 1. Slots available: 1", null, RunTypes.YARN_SESSION, 0);
LOG.info("Finished testClientStartup()");
}
/**
* Test per-job yarn cluster
*
* <p>This also tests the prefixed CliFrontend options for the YARN case
* We also test if the requested parallelism of 2 is passed through.
* The parallelism is requested at the YARN client (-ys).
*/
@Test
public void perJobYarnCluster() {
LOG.info("Starting perJobYarnCluster()");
addTestAppender(JobClient.class, Level.INFO);
File exampleJarLocation = new File("target/programs/BatchWordCount.jar");
Assert.assertNotNull("Could not find wordcount jar", exampleJarLocation);
runWithArgs(new String[]{"run", "-m", "yarn-cluster",
"-yj", flinkUberjar.getAbsolutePath(), "-yt", flinkLibFolder.getAbsolutePath(),
"-yn", "1",
"-ys", "2", //test that the job is executed with a DOP of 2
"-yjm", "768",
"-ytm", "1024", exampleJarLocation.getAbsolutePath()},
/* test succeeded after this string */
"Job execution complete",
/* prohibited strings: (to verify the parallelism) */
// (we should see "DataSink (...) (1/2)" and "DataSink (...) (2/2)" instead)
new String[]{"DataSink \\(.*\\) \\(1/1\\) switched to FINISHED"},
RunTypes.CLI_FRONTEND, 0, true);
LOG.info("Finished perJobYarnCluster()");
}
/**
* Test per-job yarn cluster and memory calculations for off-heap use (see FLINK-7400) with the
* same job as {@link #perJobYarnCluster()}.
*
* <p>This ensures that with (any) pre-allocated off-heap memory by us, there is some off-heap
* memory remaining for Flink's libraries. Creating task managers will thus fail if no off-heap
* memory remains.
*/
@Test
public void perJobYarnClusterOffHeap() {
LOG.info("Starting perJobYarnCluster()");
addTestAppender(JobClient.class, Level.INFO);
File exampleJarLocation = new File("target/programs/BatchWordCount.jar");
Assert.assertNotNull("Could not find wordcount jar", exampleJarLocation);
// set memory constraints (otherwise this is the same test as perJobYarnCluster() above)
final long taskManagerMemoryMB = 1024;
//noinspection NumericOverflow if the calculation of the total Java memory size overflows, default configuration parameters are wrong in the first place, so we can ignore this inspection
final long networkBuffersMB = TaskManagerServices
.calculateNetworkBufferMemory(
(taskManagerMemoryMB -
ResourceManagerOptions.CONTAINERIZED_HEAP_CUTOFF_MIN.defaultValue()) << 20,
new Configuration()) >> 20;
final long offHeapMemory = taskManagerMemoryMB
- ResourceManagerOptions.CONTAINERIZED_HEAP_CUTOFF_MIN.defaultValue()
// cutoff memory (will be added automatically)
- networkBuffersMB // amount of memory used for network buffers
- 100; // reserve something for the Java heap space
runWithArgs(new String[]{"run", "-m", "yarn-cluster",
"-yj", flinkUberjar.getAbsolutePath(), "-yt", flinkLibFolder.getAbsolutePath(),
"-yn", "1",
"-ys", "2", //test that the job is executed with a DOP of 2
"-yjm", "768",
"-ytm", String.valueOf(taskManagerMemoryMB),
"-yD", "taskmanager.memory.off-heap=true",
"-yD", "taskmanager.memory.size=" + offHeapMemory,
"-yD", "taskmanager.memory.preallocate=true", exampleJarLocation.getAbsolutePath()},
/* test succeeded after this string */
"Job execution complete",
/* prohibited strings: (to verify the parallelism) */
// (we should see "DataSink (...) (1/2)" and "DataSink (...) (2/2)" instead)
new String[]{"DataSink \\(.*\\) \\(1/1\\) switched to FINISHED"},
RunTypes.CLI_FRONTEND, 0, true);
LOG.info("Finished perJobYarnCluster()");
}
/**
* Test TaskManager failure and also if the vcores are set correctly (see issue FLINK-2213).
*/
@Test(timeout = 100000) // timeout after 100 seconds
public void testTaskManagerFailure() throws Exception {
LOG.info("Starting testTaskManagerFailure()");
Runner runner = startWithArgs(new String[]{"-j", flinkUberjar.getAbsolutePath(), "-t", flinkLibFolder.getAbsolutePath(),
"-n", "1",
"-jm", "768",
"-tm", "1024",
"-s", "3", // set the slots 3 to check if the vCores are set properly!
"-nm", "customName",
"-Dfancy-configuration-value=veryFancy",
"-Dyarn.maximum-failed-containers=3",
"-D" + YarnConfigOptions.VCORES.key() + "=2"},
"Number of connected TaskManagers changed to 1. Slots available: 3",
RunTypes.YARN_SESSION);
Assert.assertEquals(2, getRunningContainers());
// ------------------------ Test if JobManager web interface is accessible -------
final YarnClient yc = YarnClient.createYarnClient();
yc.init(YARN_CONFIGURATION);
yc.start();
List<ApplicationReport> apps = yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING));
Assert.assertEquals(1, apps.size()); // Only one running
ApplicationReport app = apps.get(0);
Assert.assertEquals("customName", app.getName());
String url = app.getTrackingUrl();
if (!url.endsWith("/")) {
url += "/";
}
if (!url.startsWith("http://")) {
url = "http://" + url;
}
LOG.info("Got application URL from YARN {}", url);
String response = TestBaseUtils.getFromHTTP(url + "taskmanagers/");
JsonNode parsedTMs = new ObjectMapper().readTree(response);
ArrayNode taskManagers = (ArrayNode) parsedTMs.get("taskmanagers");
Assert.assertNotNull(taskManagers);
Assert.assertEquals(1, taskManagers.size());
Assert.assertEquals(3, taskManagers.get(0).get("slotsNumber").asInt());
// get the configuration from webinterface & check if the dynamic properties from YARN show up there.
String jsonConfig = TestBaseUtils.getFromHTTP(url + "jobmanager/config");
Map<String, String> parsedConfig = WebMonitorUtils.fromKeyValueJsonArray(jsonConfig);
Assert.assertEquals("veryFancy", parsedConfig.get("fancy-configuration-value"));
Assert.assertEquals("3", parsedConfig.get("yarn.maximum-failed-containers"));
Assert.assertEquals("2", parsedConfig.get(YarnConfigOptions.VCORES.key()));
// -------------- FLINK-1902: check if jobmanager hostname/port are shown in web interface
// first, get the hostname/port
String oC = outContent.toString();
Pattern p = Pattern.compile("Flink JobManager is now running on ([a-zA-Z0-9.-]+):([0-9]+)");
Matcher matches = p.matcher(oC);
String hostname = null;
String port = null;
while (matches.find()) {
hostname = matches.group(1).toLowerCase();
port = matches.group(2);
}
LOG.info("Extracted hostname:port: {} {}", hostname, port);
Assert.assertEquals("unable to find hostname in " + jsonConfig, hostname,
parsedConfig.get(JobManagerOptions.ADDRESS.key()));
Assert.assertEquals("unable to find port in " + jsonConfig, port,
parsedConfig.get(JobManagerOptions.PORT.key()));
// test logfile access
String logs = TestBaseUtils.getFromHTTP(url + "jobmanager/log");
Assert.assertTrue(logs.contains("Starting YARN ApplicationMaster"));
Assert.assertTrue(logs.contains("Starting JobManager"));
Assert.assertTrue(logs.contains("Starting JobManager Web Frontend"));
// ------------------------ Kill container with TaskManager and check if vcores are set correctly -------
// find container id of taskManager:
ContainerId taskManagerContainer = null;
NodeManager nodeManager = null;
UserGroupInformation remoteUgi = null;
NMTokenIdentifier nmIdent = null;
try {
remoteUgi = UserGroupInformation.getCurrentUser();
} catch (IOException e) {
LOG.warn("Unable to get curr user", e);
Assert.fail();
}
for (int nmId = 0; nmId < NUM_NODEMANAGERS; nmId++) {
NodeManager nm = yarnCluster.getNodeManager(nmId);
ConcurrentMap<ContainerId, Container> containers = nm.getNMContext().getContainers();
for (Map.Entry<ContainerId, Container> entry : containers.entrySet()) {
String command = StringUtils.join(entry.getValue().getLaunchContext().getCommands(), " ");
if (command.contains(YarnTaskManager.class.getSimpleName())) {
taskManagerContainer = entry.getKey();
nodeManager = nm;
nmIdent = new NMTokenIdentifier(taskManagerContainer.getApplicationAttemptId(), null, "", 0);
// allow myself to do stuff with the container
// remoteUgi.addCredentials(entry.getValue().getCredentials());
remoteUgi.addTokenIdentifier(nmIdent);
}
}
sleep(500);
}
Assert.assertNotNull("Unable to find container with TaskManager", taskManagerContainer);
Assert.assertNotNull("Illegal state", nodeManager);
yc.stop();
List<ContainerId> toStop = new LinkedList<ContainerId>();
toStop.add(taskManagerContainer);
StopContainersRequest scr = StopContainersRequest.newInstance(toStop);
try {
nodeManager.getNMContext().getContainerManager().stopContainers(scr);
} catch (Throwable e) {
LOG.warn("Error stopping container", e);
Assert.fail("Error stopping container: " + e.getMessage());
}
// stateful termination check:
// wait until we saw a container being killed and AFTERWARDS a new one launched
boolean ok = false;
do {
LOG.debug("Waiting for correct order of events. Output: {}", errContent.toString());
String o = errContent.toString();
int killedOff = o.indexOf("Container killed by the ApplicationMaster");
if (killedOff != -1) {
o = o.substring(killedOff);
ok = o.indexOf("Launching TaskManager") > 0;
}
sleep(1000);
} while(!ok);
// send "stop" command to command line interface
runner.sendStop();
// wait for the thread to stop
try {
runner.join(1000);
} catch (InterruptedException e) {
LOG.warn("Interrupted while stopping runner", e);
}
LOG.warn("stopped");
// ----------- Send output to logger
System.setOut(ORIGINAL_STDOUT);
System.setErr(ORIGINAL_STDERR);
oC = outContent.toString();
String eC = errContent.toString();
LOG.info("Sending stdout content through logger: \n\n{}\n\n", oC);
LOG.info("Sending stderr content through logger: \n\n{}\n\n", eC);
// ------ Check if everything happened correctly
Assert.assertTrue("Expect to see failed container",
eC.contains("New messages from the YARN cluster"));
Assert.assertTrue("Expect to see failed container",
eC.contains("Container killed by the ApplicationMaster"));
Assert.assertTrue("Expect to see new container started",
eC.contains("Launching TaskManager") && eC.contains("on host"));
// cleanup auth for the subsequent tests.
remoteUgi.getTokenIdentifiers().remove(nmIdent);
LOG.info("Finished testTaskManagerFailure()");
}
/**
* Test deployment to non-existing queue & ensure that the system logs a WARN message
* for the user. (Users had unexpected behavior of Flink on YARN because they mistyped the
 * target queue. With an error message, we can help users identify the issue)
*/
@Test
public void testNonexistingQueueWARNmessage() {
LOG.info("Starting testNonexistingQueueWARNmessage()");
addTestAppender(YarnClusterDescriptor.class, Level.WARN);
runWithArgs(new String[]{"-j", flinkUberjar.getAbsolutePath(),
"-t", flinkLibFolder.getAbsolutePath(),
"-n", "1",
"-jm", "768",
"-tm", "1024",
"-qu", "doesntExist"}, "to unknown queue: doesntExist", null, RunTypes.YARN_SESSION, 1);
checkForLogString("The specified queue 'doesntExist' does not exist. Available queues");
LOG.info("Finished testNonexistingQueueWARNmessage()");
}
/**
* Test per-job yarn cluster with the parallelism set at the CliFrontend instead of the YARN client.
*/
@Test
public void perJobYarnClusterWithParallelism() {
LOG.info("Starting perJobYarnClusterWithParallelism()");
// write log messages to stdout as well, so that the runWithArgs() method
// is catching the log output
addTestAppender(JobClient.class, Level.INFO);
File exampleJarLocation = new File("target/programs/BatchWordCount.jar");
Assert.assertNotNull("Could not find wordcount jar", exampleJarLocation);
runWithArgs(new String[]{"run",
"-p", "2", //test that the job is executed with a DOP of 2
"-m", "yarn-cluster",
"-yj", flinkUberjar.getAbsolutePath(),
"-yt", flinkLibFolder.getAbsolutePath(),
"-yn", "1",
"-yjm", "768",
"-ytm", "1024", exampleJarLocation.getAbsolutePath()},
/* test succeeded after this string */
"Job execution complete",
/* prohibited strings: (we want to see "DataSink (...) (2/2) switched to FINISHED") */
new String[]{"DataSink \\(.*\\) \\(1/1\\) switched to FINISHED"},
RunTypes.CLI_FRONTEND, 0, true);
LOG.info("Finished perJobYarnClusterWithParallelism()");
}
/**
* Test a fire-and-forget job submission to a YARN cluster.
*/
@Test(timeout = 60000)
public void testDetachedPerJobYarnCluster() {
LOG.info("Starting testDetachedPerJobYarnCluster()");
File exampleJarLocation = new File("target/programs/BatchWordCount.jar");
Assert.assertNotNull("Could not find batch wordcount jar", exampleJarLocation);
testDetachedPerJobYarnClusterInternal(exampleJarLocation.getAbsolutePath());
LOG.info("Finished testDetachedPerJobYarnCluster()");
}
/**
	 * Test a fire-and-forget streaming job submission to a YARN cluster.
*/
@Test(timeout = 60000)
public void testDetachedPerJobYarnClusterWithStreamingJob() {
LOG.info("Starting testDetachedPerJobYarnClusterWithStreamingJob()");
File exampleJarLocation = new File("target/programs/StreamingWordCount.jar");
Assert.assertNotNull("Could not find streaming wordcount jar", exampleJarLocation);
testDetachedPerJobYarnClusterInternal(exampleJarLocation.getAbsolutePath());
LOG.info("Finished testDetachedPerJobYarnClusterWithStreamingJob()");
}
private void testDetachedPerJobYarnClusterInternal(String job) {
YarnClient yc = YarnClient.createYarnClient();
yc.init(YARN_CONFIGURATION);
yc.start();
// get temporary folder for writing output of wordcount example
File tmpOutFolder = null;
try {
tmpOutFolder = tmp.newFolder();
}
catch (IOException e) {
throw new RuntimeException(e);
}
// get temporary file for reading input data for wordcount example
File tmpInFile;
try {
tmpInFile = tmp.newFile();
FileUtils.writeStringToFile(tmpInFile, WordCountData.TEXT);
}
catch (IOException e) {
throw new RuntimeException(e);
}
Runner runner = startWithArgs(new String[]{
"run", "-m", "yarn-cluster",
"-yj", flinkUberjar.getAbsolutePath(),
"-yt", flinkLibFolder.getAbsolutePath(),
"-yn", "1",
"-yjm", "768",
"-yD", "yarn.heap-cutoff-ratio=0.5", // test if the cutoff is passed correctly
"-yD", "yarn.tags=test-tag",
"-ytm", "1024",
"-ys", "2", // test requesting slots from YARN.
"--yarndetached", job,
"--input", tmpInFile.getAbsoluteFile().toString(),
"--output", tmpOutFolder.getAbsoluteFile().toString()},
"Job has been submitted with JobID",
RunTypes.CLI_FRONTEND);
// it should usually be 2, but on slow machines, the number varies
Assert.assertTrue("There should be at most 2 containers running", getRunningContainers() <= 2);
// give the runner some time to detach
for (int attempt = 0; runner.isAlive() && attempt < 5; attempt++) {
try {
Thread.sleep(500);
} catch (InterruptedException e) {
}
}
Assert.assertFalse("The runner should detach.", runner.isAlive());
LOG.info("CLI Frontend has returned, so the job is running");
// find out the application id and wait until it has finished.
try {
List<ApplicationReport> apps = yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING));
ApplicationId tmpAppId;
if (apps.size() == 1) {
// Better method to find the right appId. But sometimes the app is shutting down very fast
// Only one running
tmpAppId = apps.get(0).getApplicationId();
LOG.info("waiting for the job with appId {} to finish", tmpAppId);
// wait until the app has finished
while (yc.getApplications(EnumSet.of(YarnApplicationState.RUNNING)).size() > 0) {
sleep(500);
}
} else {
// get appId by finding the latest finished appid
apps = yc.getApplications();
Collections.sort(apps, new Comparator<ApplicationReport>() {
@Override
public int compare(ApplicationReport o1, ApplicationReport o2) {
return o1.getApplicationId().compareTo(o2.getApplicationId()) * -1;
}
});
tmpAppId = apps.get(0).getApplicationId();
LOG.info("Selected {} as the last appId from {}", tmpAppId, Arrays.toString(apps.toArray()));
}
final ApplicationId id = tmpAppId;
// now it has finished.
// check the output files.
File[] listOfOutputFiles = tmpOutFolder.listFiles();
Assert.assertNotNull("Taskmanager output not found", listOfOutputFiles);
LOG.info("The job has finished. TaskManager output files found in {}", tmpOutFolder);
// read all output files in output folder to one output string
String content = "";
for (File f:listOfOutputFiles) {
if (f.isFile()) {
content += FileUtils.readFileToString(f) + "\n";
}
}
//String content = FileUtils.readFileToString(taskmanagerOut);
// check for some of the wordcount outputs.
Assert.assertTrue("Expected string 'da 5' or '(all,2)' not found in string '" + content + "'", content.contains("da 5") || content.contains("(da,5)") || content.contains("(all,2)"));
Assert.assertTrue("Expected string 'der 29' or '(mind,1)' not found in string'" + content + "'", content.contains("der 29") || content.contains("(der,29)") || content.contains("(mind,1)"));
// check if the heap size for the TaskManager was set correctly
File jobmanagerLog = YarnTestBase.findFile("..", new FilenameFilter() {
@Override
public boolean accept(File dir, String name) {
return name.contains("jobmanager.log") && dir.getAbsolutePath().contains(id.toString());
}
});
Assert.assertNotNull("Unable to locate JobManager log", jobmanagerLog);
content = FileUtils.readFileToString(jobmanagerLog);
// TM was started with 1024 but we cut off 50% (NOT THE DEFAULT VALUE)
String expected = "Starting TaskManagers with command: $JAVA_HOME/bin/java -Xms424m -Xmx424m";
Assert.assertTrue("Expected string '" + expected + "' not found in JobManager log: '" + jobmanagerLog + "'",
content.contains(expected));
expected = " (2/2) (attempt #0) to ";
Assert.assertTrue("Expected string '" + expected + "' not found in JobManager log." +
"This string checks that the job has been started with a parallelism of 2. Log contents: '" + jobmanagerLog + "'",
content.contains(expected));
// make sure the detached app is really finished.
LOG.info("Checking again that app has finished");
ApplicationReport rep;
do {
sleep(500);
rep = yc.getApplicationReport(id);
LOG.info("Got report {}", rep);
} while (rep.getYarnApplicationState() == YarnApplicationState.RUNNING);
verifyApplicationTags(rep);
} catch (Throwable t) {
LOG.warn("Error while detached yarn session was running", t);
Assert.fail(t.getMessage());
} finally {
//cleanup the yarn-properties file
String confDirPath = System.getenv("FLINK_CONF_DIR");
File configDirectory = new File(confDirPath);
LOG.info("testDetachedPerJobYarnClusterInternal: Using configuration directory " + configDirectory.getAbsolutePath());
// load the configuration
LOG.info("testDetachedPerJobYarnClusterInternal: Trying to load configuration file");
GlobalConfiguration.loadConfiguration(configDirectory.getAbsolutePath());
try {
File yarnPropertiesFile = FlinkYarnSessionCli.getYarnPropertiesLocation(GlobalConfiguration.loadConfiguration());
if (yarnPropertiesFile.exists()) {
LOG.info("testDetachedPerJobYarnClusterInternal: Cleaning up temporary Yarn address reference: {}", yarnPropertiesFile.getAbsolutePath());
yarnPropertiesFile.delete();
}
} catch (Exception e) {
LOG.warn("testDetachedPerJobYarnClusterInternal: Exception while deleting the JobManager address file", e);
}
}
}
/**
* Ensures that the YARN application tags were set properly.
*
* <p>Since YARN application tags were only added in Hadoop 2.4, but Flink still supports Hadoop 2.3, reflection is
* required to invoke the methods. If the method does not exist, this test passes.
*/
private void verifyApplicationTags(final ApplicationReport report) throws InvocationTargetException,
IllegalAccessException {
final Method applicationTagsMethod;
Class<ApplicationReport> clazz = ApplicationReport.class;
try {
// this method is only supported by Hadoop 2.4.0 onwards
applicationTagsMethod = clazz.getMethod("getApplicationTags");
} catch (NoSuchMethodException e) {
// only verify the tags if the method exists
return;
}
@SuppressWarnings("unchecked")
Set<String> applicationTags = (Set<String>) applicationTagsMethod.invoke(report);
Assert.assertEquals(applicationTags, Collections.singleton("test-tag"));
}
@After
public void checkForProhibitedLogContents() {
ensureNoProhibitedStringInLogFiles(PROHIBITED_STRINGS, WHITELISTED_STRINGS);
}
}
| [
"\"FLINK_CONF_DIR\""
]
| []
| [
"FLINK_CONF_DIR"
]
| [] | ["FLINK_CONF_DIR"] | java | 1 | 0 | |
carto_app/carto_app/wsgi.py | """
WSGI config for carto_app project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.8/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "carto_app.settings")
application = get_wsgi_application()
| []
| []
| []
| [] | [] | python | 0 | 0 | |
Providers/nxOMSAutomationWorker/automationworker/worker/hybridworker.py | #!/usr/bin/env python2
#
# Copyright (C) Microsoft Corporation, All rights reserved.
import ConfigParser
import os
import platform
import shutil
import subprocess
import sys
import threading
import time
import traceback
# import worker module after linuxutil.daemonize() call
sandboxes_root_folder_name = "sandboxes"
def safe_loop(func):
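    """Decorator that invokes func in an endless loop: terminal errors (authorization, file permission,
    missing file, SystemExit) exit the worker, while other exceptions are traced and retried after the
    configured polling interval."""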
def decorated_func(*args, **kwargs):
while True:
try:
# ensure required file / cert exists
func(*args, **kwargs)
except (JrdsAuthorizationException,
InvalidFilePermissionException,
FileNotFoundException,
SystemExit):
tracer.log_worker_safe_loop_terminal_exception(traceback.format_exc())
time.sleep(1) # allow the trace to make it to stdout (since traces are background threads)
sys.exit(-1)
except Exception:
tracer.log_worker_safe_loop_non_terminal_exception(traceback.format_exc())
time.sleep(configuration.get_jrds_get_sandbox_actions_polling_freq())
return decorated_func
def background_thread(func):
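    """Decorator that runs func on a daemon background thread."""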
def decorated_func(*args, **kwargs):
t = threading.Thread(target=func, args=args)
t.daemon = True
t.start()
return decorated_func
def exit_on_error(message, exit_code=1):
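    """Writes the message to the automation worker crash log and exits with the given exit code."""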
crash_log_filename = "automation_worker_crash.log"
util.exit_on_error(filename=crash_log_filename, message=message, exit_code=exit_code)
def test_file_creation(path):
try:
iohelper.write_to_file(path, path)
os.remove(path)
return True
except IOError:
return False
def validate_and_setup_path():
# default to user dir for exception logs to be written to disk
test_file_name = "test_file"
# test certificate and key path
if not os.path.isfile(configuration.get_jrds_cert_path()) or not os.path.isfile(configuration.get_jrds_key_path()):
exit_on_error("Invalid certificate of key file path (absolute path is required).")
# test working directory for existence and permissions
working_directory_path = configuration.get_working_directory_path()
if not os.path.exists(working_directory_path):
exit_on_error("Invalid working directory path (absolute path is required).")
file_creation = test_file_creation(os.path.join(working_directory_path, test_file_name))
if file_creation is False:
exit_on_error("Invalid working directory permission (read/write permissions are required).")
# test state file path
if configuration.get_state_directory_path() != configuration.DEFAULT_STATE_DIRECTORY_PATH:
if not os.path.exists(configuration.get_state_directory_path()):
exit_on_error("Invalid state directory path (absolute path is required).")
file_creation = test_file_creation(os.path.join(configuration.get_state_directory_path(), test_file_name))
if file_creation is False:
exit_on_error("Invalid state directory permission (read/write permissions are required).")
# OMS integration
# set the working directory owner to be nxautomation:omiusers
if os.name.lower() != "nt":
import pwd
try:
nxautomation_uid = int(pwd.getpwnam('nxautomation').pw_uid)
if os.getuid() == nxautomation_uid:
retval = subprocess.call(["sudo", "chown", "-R", "nxautomation:omiusers", working_directory_path])
if retval != 0:
exit_on_error("Could not change owner of working directory %s to nxautomation:omiusers"
% (working_directory_path))
except KeyError:
# nxautomation user was not found on the system, skip this step
tracer.log_debug_trace("Ownership change of working directory skipped. nxautomation user not found.")
pass
def generate_state_file():
# skip state file if the worker is managed by the worker manager
if len(sys.argv) >= 3 and str(sys.argv[2]) == "managed":
return
state_file_name = "state.conf"
if configuration.get_state_directory_path() == configuration.DEFAULT_STATE_DIRECTORY_PATH:
state_file_path = os.path.join(configuration.get_working_directory_path(), state_file_name)
else:
state_file_path = os.path.join(configuration.get_state_directory_path(), state_file_name)
tracer.log_debug_trace("State file path : " + str(state_file_path))
if os.path.isfile(state_file_path):
os.remove(state_file_path)
section = "state"
conf_file = open(state_file_path, 'wb')
config = ConfigParser.ConfigParser()
config.add_section(section)
config.set(section, configuration.STATE_PID, str(os.getpid()))
config.set(section, configuration.WORKER_VERSION, str(configuration.get_worker_version()))
# for OMS scenarios, optional for DIY
if len(sys.argv) >= 3:
config.set(section, configuration.STATE_WORKSPACE_ID, str(sys.argv[2]))
if len(sys.argv) >= 4:
config.set(section, configuration.STATE_RESOURCE_VERSION, str(sys.argv[3]))
config.write(conf_file)
conf_file.close()
# OMS integration
# set the ownership of the state file to nxautomation:omiusers
# set the permission of the state file to 660
if os.name.lower() != "nt":
import pwd
try:
nxautomation_uid = int(pwd.getpwnam('nxautomation').pw_uid)
if os.getuid() == nxautomation_uid:
retval = subprocess.call(["sudo", "chown", "nxautomation:omiusers", state_file_path])
if retval != 0:
exit_on_error(
"Could not change owner of state file %s to nxautomation:omiusers" % (state_file_path))
retval = subprocess.call(["sudo", "chmod", "660", state_file_path])
if retval != 0:
exit_on_error("Could not change permission of state file %s " % (state_file_path))
except KeyError:
# nxautomation user was not found on the system, skip this step
tracer.log_debug_trace("State file permission change skipped. nxautomation user not found.")
pass
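# Polls JRDS for sandbox actions and starts one sandbox.py subprocess per sandbox id.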
class Worker:
def __init__(self):
tracer.log_worker_starting(configuration.get_worker_version())
http_client_factory = HttpClientFactory(configuration.get_jrds_cert_path(), configuration.get_jrds_key_path(),
configuration.get_verify_certificates())
http_client = http_client_factory.create_http_client(sys.version_info)
self.jrds_client = JRDSClient(http_client)
self.running_sandboxes = {}
@staticmethod
def assert_environment_prerequisite():
jrds_cert_path = configuration.get_jrds_cert_path()
if util.assert_file_read_permission(jrds_cert_path) is False:
raise InvalidFilePermissionException(jrds_cert_path)
jrds_key_path = configuration.get_jrds_key_path()
if util.assert_file_read_permission(jrds_key_path) is False:
raise InvalidFilePermissionException(jrds_key_path)
worker_conf_path = configuration.get_worker_configuration_file_path()
if util.assert_file_read_permission(worker_conf_path) is False:
raise InvalidFilePermissionException(worker_conf_path)
proxy_conf_path = configuration.get_proxy_configuration_path()
if proxy_conf_path != configuration.DEFAULT_PROXY_CONFIGURATION_PATH and os.path.isfile(proxy_conf_path):
if util.assert_file_read_permission(proxy_conf_path) is False:
raise InvalidFilePermissionException(proxy_conf_path)
@safe_loop
def routine(self):
self.assert_environment_prerequisite()
self.stop_tracking_terminated_sandbox()
sandbox_actions = self.jrds_client.get_sandbox_actions()
if sandbox_actions is None:
tracer.log_get_sandbox_action_returned_null_data()
return
tracer.log_debug_trace("Get sandbox action. Found " + str(len(sandbox_actions)) + " action(s).")
for action in sandbox_actions:
tracer.log_worker_sandbox_action_found(len(sandbox_actions))
sandbox_id = str(action["SandboxId"])
# prevent duplicate sandbox from running
if sandbox_id in self.running_sandboxes:
continue
# create sandboxes folder if needed
sandbox_working_dir = os.path.join(configuration.get_working_directory_path(), sandboxes_root_folder_name,
sandbox_id)
try:
iohelper.assert_or_create_path(sandbox_working_dir)
except OSError, exception:
tracer.log_worker_failed_to_create_sandbox_root_folder(sandbox_id, exception)
raise SystemExit("Sandbox folder creation failed.")
# copy the current process env variables (contains configuration) and add the sandbox_id key
process_env_variables = os.environ.copy()
process_env_variables["sandbox_id"] = sandbox_id
cmd = ["python", os.path.join(configuration.get_source_directory_path(), "sandbox.py"),
configuration.get_worker_configuration_file_path()]
tracer.log_worker_starting_sandbox(sandbox_id)
sandbox_process = subprocessfactory.create_subprocess(cmd=cmd,
env=process_env_variables,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
cwd=sandbox_working_dir)
self.running_sandboxes[sandbox_id] = sandbox_process
tracer.log_worker_started_tracking_sandbox(sandbox_id)
self.monitor_sandbox_process_outputs(sandbox_id, sandbox_process)
tracer.log_worker_sandbox_process_started(sandbox_id, str(sandbox_process.pid))
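# Streams the sandbox process stdout to the tracer, dumps stderr if the process exits non-zero,
# and deletes the sandbox working directory once the process terminates.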
@background_thread
def monitor_sandbox_process_outputs(self, sandbox_id, process):
while process.poll() is None:
output = process.stdout.readline().replace("\n", "")
if output == '':
continue
tracer.log_sandbox_stdout(output)
if process.poll() != 0:
full_error_output = ""
while True:
error_output = process.stderr.readline()
if error_output is None or error_output == '':
break
full_error_output += error_output
tracer.log_worker_sandbox_process_crashed(sandbox_id, process.pid, process.poll(), full_error_output)
tracer.log_worker_sandbox_process_exited(sandbox_id, str(process.pid), process.poll())
# cleanup sandbox directory
sandbox_working_dir = os.path.join(configuration.get_working_directory_path(), sandboxes_root_folder_name,
sandbox_id)
shutil.rmtree(sandbox_working_dir, ignore_errors=True)
@background_thread
def telemetry_routine(self):
while True:
tracer.log_worker_general_telemetry(configuration.get_worker_version(), configuration.get_worker_type(),
linuxutil.get_current_username(), linuxutil.get_oms_agent_id())
tracer.log_worker_python_telemetry(platform.python_version(), platform.python_build(),
platform.python_compiler())
tracer.log_worker_system_telemetry(platform.system(), platform.node(), platform.version(),
platform.machine(), platform.processor())
try:
distributor_id, description, release, codename = linuxutil.get_lsb_release()
tracer.log_worker_lsb_release_telemetry(distributor_id, description, release, codename)
except:
pass
# sleep for 6 hours, this allows us to gather daily telemetry
time.sleep(60 * 60 * 6)
def stop_tracking_terminated_sandbox(self):
terminated_sandbox_ids = []
# detect terminated sandboxes
for sandbox_id, sandbox_process in self.running_sandboxes.items():
if sandbox_process.poll() is not None:
terminated_sandbox_ids.append(sandbox_id)
# clean-up terminated sandboxes
for sandbox_id in terminated_sandbox_ids:
removal = self.running_sandboxes.pop(sandbox_id, None)
if removal is not None:
tracer.log_worker_stopped_tracking_sandbox(sandbox_id)
def main():
if len(sys.argv) < 2:
exit_on_error("Missing configuration file path.")
configuration_path = str(sys.argv[1])
if not os.path.isfile(configuration_path):
exit_on_error("Invalid configuration file path or empty configuration file (absolute path is required).")
# configuration has to be read first thing
try:
# remove the test_mode env_var value (mainly for Windows)
# this value is set in test
del os.environ["test_mode"]
except KeyError:
pass
configuration.read_and_set_configuration(configuration_path)
configuration.set_config({configuration.COMPONENT: "worker"})
validate_and_setup_path()
# do not trace anything before this point
generate_state_file()
worker = Worker()
worker.telemetry_routine()
worker.routine()
if __name__ == "__main__":
# daemonize before loading the logging library to prevent deadlock in 2.4 (see: http://bugs.python.org/issue6721)
import linuxutil
linuxutil.daemonize()
try:
import configuration
import iohelper
import subprocessfactory
import tracer
import util
from httpclientfactory import HttpClientFactory
from jrdsclient import JRDSClient
from workerexception import *
main()
except:
exit_on_error(traceback.format_exc())
| []
| []
| [
"test_mode"
]
| [] | ["test_mode"] | python | 1 | 0 | |
conans/model/conan_file.py | from conans.model.options import Options, PackageOptions, OptionsValues
from conans.model.requires import Requirements
from conans.model.build_info import DepsCppInfo
from conans import tools # @UnusedImport KEEP THIS! Needed for pyinstaller to copy to exe.
from conans.errors import ConanException
from conans.model.env_info import DepsEnvInfo, EnvValues
import os
from conans.paths import RUN_LOG_NAME
def create_options(conanfile):
try:
package_options = PackageOptions(getattr(conanfile, "options", None))
options = Options(package_options)
default_options = getattr(conanfile, "default_options", None)
if default_options:
if isinstance(default_options, (list, tuple)):
default_values = OptionsValues(default_options)
elif isinstance(default_options, str):
default_values = OptionsValues.loads(default_options)
else:
raise ConanException("Please define your default_options as list or "
"multiline string")
options.values = default_values
return options
except Exception as e:
raise ConanException("Error while initializing options. %s" % str(e))
def create_requirements(conanfile):
try:
# Actual requirements of this package
if not hasattr(conanfile, "requires"):
return Requirements()
else:
if not conanfile.requires:
return Requirements()
if isinstance(conanfile.requires, tuple):
return Requirements(*conanfile.requires)
else:
return Requirements(conanfile.requires, )
except Exception as e:
raise ConanException("Error while initializing requirements. %s" % str(e))
def create_settings(conanfile, settings):
try:
defined_settings = getattr(conanfile, "settings", None)
if isinstance(defined_settings, str):
defined_settings = [defined_settings]
current = defined_settings or {}
settings.constraint(current)
return settings
except Exception as e:
raise ConanException("Error while initializing settings. %s" % str(e))
def create_exports(conanfile):
if not hasattr(conanfile, "exports"):
return None
else:
if isinstance(conanfile.exports, str):
return (conanfile.exports, )
return conanfile.exports
def create_exports_sources(conanfile):
if not hasattr(conanfile, "exports_sources"):
return None
else:
if isinstance(conanfile.exports_sources, str):
return (conanfile.exports_sources, )
return conanfile.exports_sources
class ConanFile(object):
""" The base class for all package recipes
"""
name = None
version = None # Any str, can be "1.1" or whatever
url = None # The URL where this file is located, e.g. a github repo, to collaborate on the package
# The license of the PACKAGE, just a shortcut, does not replace or
# change the actual license of the source code
license = None
author = None # Main maintainer/responsible for the package, any format
build_policy = None
short_paths = False
def __init__(self, output, runner, settings, conanfile_directory, user=None, channel=None):
# User defined generators
self.generators = self.generators if hasattr(self, "generators") else ["txt"]
if isinstance(self.generators, str):
self.generators = [self.generators]
# User defined options
self.options = create_options(self)
self.requires = create_requirements(self)
self.settings = create_settings(self, settings)
self.exports = create_exports(self)
self.exports_sources = create_exports_sources(self)
# needed variables to pack the project
self.cpp_info = None # Will be initialized at processing time
self.deps_cpp_info = DepsCppInfo()
# environment variables declared in the package_info
self.env_info = None # Will be initialized at processing time
self.deps_env_info = DepsEnvInfo()
self.copy = None # initialized at runtime
# an output stream (writeln, info, warn error)
self.output = output
# something that can run commands, like os.system
self._runner = runner
self._conanfile_directory = conanfile_directory
self.package_folder = None # Assigned at runtime
self._scope = None
# user specified env variables
self._env_values = EnvValues() # Updated at runtime, user specified -e
self._user = user
self._channel = channel
@property
def env(self):
simple, multiple = self._env_values.env_dicts(self.name)
simple.update(multiple)
return simple
@property
def channel(self):
if not self._channel:
self._channel = os.getenv("CONAN_CHANNEL")
if not self._channel:
raise ConanException("CONAN_CHANNEL environment variable not defined, "
"but self.channel is used in conanfile")
return self._channel
@property
def user(self):
if not self._user:
self._user = os.getenv("CONAN_USERNAME")
if not self._user:
raise ConanException("CONAN_USERNAME environment variable not defined, "
"but self.user is used in conanfile")
return self._user
def collect_libs(self, folder="lib"):
if not self.package_folder:
return []
lib_folder = os.path.join(self.package_folder, folder)
if not os.path.exists(lib_folder):
self.output.warn("Package folder doesn't exist, can't collect libraries")
return []
files = os.listdir(lib_folder)
result = []
for f in files:
name, ext = os.path.splitext(f)
if ext in (".so", ".lib", ".a", ".dylib"):
if ext != ".lib" and name.startswith("lib"):
name = name[3:]
result.append(name)
return result
@property
def scope(self):
return self._scope
@scope.setter
def scope(self, value):
self._scope = value
if value.dev:
self.requires.allow_dev = True
try:
if hasattr(self, "dev_requires"):
if isinstance(self.dev_requires, tuple):
self.requires.add_dev(*self.dev_requires)
else:
self.requires.add_dev(self.dev_requires, )
except Exception as e:
raise ConanException("Error while initializing dev_requirements. %s" % str(e))
@property
def conanfile_directory(self):
return self._conanfile_directory
@property
def build_policy_missing(self):
return self.build_policy == "missing"
@property
def build_policy_always(self):
return self.build_policy == "always"
def source(self):
pass
def system_requirements(self):
""" this method can be overwritten to implement logic for system package
managers, as apt-get
You can define self.global_system_requirements = True, if you want the installation
to be for all packages (not depending on settings/options/requirements)
"""
def config_options(self):
""" modify options, probably conditioned to some settings. This call is executed
before config_settings. E.g.
if self.settings.os == "Windows":
del self.options.shared # shared/static not supported in win
"""
def configure(self):
""" modify settings, probably conditioned to some options. This call is executed
after config_options. E.g.
if self.options.header_only:
self.settings.clear()
This is also the place for conditional requirements
"""
def imports(self):
pass
def build(self):
self.output.warn("This conanfile has no build step")
def package(self):
self.output.warn("This conanfile has no package step")
def package_info(self):
""" define cpp_build_info, flags, etc
"""
def run(self, command, output=True, cwd=None):
""" runs such a command in the folder the Conan
is defined
"""
retcode = self._runner(command, output, os.path.abspath(RUN_LOG_NAME), cwd)
if retcode != 0:
raise ConanException("Error %d while executing %s" % (retcode, command))
def package_id(self):
""" modify the conans info, typically to narrow values
eg.: conaninfo.package_references = []
"""
def test(self):
raise ConanException("You need to create a method 'test' in your test/conanfile.py")
def __repr__(self):
if self.name and self.version and self._channel and self._user:
return "%s/%s@%s/%s" % (self.name, self.version, self.user, self.channel)
elif self.name and self.version:
return "%s/%s@PROJECT" % (self.name, self.version)
else:
return "PROJECT"
| []
| []
| [
"CONAN_USERNAME",
"CONAN_CHANNEL"
]
| [] | ["CONAN_USERNAME", "CONAN_CHANNEL"] | python | 2 | 0 | |
manage.py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'adentro_schedule.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| []
| []
| []
| [] | [] | python | 0 | 0 | |
internal/provider/resource_runscope_test_test.go | package provider
import (
"context"
"fmt"
"github.com/terraform-providers/terraform-provider-runscope/internal/runscope"
"os"
"testing"
"github.com/hashicorp/terraform-plugin-sdk/v2/helper/resource"
"github.com/hashicorp/terraform-plugin-sdk/v2/terraform"
)
func TestAccTest_create_default_test(t *testing.T) {
teamId := os.Getenv("RUNSCOPE_TEAM_ID")
bucketName := testAccRandomBucketName()
test := &runscope.Test{}
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
ProviderFactories: testAccProviderFactories,
CheckDestroy: testAccCheckTestDestroy,
Steps: []resource.TestStep{
{
Config: fmt.Sprintf(testAccTestDefaultConfig, bucketName, teamId),
Check: resource.ComposeTestCheckFunc(
testAccCheckTestExists("runscope_test.test", test),
resource.TestCheckResourceAttr("runscope_test.test", "name", "runscope test"),
resource.TestCheckResourceAttr("runscope_test.test", "description", ""),
resource.TestCheckResourceAttrSet("runscope_test.test", "default_environment_id"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_at"),
resource.TestCheckResourceAttr("runscope_test.test", "created_by.#", "1"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.id"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.name"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.email"),
),
},
},
})
}
func TestAccTest_create_custom_test(t *testing.T) {
teamId := os.Getenv("RUNSCOPE_TEAM_ID")
bucketName := testAccRandomBucketName()
test := &runscope.Test{}
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
ProviderFactories: testAccProviderFactories,
CheckDestroy: testAccCheckTestDestroy,
Steps: []resource.TestStep{
{
Config: fmt.Sprintf(testAccTestCustomConfig, bucketName, teamId),
Check: resource.ComposeTestCheckFunc(
testAccCheckTestExists("runscope_test.test", test),
resource.TestCheckResourceAttr("runscope_test.test", "name", "runscope custom test"),
resource.TestCheckResourceAttr("runscope_test.test", "description", "runscope custom test description"),
resource.TestCheckResourceAttrSet("runscope_test.test", "default_environment_id"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_at"),
resource.TestCheckResourceAttr("runscope_test.test", "created_by.#", "1"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.id"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.name"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.email"),
resource.TestCheckResourceAttrSet("runscope_test.test", "trigger_url"),
),
},
{
ResourceName: "runscope_test.test",
ImportState: true,
ImportStateVerify: true,
ImportStateIdFunc: func(s *terraform.State) (string, error) {
rs, ok := s.RootModule().Resources["runscope_test.test"]
if !ok {
return "", fmt.Errorf("not found runscope_test.test")
}
return fmt.Sprintf("%s/%s", rs.Primary.Attributes["bucket_id"], rs.Primary.ID), nil
},
},
},
})
}
func TestAccTest_update_test(t *testing.T) {
teamId := os.Getenv("RUNSCOPE_TEAM_ID")
bucketName := testAccRandomBucketName()
test1 := &runscope.Test{}
test2 := &runscope.Test{}
resource.Test(t, resource.TestCase{
PreCheck: func() { testAccPreCheck(t) },
ProviderFactories: testAccProviderFactories,
CheckDestroy: testAccCheckTestDestroy,
Steps: []resource.TestStep{
{
Config: fmt.Sprintf(testAccTestDefaultConfig, bucketName, teamId),
Check: resource.ComposeTestCheckFunc(
testAccCheckTestExists("runscope_test.test", test1),
resource.TestCheckResourceAttr("runscope_test.test", "name", "runscope test"),
resource.TestCheckResourceAttr("runscope_test.test", "description", ""),
resource.TestCheckResourceAttrSet("runscope_test.test", "default_environment_id"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_at"),
resource.TestCheckResourceAttr("runscope_test.test", "created_by.#", "1"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.id"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.name"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.email"),
),
},
{
Config: fmt.Sprintf(testAccTestCustomConfig, bucketName, teamId),
Check: resource.ComposeTestCheckFunc(
testAccCheckTestExists("runscope_test.test", test2),
resource.TestCheckResourceAttr("runscope_test.test", "name", "runscope custom test"),
resource.TestCheckResourceAttr("runscope_test.test", "description", "runscope custom test description"),
resource.TestCheckResourceAttrSet("runscope_test.test", "default_environment_id"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_at"),
resource.TestCheckResourceAttr("runscope_test.test", "created_by.#", "1"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.id"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.name"),
resource.TestCheckResourceAttrSet("runscope_test.test", "created_by.0.email"),
resource.TestCheckResourceAttrSet("runscope_test.test", "trigger_url"),
testAccCheckTestIdEqual(test1, test2),
),
},
},
})
}
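// testAccCheckTestDestroy asserts that every runscope_test left in state has really been removed:
// if the Delete call still succeeds after destroy, the record survived and the check fails.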
func testAccCheckTestDestroy(s *terraform.State) error {
ctx := context.Background()
client := testAccProvider.Meta().(*providerConfig).client
for _, rs := range s.RootModule().Resources {
if rs.Type != "runscope_test" {
continue
}
opts := runscope.TestDeleteOpts{}
opts.Id = rs.Primary.ID
opts.BucketId = rs.Primary.Attributes["bucket_id"]
if err := client.Test.Delete(ctx, opts); err == nil {
return fmt.Errorf("record %s still exists", rs.Primary.ID)
}
}
return nil
}
func testAccCheckTestExists(n string, t *runscope.Test) resource.TestCheckFunc {
return func(s *terraform.State) error {
ctx := context.Background()
rs, ok := s.RootModule().Resources[n]
if !ok {
return fmt.Errorf("not found: %s", n)
}
if rs.Primary.ID == "" {
return fmt.Errorf("no Record ID is set")
}
client := testAccProvider.Meta().(*providerConfig).client
opts := runscope.TestGetOpts{}
opts.Id = rs.Primary.ID
opts.BucketId = rs.Primary.Attributes["bucket_id"]
test, err := client.Test.Get(ctx, opts)
if err != nil {
return err
}
if test.Id != rs.Primary.ID {
return fmt.Errorf("record not found")
}
*t = *test
return nil
}
}
func testAccCheckTestIdEqual(t1, t2 *runscope.Test) resource.TestCheckFunc {
return func(s *terraform.State) error {
if t1.Id != t2.Id {
return fmt.Errorf("expected than \"%s\" equal to \"%s\"", t1.Id, t2.Id)
}
return nil
}
}
const testAccTestDefaultConfig = `
resource "runscope_bucket" "bucket" {
name = "%s"
team_uuid = "%s"
}
resource "runscope_test" "test" {
bucket_id = runscope_bucket.bucket.id
name = "runscope test"
}
`
const testAccTestCustomConfig = `
resource "runscope_bucket" "bucket" {
name = "%s"
team_uuid = "%s"
}
resource "runscope_test" "test" {
bucket_id = runscope_bucket.bucket.id
name = "runscope custom test"
description = "runscope custom test description"
}
`
| [
"\"RUNSCOPE_TEAM_ID\"",
"\"RUNSCOPE_TEAM_ID\"",
"\"RUNSCOPE_TEAM_ID\""
]
| []
| [
"RUNSCOPE_TEAM_ID"
]
| [] | ["RUNSCOPE_TEAM_ID"] | go | 1 | 0 | |
internal/editor/edit_windows.go | //go:build windows
// +build windows
package editor
import (
"os"
"github.com/urfave/cli/v2"
)
// Path return the name/path of the preferred editor
func Path(c *cli.Context) string {
if c != nil {
if ed := c.String("editor"); ed != "" {
return ed
}
}
if ed := os.Getenv("EDITOR"); ed != "" {
return ed
}
return "notepad.exe"
}
| [
"\"EDITOR\""
]
| []
| [
"EDITOR"
]
| [] | ["EDITOR"] | go | 1 | 0 | |
Uncertainty/data/case-ln/case_ln_101.py | from numpy import array
def case_ln_101():
ppc = {"version": '2'}
ppc["baseMVA"] = 100.0
ppc["bus"] = array([
[1.0, 1.0, 37.358, 9.9621, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[3.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[4.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[5.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[6.0, 1.0, 7.4716, 2.7396, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[7.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[8.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[9.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 11.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[10.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 11.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[11.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[12.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[13.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[14.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[15.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[16.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[17.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[18.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[19.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[20.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[21.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[22.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[23.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[24.0, 2.0, 2.4905, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 6.3, 1.0, 1.1, 0.95, 0.6, 10 ],
[25.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[26.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[27.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[28.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[29.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[30.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[31.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[32.0, 2.0, 1.7434, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[33.0, 2.0, 1.4943, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[34.0, 2.0, 1.4943, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[35.0, 2.0, 3.4867, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[36.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[37.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[38.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[39.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[40.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[41.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[42.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[43.0, 1.0, 42.2395, 1.584, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[44.0, 1.0, 49.8107, 3.1181, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[45.0, 1.0, 49.8107, 3.1181, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[46.0, 1.0, 49.8107, 3.1181, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[47.0, 1.0, 49.8107, 3.1181, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[48.0, 1.0, 24.9053, 3.2427, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[49.0, 1.0, 51.8031, 3.2427, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[50.0, 1.0, 10.3606, 3.2427, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[51.0, 1.0, 29.8864, 9.9621, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[52.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[53.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[54.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[55.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[56.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[57.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[58.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[59.0, 1.0, 49.8107, 3.1181, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[107.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[108.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[109.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[110.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[111.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[112.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[113.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[114.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[115.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[116.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[117.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[118.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[119.0, 1.0, 59.7728, 29.8864, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[120.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[121.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[122.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[123.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[307.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[310.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[315.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[316.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[482.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[483.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[484.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[499.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[500.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[508.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[539.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[540.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[541.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[542.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 2.0, 1.0, 0.0, 500.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[552.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[553.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[556.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[557.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1418.0, 1.0, 69.735, 19.9243, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1454.0, 1.0, 34.3694, 9.464, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1473.0, 1.0, 81.1914, 14.9432, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1545.0, 1.0, 32.3769, 7.4716, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1555.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1556.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1557.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1558.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1559.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1560.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1561.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1562.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1563.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1564.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1565.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1566.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1567.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1568.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1569.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1570.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1571.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1572.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1573.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1574.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1575.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1576.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1577.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1578.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1579.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1580.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1581.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1582.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1583.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1584.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1585.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1586.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1587.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1588.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1589.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1590.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1591.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1592.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1593.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1594.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1595.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1596.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1597.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1598.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1599.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1600.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1601.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1602.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1603.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1604.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1605.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1606.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1607.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1608.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1609.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1610.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1611.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1612.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1613.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1614.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1615.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1616.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1617.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1618.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1619.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1620.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1621.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1622.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1623.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1624.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1625.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1626.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1627.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1628.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1629.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1630.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1631.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1632.0, 2.0, 3.4867, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1633.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 1.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1634.0, 2.0, 3.4867, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1635.0, 1.0, 149.432, 17.8372, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1641.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1642.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1643.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1644.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1645.0, 2.0, 2.4905, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 6.3, 1.0, 1.1, 0.95, 0.6, 10 ],
[1646.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1647.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1648.0, 2.0, 3.4867, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1649.0, 2.0, 1.7434, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1650.0, 2.0, 3.4867, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1651.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],
[1652.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1653.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],
[1654.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],
[1655.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1656.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1657.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1658.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1659.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1660.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1661.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1662.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1663.0, 3.0, 29.8864, 5.4792, 0.0, 0.0, 1.0, 1.0, 0.0, 27.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1664.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],
[1665.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1666.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1667.0, 2.0, 21.9665, 6.2761, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1668.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1669.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1670.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1671.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1672.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1673.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1674.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.7, 1.0, 1.1, 0.95, 0.6, 10 ],
[1675.0, 2.0, 7.8452, 2.6151, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1676.0, 2.0, 7.8452, 2.6151, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1677.0, 2.0, 7.8452, 2.8542, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1678.0, 2.0, 7.8452, 2.8542, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1679.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1680.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1681.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1682.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1683.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1684.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1685.0, 2.0, 4.7071, 2.092, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1686.0, 2.0, 7.8452, 2.8542, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1687.0, 2.0, 7.8452, 2.8542, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1688.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1689.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1690.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 35.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1691.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1692.0, 2.0, 29.8864, 5.4792, 0.0, 0.0, 1.0, 1.0, 0.0, 27.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1693.0, 2.0, 8.9659, 3.0185, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1694.0, 2.0, 8.9659, 3.0185, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1695.0, 2.0, 8.9659, 3.0185, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1696.0, 2.0, 8.9659, 3.0185, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1697.0, 2.0, 14.9432, 4.483, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1698.0, 2.0, 14.9432, 4.483, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1699.0, 2.0, 14.9432, 4.483, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1700.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1701.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1702.0, 2.0, 8.9659, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1703.0, 2.0, 8.9659, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1704.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1705.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1706.0, 2.0, 8.9659, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 16.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1707.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1708.0, 2.0, 3.9849, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1709.0, 2.0, 3.9849, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1710.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1711.0, 2.0, 8.9659, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1712.0, 2.0, 8.9659, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1713.0, 2.0, 7.4716, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1714.0, 2.0, 7.4716, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1715.0, 2.0, 7.4716, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1716.0, 2.0, 7.4716, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1717.0, 2.0, 23.9091, 4.4929, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1718.0, 2.0, 23.9091, 4.4929, 0.0, 0.0, 1.0, 1.0, 0.0, 24.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1719.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1720.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1721.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1722.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1723.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1724.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1725.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 18.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1726.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1727.0, 2.0, 4.9811, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1728.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1729.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1730.0, 2.0, 5.9773, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1731.0, 2.0, 5.9773, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1732.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1733.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1734.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1735.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1736.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1737.0, 2.0, 5.9773, 1.8131, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1738.0, 2.0, 5.9773, 1.7932, 0.0, 0.0, 1.0, 1.0, 0.0, 15.75, 1.0, 1.1, 0.95, 0.6, 10 ],
[1739.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1740.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1741.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1742.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1743.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1744.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 22.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1745.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 22.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1746.0, 2.0, 54.7917, 17.4337, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1747.0, 2.0, 4.9811, 1.4943, 0.0, 0.0, 1.0, 1.0, 0.0, 13.8, 1.0, 1.1, 0.95, 0.6, 10 ],
[1748.0, 2.0, 20.9205, 5.4343, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1749.0, 2.0, 20.9205, 5.4343, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1750.0, 2.0, 20.9205, 5.4343, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1751.0, 2.0, 20.9205, 5.4343, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1752.0, 2.0, 10.4602, 2.7147, 0.0, 0.0, 1.0, 1.0, 0.0, 20.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1754.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1755.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1756.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1757.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1758.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1759.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1760.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1761.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1762.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1763.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1764.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1765.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1766.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1767.0, 1.0, 49.8107, 3.2526, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1768.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1769.0, 1.0, 49.8107, 3.1181, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1770.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1771.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1772.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1773.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1774.0, 1.0, 27.3959, 3.5764, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1775.0, 1.0, 49.8107, 3.1181, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1776.0, 1.0, 24.9053, 3.2526, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1777.0, 1.0, 42.3391, 9.9621, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1778.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1779.0, 1.0, 24.9053, 3.2526, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1780.0, 1.0, 74.716, 11.0879, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1781.0, 1.0, 27.3959, 3.5764, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1782.0, 1.0, 25.9016, 3.3822, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1783.0, 1.0, 25.9016, 3.3822, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1784.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1785.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1786.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1787.0, 1.0, 27.3959, 10.9584, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1788.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1789.0, 1.0, 0.0, 0.0, 0.0, -0.99173882, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1790.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1791.0, 1.0, 166.2382, 50.4632, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1792.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1793.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1794.0, 1.0, 19.9243, 4.9811, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1795.0, 1.0, 19.6304, 2.7794, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1796.0, 1.0, 49.8107, 16.9456, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1797.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1798.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1799.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1800.0, 1.0, 51.8031, 17.623, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1801.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1802.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1803.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1804.0, 1.0, 36.4315, 21.8619, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1805.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1806.0, 1.0, 13.4638, -5.0707, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1807.0, 1.0, 49.8107, 9.9621, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1808.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1809.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1810.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1811.0, 1.0, 0.0, 0.0, 0.0, -2.40000384, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1812.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1813.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1814.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1815.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1816.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1817.0, 1.0, 4.9313, 0.8468, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1818.0, 1.0, 41.0241, 6.2313, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1819.0, 1.0, 2.4507, 0.6127, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1820.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1821.0, 1.0, 28.6561, 6.4256, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1822.0, 1.0, 49.8107, 3.1181, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1823.0, 1.0, 24.9053, 16.9456, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1824.0, 1.0, 27.0472, 4.6822, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1825.0, 1.0, 4.732, 0.8468, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1826.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1827.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1828.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1829.0, 1.0, 119.5855, 24.9302, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1830.0, 1.0, 13.947, 0.9962, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1831.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1832.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1833.0, 1.0, 54.7917, 17.9318, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1834.0, 1.0, 0.0, 0.0, 0.0, -1.4999925, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1835.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1836.0, 1.0, 23.7248, 6.7792, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1837.0, 1.0, 34.9023, -1.056, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1838.0, 1.0, 3.7507, 0.8966, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1839.0, 1.0, 11.3568, 4.2339, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1840.0, 1.0, 30.8727, 6.321, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1841.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1842.0, 1.0, 38.3542, 6.6846, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1843.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1844.0, 1.0, 14.9432, 16.9456, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1845.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1846.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1847.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1848.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1849.0, 1.0, 0.0, 0.0, 0.0, 5.74999045, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1850.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1851.0, 1.0, 0.0, 0.0, 0.0, -1.20000048, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1852.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1853.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1854.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1855.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1856.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1857.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1858.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1859.0, 1.0, 28.3921, 9.464, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1860.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1861.0, 1.0, 49.6264, 10.1514, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1862.0, 1.0, 0.0, 0.0, 0.0, 0.64800415, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1863.0, 1.0, 0.0, 0.0, 0.0, -3.8340098, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1864.0, 1.0, 0.0, 0.0, 0.0, -1.97550375, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1865.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1866.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1867.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1868.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1869.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1870.0, 1.0, 4.2837, 0.6127, 0.0, 0.0, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1871.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1872.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1873.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1874.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1875.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1876.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1877.0, 1.0, 0.0, 0.0, 0.0, -1.7999964, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1878.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1879.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1880.0, 1.0, 0.0, 0.0, 0.0, 0.599988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1881.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1882.0, 1.0, 0.0, 0.0, 0.0, -1.20000048, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1883.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1884.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1885.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1886.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1887.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1888.0, 1.0, 5.9075, 0.8816, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1889.0, 1.0, 0.0, 0.0, 0.0, -0.6000024, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1890.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1891.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1892.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1893.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1894.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1895.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1896.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1897.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1898.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1899.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1900.0, 1.0, 42.882, 2.9388, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1901.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1902.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1903.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1904.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1905.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1906.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1907.0, 1.0, 43.3353, 10.8089, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1908.0, 1.0, 18.43, 4.0845, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1909.0, 1.0, 28.2427, 11.2971, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1910.0, 1.0, 34.8675, 12.2036, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1911.0, 1.0, 56.4853, 11.2971, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1912.0, 1.0, 26.5989, 6.9237, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1913.0, 1.0, 62.3281, -1.7982, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1914.0, 1.0, 12.8661, 4.1642, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1915.0, 1.0, 16.8759, 5.4244, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1916.0, 1.0, 27.3959, 12.4278, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1917.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1918.0, 1.0, 103.6062, 25.3835, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1919.0, 1.0, 32.8252, -20.9703, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1920.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1921.0, 1.0, 37.5423, 0.0, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1922.0, 1.0, 34.5487, 13.0056, 5e-07, -5e-07, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ],
[1923.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1924.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1925.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1926.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1927.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1928.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1929.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1930.0, 1.0, 0.0, 0.0, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1931.0, 1.0, 54.7917, 3.427, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1932.0, 1.0, 29.0944, 10.52, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1933.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1934.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1935.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1936.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1937.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1938.0, 1.0, 16.5371, 4.8316, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1939.0, 1.0, 82.9896, 12.901, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1940.0, 1.0, 44.4809, 4.6822, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1941.0, 1.0, 52.1119, 12.6419, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1942.0, 1.0, 120.5418, 37.8063, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1943.0, 1.0, 29.8914, 5.1305, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1944.0, 1.0, 74.3375, 5.8029, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1945.0, 1.0, 28.2427, 11.2971, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1946.0, 1.0, 78.0533, 12.1538, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1947.0, 1.0, 73.919, 12.104, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1948.0, 1.0, 95.0388, 31.6796, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1949.0, 1.0, 37.0591, -0.4483, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1950.0, 1.0, 80.8427, 22.0163, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1951.0, 1.0, 66.5172, 16.2134, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1952.0, 1.0, 3.417, 0.6226, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1953.0, 1.0, 19.2369, 5.9424, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1954.0, 1.0, 66.2482, 9.464, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1955.0, 1.0, 49.8107, 3.2526, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1956.0, 1.0, 11.1576, 3.6362, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1957.0, 1.0, 0.0, 0.0, 0.0, -2.3999952, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1958.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1959.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1960.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1961.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1962.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1963.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1964.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1965.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1966.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1967.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1968.0, 1.0, 86.5909, 5.2799, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1969.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1970.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1971.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1972.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1973.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1974.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1975.0, 1.0, 0.0, 0.0, 0.0, -1.08843537, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1976.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1977.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1978.0, 1.0, 109.0953, 12.6918, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1979.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1980.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1981.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1982.0, 1.0, 9.3146, 3.3373, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1983.0, 1.0, 23.8593, 10.51, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1984.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1985.0, 1.0, 145.2479, 58.6371, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1986.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1987.0, 1.0, 0.0, 0.0, 0.0, -1.23967967, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1988.0, 1.0, 97.081, 18.3801, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1989.0, 1.0, 36.8599, 12.9508, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1990.0, 1.0, 59.7778, 22.0811, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1991.0, 1.0, 78.407, 30.7382, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1992.0, 1.0, 62.2633, 7.5712, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1993.0, 1.0, 27.4457, 12.8512, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1994.0, 1.0, 58.2287, 9.9123, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1995.0, 1.0, 53.4967, 17.0851, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1996.0, 1.0, 0.0, 0.0, 0.0, -2.999994, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1997.0, 1.0, 0.0, 0.0, 0.0, -1.7999964, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1998.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[1999.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2000.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2001.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2002.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2003.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2004.0, 1.0, 53.7457, 12.7515, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2005.0, 1.0, 18.8284, 3.1879, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2006.0, 1.0, 86.1725, 25.4533, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2007.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2008.0, 1.0, 62.1139, 7.6161, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2009.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2010.0, 1.0, 0.0, 0.0, 0.0, 13.8608871, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2011.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2012.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2013.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2014.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2015.0, 1.0, 68.6889, 2.3311, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2016.0, 1.0, 40.0428, 7.1478, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2017.0, 1.0, 0.0, 0.0, 0.0, 0.599988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2018.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2019.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2020.0, 1.0, 23.1769, 7.1777, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2021.0, 1.0, 54.5726, 8.4927, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2022.0, 1.0, 0.0, 0.0, 0.0, 1.29600829, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2023.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2024.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2025.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2026.0, 1.0, 47.9179, 4.8814, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2027.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2028.0, 1.0, 89.3105, 14.993, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2029.0, 1.0, 39.8485, 12.7515, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2030.0, 1.0, 55.788, 1.4943, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2031.0, 1.0, 0.0, 0.0, 0.0, -0.9000009, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2032.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2033.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2034.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2035.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2036.0, 1.0, 58.3283, 11.5561, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2037.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2038.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2039.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2040.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2041.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2042.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2043.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 63.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2044.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2045.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2046.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2047.0, 1.0, 64.7041, -8.9609, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2048.0, 1.0, 7.4068, 1.7085, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2049.0, 1.0, 0.0, 0.0, 0.0, -0.5999988, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2050.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2051.0, 1.0, 64.7539, 9.9621, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2052.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2053.0, 1.0, 157.6508, 30.8826, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2054.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2055.0, 1.0, 0.0, 0.0, 0.0, -1.1999976, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2056.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2057.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2058.0, 1.0, 49.0735, 6.0669, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2059.0, 1.0, 41.049, 7.9647, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2060.0, 1.0, 121.4335, 42.2146, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2061.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2062.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2063.0, 1.0, 55.1902, 10.7093, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2064.0, 1.0, 27.7794, 5.6435, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2065.0, 1.0, 53.148, 14.7938, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2066.0, 1.0, 82.6857, 12.9508, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2067.0, 1.0, 77.6548, 15.0428, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2068.0, 1.0, 54.2936, 6.1765, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2069.0, 1.0, 98.9539, 18.4648, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2070.0, 1.0, 133.5424, 30.6834, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2071.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2072.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2073.0, 1.0, 67.7077, 29.5776, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2074.0, 1.0, 47.6688, 15.4911, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2075.0, 1.0, 93.146, 23.6601, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2076.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2077.0, 1.0, 0.0, 0.0, 0.0, 0.900009, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2078.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 66.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2079.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2080.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2081.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2082.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2083.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2084.0, 1.0, 51.9027, 13.2994, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2085.0, 1.0, 27.6947, 10.4104, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2086.0, 1.0, 42.1398, 8.8165, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2087.0, 1.0, 70.9304, 21.8171, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2088.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2089.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2090.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2091.0, 1.0, 64.6543, -7.4517, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2092.0, 1.0, 69.3365, 22.8631, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2093.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2094.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2095.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2096.0, 1.0, 5.6485, 2.0522, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2097.0, 1.0, 51.2054, 20.273, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2098.0, 1.0, 48.7647, 16.9855, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2099.0, 1.0, 50.9015, 11.0679, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2100.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2101.0, 1.0, 94.3514, 26.9575, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2102.0, 1.0, 112.9158, 39.5796, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2103.0, 1.0, 81.9137, 8.2736, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2104.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2105.0, 1.0, 164.2607, 52.7993, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2106.0, 1.0, 38.4389, 1.4794, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2107.0, 1.0, 39.8834, 13.8474, 1e-06, -1e-06, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2108.0, 1.0, 190.3764, 34.0207, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2109.0, 1.0, 150.4283, 20.9205, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2110.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2111.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2112.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2113.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2114.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2115.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2116.0, 2.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 10.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2117.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2118.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2119.0, 1.0, 16.4873, 0.0, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2120.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2121.0, 1.0, 191.7711, 43.8334, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2122.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2123.0, 1.0, 61.5859, 18.7936, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2124.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2125.0, 1.0, 122.1308, 38.409, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2126.0, 1.0, 151.6237, 23.6601, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2127.0, 1.0, 79.5975, 21.6178, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2128.0, 1.0, 89.2558, 8.9111, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2129.0, 1.0, 8.0942, 3.2128, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2130.0, 1.0, 68.2406, 16.4375, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2131.0, 1.0, 0.3835, 1.2202, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2132.0, 1.0, 59.992, 17.2893, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2133.0, 1.0, 107.3918, 3.2875, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2134.0, 1.0, 44.8296, 11.4565, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2135.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2136.0, 1.0, 0.0, 0.0, 0.0, -1.23967967, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2137.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2138.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2139.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2140.0, 1.0, 0.0, 0.0, 0.0, -1.36054422, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2141.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2142.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2143.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2144.0, 1.0, 0.0, 0.0, 0.0, -1.500015, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2145.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2146.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2147.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2148.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2149.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2150.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2151.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2152.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2153.0, 1.0, 68.4947, 22.1658, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2154.0, 1.0, 53.4469, 6.1267, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2155.0, 1.0, 103.1579, 20.8209, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2156.0, 1.0, 34.469, 7.9697, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2157.0, 1.0, 19.6254, 11.2572, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2158.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2159.0, 1.0, 25.9514, 5.778, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2160.0, 1.0, 38.9021, 11.2074, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2161.0, 1.0, 131.301, 20.5718, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2162.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2163.0, 1.0, 92.4187, 11.2423, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2164.0, 1.0, 59.6732, 5.1305, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2165.0, 1.0, 21.9167, 2.0422, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2166.0, 1.0, 88.1151, 21.6178, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2167.0, 1.0, 43.2357, 8.9659, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2168.0, 1.0, 52.8392, 13.7776, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2169.0, 1.0, 101.2253, 7.7705, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2170.0, 1.0, 102.61, 16.9356, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2171.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2172.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2173.0, 1.0, 79.468, 19.1173, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2174.0, 1.0, 160.6394, 41.0938, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2175.0, 1.0, 108.8363, 40.5957, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2176.0, 1.0, 122.5343, 2.5403, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2177.0, 1.0, 103.4568, 16.7862, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2178.0, 1.0, 126.7184, 36.9595, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2179.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2180.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2181.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2182.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2183.0, 1.0, 38.3841, 8.4977, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2184.0, 1.0, 63.7676, 9.8924, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2185.0, 1.0, 64.0565, 24.6563, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2186.0, 1.0, 89.6592, 16.5371, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2187.0, 1.0, 109.5835, 24.6563, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2188.0, 1.0, 108.8861, 26.3996, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2189.0, 1.0, 48.6152, 5.4294, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2190.0, 1.0, 71.0798, 5.0309, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2191.0, 1.0, 73.919, 21.1197, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2192.0, 1.0, 89.4102, -1.6587, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2193.0, 1.0, 121.04, 14.4451, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2194.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2195.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2196.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2197.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2198.0, 1.0, 139.0913, 33.8264, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2199.0, 1.0, 29.7868, 8.1341, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2200.0, 1.0, 63.0454, 14.9183, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2201.0, 1.0, 130.235, 20.5668, 1e-07, -9.9e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2202.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2203.0, 1.0, 28.8902, 7.9697, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2204.0, 1.0, 131.9983, 46.3239, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2205.0, 1.0, 18.9281, 3.9849, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2206.0, 1.0, 78.402, 29.7868, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2207.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2208.0, 1.0, 28.5415, 9.1153, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2209.0, 1.0, 83.4329, 32.9747, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2210.0, 1.0, 31.7792, 14.993, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2211.0, 1.0, 35.7143, 5.9275, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2212.0, 1.0, 38.4538, 11.1576, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2213.0, 1.0, 15.9892, 4.3335, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2214.0, 1.0, 106.4952, 35.2161, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2215.0, 1.0, 57.083, 16.039, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2216.0, 1.0, 40.4961, 11.6557, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2217.0, 1.0, 160.3904, 52.5503, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2218.0, 1.0, 41.3429, 17.4338, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2219.0, 1.0, 24.9053, 3.2526, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2220.0, 1.0, 67.1946, 14.1462, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2221.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2222.0, 1.0, 73.5604, 10.8288, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2223.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2224.0, 1.0, 69.4859, 10.3108, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2225.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2226.0, 1.0, 90.6554, 41.3428, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2227.0, 1.0, 104.6024, 41.841, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2228.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2229.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2230.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 500.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2231.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2232.0, 1.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2233.0, 1.0, 51.5541, 4.8316, 5e-07, -5e-07, 1.0, 1.0, 0.0, 220.0, 1.0, 1.1, 0.95, 0.6, 10 ],
[2234.0, 1.0, 0.0, 0.0, 0.0, 0.0, 2.0, 1.0, 0.0, 220.0, 2.0, 1.1, 0.95, 0.6, 10 ]
])
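# Generator data. The leading 21 columns appear to follow the PYPOWER/MATPOWER
# gen matrix convention (bus, Pg, Qg, Qmax, Qmin, Vg, mBase, status, Pmax, Pmin,
# Pc1, Pc2, Qc1min, Qc1max, Qc2min, Qc2max, ramp_agc, ramp_10, ramp_30, ramp_q,
# apf); the seven trailing columns are assumed to be case-specific extensions.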
ppc["gen"] = array([
[1634.0, 40.0, 44.7, 68.2, 0.0, 1.07, 100.0, 1.0, 110.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 129.41, 22.0, 33.0, 33.0, 44.0 ],
[1632.0, 60.0, 43.6, 68.2, 0.0, 1.07, 100.0, 0.0, 110.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 129.41, 22.0, 33.0, 33.0, 44.0 ],
[1629.0, 90.0, 40.8, 77.46, 0.0, 1.07, 100.0, 1.0, 125.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 147.06, 25.0, 37.5, 37.5, 50.0 ],
[1685.0, 154.8, 75.3, 80.0, 0.0, 1.07, 100.0, 1.0, 157.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 177.177, 31.4, 47.1, 47.1, 62.8 ],
[1706.0, 282.3, 96.3, 185.9, 0.0, 1.07, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.0, 60.0, 90.0, 90.0, 120.0 ],
[1747.0, 79.0, 23.2, 41.5, 0.0, 1.0, 100.0, 0.0, 75.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 88.8888, 15.0, 22.5, 22.5, 30.0 ],
[1746.0, 77.8, 18.4, 41.5, 0.0, 1.0, 100.0, 0.0, 75.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 88.8888, 15.0, 22.5, 22.5, 30.0 ],
[31.0, 100.0, 12.6, 62.0, 0.0, 1.0, 100.0, 1.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],
[30.0, 100.0, 12.6, 62.0, 0.0, 1.0, 100.0, 0.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],
[23.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 7.9312, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[4.0, 7.1, 1.8, 62.0, 0.0, 1.0, 100.0, 0.0, 27.9515, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1666.0, 193.0, 107.7, 185.9, 0.0, 1.0, 100.0, 1.0, 367.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.7, 70.0, 105.0, 105.0, 140.0 ],
[1665.0, 264.8, 115.6, 185.9, 0.0, 1.0, 100.0, 1.0, 367.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.7, 70.0, 105.0, 105.0, 140.0 ],
[1745.0, 234.1, 26.6, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1744.0, 231.6, 46.9, 216.9, 0.0, 1.02, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1743.0, 258.5, 46.6, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1742.0, 263.3, 101.2, 216.9, 0.0, 1.02, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1664.0, 350.0, 34.0, 216.9, 0.0, 1.015, 100.0, 0.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[26.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 22.0193, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[28.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 20.6181, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[19.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 14.8422, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1741.0, 283.9, 41.3, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1740.0, 262.8, 32.8, 216.9, 0.0, 1.03, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1670.0, 219.8, 92.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1669.0, 299.8, 103.9, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1687.0, 297.4, 102.2, 185.9, 0.0, 1.01, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1686.0, 297.7, 86.4, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1729.0, 266.4, 133.3, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1728.0, 225.0, 140.2, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1696.0, 209.0, 112.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1695.0, 209.0, 89.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1690.0, 133.1, 0.0, 88.0, 0.0, 1.0, 100.0, 1.0, 30.695999999999998, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1659.0, 22.2, -0.9, 62.0, 0.0, 1.0, 100.0, 1.0, 13.7135, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1738.0, 134.2, 51.3, 50.0, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1737.0, 155.4, 40.6, 50.0, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1707.0, 264.3, 28.2, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1752.0, 254.3, 31.4, 216.9, 0.0, 1.0, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[13.0, 90.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.2705, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1703.0, 93.2, 0.0, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],
[1702.0, 144.4, 17.6, 123.9, 0.0, 1.0, 100.0, 0.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],
[1704.0, 107.3, 0.0, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],
[1705.0, 107.7, 9.9, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.0, 30.0, 45.0, 45.0, 60.0 ],
[34.0, 30.0, 20.0, 35.0, 0.0, 1.003, 100.0, 1.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 40.0, 6.0, 9.0, 9.0, 12.0 ],
[33.0, 30.0, 20.0, 35.0, 0.0, 1.0, 100.0, 1.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 40.0, 6.0, 9.0, 9.0, 12.0 ],
[1678.0, 257.9, 99.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1677.0, 128.6, 88.6, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1655.0, 49.5, 0.0, 4.95, -0.0, 1.0, 100.0, 0.0, 0.2438, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 110.0, 19.8, 29.7, 29.7, 39.6 ],
[27.0, 48.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 4.949, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1657.0, 90.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 2.2975, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1650.0, 1068.2, 202.5, 600.0, 0.0, 1.0, 100.0, 1.0, 1150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1278.0, 223.6, 335.4, 335.4, 447.2 ],
[1648.0, 1000.0, 300.0, 600.0, 0.0, 1.0, 100.0, 1.0, 1150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1277.778, 230.0, 345.0, 345.0, 460.0 ],
[35.0, 1118.0, 300.0, 600.0, 0.0, 1.0, 100.0, 0.0, 1150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1278.0, 223.6, 335.4, 335.4, 447.2 ],
[1682.0, 246.6, 95.4, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],
[1681.0, 275.9, 100.9, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],
[2116.0, 58.3, 2.4, 44.9, 0.0, 1.0, 100.0, 0.0, 72.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 85.294, 14.5, 21.75, 21.75, 29.0 ],
[2114.0, 67.9, 2.3, 44.9, 0.0, 1.0, 100.0, 0.0, 72.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 85.294, 14.5, 21.75, 21.75, 29.0 ],
[2113.0, 67.0, 4.7, 44.9, 0.0, 1.0, 100.0, 0.0, 72.5, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 85.294, 14.5, 21.75, 21.75, 29.0 ],
[2112.0, 32.2, 5.0, 5.0, 0.0, 1.0, 100.0, 0.0, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 41.14, 7.2, 10.8, 10.8, 14.4 ],
[2110.0, 32.6, 5.4, 5.0, 0.0, 1.0, 100.0, 0.0, 36.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 41.14, 7.2, 10.8, 10.8, 14.4 ],
[1736.0, 30.2, 5.9, 20.0, 0.0, 1.0, 100.0, 0.0, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 49.412, 8.4, 12.6, 12.6, 16.8 ],
[1735.0, 30.8, 6.3, 20.0, 0.0, 1.0, 100.0, 0.0, 42.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 49.412, 8.4, 12.6, 12.6, 16.8 ],
[1734.0, 200.0, 88.0, 123.9, 0.0, 1.0, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1733.0, 200.0, 123.9, 123.9, 0.0, 1.03, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1732.0, 130.3, 19.7, 123.9, 0.0, 1.0, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1694.0, 212.5, 27.6, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1693.0, 215.3, 38.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[25.0, 48.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 1.3553, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1701.0, 472.5, 159.0, 290.6, 0.0, 1.03, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1700.0, 563.6, 210.1, 290.6, 0.0, 1.03, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1652.0, 50.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.2813, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1645.0, 50.0, 20.0, 60.0, 0.0, 1.03, 100.0, 1.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 58.0, 10.0, 15.0, 15.0, 20.0 ],
[24.0, 50.0, 20.0, 60.0, 0.0, 1.03, 100.0, 0.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 58.0, 10.0, 15.0, 15.0, 20.0 ],
[1656.0, 49.5, 0.0, 4.95, -0.0, 1.0, 100.0, 1.0, 3.1519999999999997, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 110.0, 19.8, 29.7, 29.7, 39.6 ],
[14.0, 49.5, 0.0, 4.95, -0.0, 1.0, 100.0, 0.0, 0.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 110.0, 19.8, 29.7, 29.7, 39.6 ],
[1679.0, 140.0, 9.6, 62.0, 0.0, 1.0, 100.0, 1.0, 14.6025, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[116.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.3821, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[18.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 2.6694, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[17.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 1.217, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[16.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 13.5941, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[15.0, 99.0, 20.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.2705, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1612.0, 80.6, 23.4, 62.0, 0.0, 1.0, 100.0, 1.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],
[1609.0, 85.9, 28.5, 62.0, 0.0, 1.0, 100.0, 1.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 20.0, 30.0, 30.0, 40.0 ],
[1691.0, 100.8, 44.0, 123.9, 0.0, 1.0, 100.0, 1.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.471, 30.0, 45.0, 45.0, 60.0 ],
[1662.0, 106.9, 43.8, 123.9, 0.0, 1.0, 100.0, 0.0, 150.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 176.471, 30.0, 45.0, 45.0, 60.0 ],
[1731.0, 119.9, 64.6, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1730.0, 121.8, 59.9, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1649.0, 200.0, 180.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[32.0, 200.0, 34.0, 216.9, 0.0, 1.015, 100.0, 1.0, 350.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 411.76, 70.0, 105.0, 105.0, 140.0 ],
[1651.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],
[1653.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],
[1654.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 0.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],
[1674.0, 300.0, 166.0, 166.0, 0.0, 1.0, 100.0, 0.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 342.86, 60.0, 90.0, 90.0, 120.0 ],
[20.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 2.5111, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1668.0, 600.0, 283.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1727.0, 200.0, 54.0, 130.1, 0.0, 0.98, 100.0, 0.0, 210.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 247.06, 42.0, 63.0, 63.0, 84.0 ],
[1726.0, 120.7, 61.9, 123.9, 0.0, 0.98, 100.0, 0.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1697.0, 450.0, 154.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1643.0, 345.0, 100.0, 62.0, 0.0, 1.0, 100.0, 0.0, 100.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1725.0, 142.8, 36.0, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1724.0, 138.7, 67.0, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1710.0, 128.8, 69.5, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.294, 40.0, 60.0, 60.0, 80.0 ],
[1672.0, 184.5, 123.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1671.0, 181.3, 127.5, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1723.0, 34.9, 3.9, 20.0, 0.0, 1.0, 100.0, 0.0, 50.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 58.0, 10.0, 15.0, 15.0, 20.0 ],
[1722.0, 90.0, 1.0, 50.0, 0.0, 1.01, 100.0, 1.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],
[1721.0, 90.0, 1.0, 50.0, 0.0, 1.0, 100.0, 0.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],
[1720.0, 90.0, 1.0, 50.0, 0.0, 1.0, 100.0, 0.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],
[1719.0, 90.0, 1.0, 50.0, 0.0, 1.0, 100.0, 0.0, 90.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 100.0, 18.0, 27.0, 27.0, 36.0 ],
[1646.0, 125.0, 40.0, 80.0, 0.0, 1.03, 100.0, 1.0, 125.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 177.177, 31.4, 47.1, 47.1, 62.8 ],
[1647.0, 125.0, 40.0, 80.0, 0.0, 1.03, 100.0, 1.0, 125.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 177.177, 31.4, 47.1, 47.1, 62.8 ],
[1676.0, 159.5, 85.5, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1675.0, 159.5, 79.9, 123.9, 0.0, 1.0, 100.0, 1.0, 200.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 235.29, 40.0, 60.0, 60.0, 80.0 ],
[1718.0, 610.2, 90.7, 387.5, 0.0, 1.0, 100.0, 1.0, 800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 888.89, 160.0, 240.0, 240.0, 320.0 ],
[1717.0, 574.5, 167.0, 387.5, 0.0, 1.0, 100.0, 1.0, 800.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 888.89, 160.0, 240.0, 240.0, 320.0 ],
[1692.0, 1004.3, 224.5, 484.0, 0.0, 1.0, 100.0, 1.0, 1000.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1120.0, 201.6, 302.4, 302.4, 403.2 ],
[1663.0, 814.4, 190.8, 484.0, 0.0, 1.0, 100.0, 1.0, 1000.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 1120.0, 201.6, 302.4, 302.4, 403.2 ],
[1709.0, 105.1, 50.2, 77.46, 0.0, 1.03, 100.0, 1.0, 135.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 147.06, 27.0, 40.5, 40.5, 54.0 ],
[1708.0, 101.3, 47.1, 77.46, 0.0, 1.03, 100.0, 1.0, 135.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 147.06, 27.0, 40.5, 40.5, 54.0 ],
[5.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 1.0, 26.8411, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[29.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 9.9352, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[2042.0, 39.5, 8.5, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],
[2040.0, 38.7, 4.5, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],
[2039.0, 39.0, 4.8, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],
[2037.0, 40.1, 6.6, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 52.94, 9.0, 13.5, 13.5, 18.0 ],
[1599.0, 50.0, 27.0, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 60.0, 10.0, 15.0, 15.0, 20.0 ],
[1597.0, 50.0, 27.0, 20.0, 0.0, 1.0, 100.0, 0.0, 45.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 60.0, 10.0, 15.0, 15.0, 20.0 ],
[1661.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 8.0792, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1699.0, 597.1, 168.2, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1698.0, 551.0, 167.2, 290.6, 0.0, 1.0, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1714.0, 213.5, 57.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1713.0, 235.0, 71.0, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1716.0, 222.7, 53.2, 185.9, 0.0, 1.0, 100.0, 0.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1715.0, 202.3, 59.3, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1680.0, 20.6, 6.6, 4.95, -0.0, 1.0, 100.0, 1.0, 13.9289, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 49.5, 9.9, 14.85, 14.85, 19.8 ],
[1658.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 25.5205, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[21.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 14.9237, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1667.0, 594.9, 157.8, 290.6, 0.0, 1.03, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1673.0, 600.0, 137.0, 290.6, 0.0, 1.03, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1712.0, 256.7, 92.1, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1711.0, 256.7, 75.7, 185.9, 0.0, 1.0, 100.0, 1.0, 300.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 352.94, 60.0, 90.0, 90.0, 120.0 ],
[1749.0, 564.0, 103.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1748.0, 543.0, 116.0, 290.6, 0.0, 1.0, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1684.0, 235.0, 80.0, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],
[1683.0, 234.4, 74.8, 185.9, 0.0, 1.0, 100.0, 1.0, 330.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 388.0, 66.0, 99.0, 99.0, 132.0 ],
[22.0, 49.5, 19.0, 62.0, 0.0, 1.0, 100.0, 1.0, 14.9237, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1660.0, 99.0, 19.0, 62.0, 0.0, 1.0, 100.0, 0.0, 27.6718, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1689.0, 114.9, -7.7, 62.0, 0.0, 1.0, 100.0, 1.0, 7.0619, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[117.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 28.0952, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[110.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 20.2656, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[108.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 12.3172, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1688.0, 91.2, -3.3, 62.0, 0.0, 1.0, 100.0, 1.0, 6.6153, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[118.0, 99.0, 15.0, 62.0, 0.0, 1.0, 100.0, 0.0, 0.5386, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[111.0, 50.0, 10.0, 62.0, 0.0, 1.0, 100.0, 0.0, 5.6076, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[107.0, 50.0, 10.0, 62.0, 0.0, 1.0, 100.0, 0.0, 11.3177, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1.0, 0.95, 117.65, 19.8, 29.7, 29.7, 39.6 ],
[1751.0, 497.9, 119.0, 290.6, 0.0, 1.0, 100.0, 0.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ],
[1750.0, 506.0, 142.0, 290.6, 0.0, 1.0, 100.0, 1.0, 600.0, 0.0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0.0, 0.95, 666.67, 120.0, 180.0, 180.0, 240.0 ]
])
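    # Branch data for the case. The rows below appear to follow a MATPOWER/PYPOWER-style
    # branch layout (from-bus, to-bus, r, x, b, MVA ratings, tap ratio, shift angle,
    # in-service status, angle limits), plus one trailing value per row. This column
    # interpretation is an assumption based on the value pattern and is not confirmed
    # by the source; the numeric data itself is kept exactly as-is.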
ppc["branch"] = array([
[1418.0, 2021.0, 0.000709, 0.03936, 0.0061, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[541.0, 2024.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[540.0, 2024.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1545.0, 1418.0, 0.00764, 0.040964, 0.06498, 70.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1545.0, 1418.0, 0.007179, 0.042257, 0.064288, 70.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1545.0, 2021.0, 0.0124, 0.0812, 0.1232, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[542.0, 1960.0, 0.001528, 0.02064, 2.0724, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[539.0, 1960.0, 0.00172, 0.02296, 2.21372, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2234.0, 2233.0, 0.0, 0.187, 0.281, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1870.0, 1871.0, 0.0055, 0.2, 0.3, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1821.0, 1804.0, 0.0017, 0.0122, 0.03806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1821.0, 1804.0, 0.0017, 0.0122, 0.03806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1821.0, 1913.0, 0.002785, 0.020342, 0.06345, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1821.0, 1913.0, 0.002804, 0.020317, 0.063616, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2194.0, 2193.0, 0.0007, 0.0031, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2194.0, 2193.0, 0.0007, 0.0031, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1869.0, 2170.0, 0.0, 0.0001, 0.0002, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 2231.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1962.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1988.0, 0.00046, 0.003737, 0.012788, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1988.0, 0.000424, 0.003818, 0.01291, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1993.0, 0.001928, 0.011229, 0.034974, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1993.0, 0.001775, 0.011229, 0.034426, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1824.0, 0.00242, 0.01694, 0.049586, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1824.0, 5e-06, 3.5e-05, 2.4e-05, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1839.0, 0.000545, 0.004212, 0.013316, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2232.0, 1839.0, 0.000541, 0.004268, 0.013416, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1966.0, 1965.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1966.0, 1961.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1966.0, 2034.0, 0.000436, 0.005137, 0.500594, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1763.0, 2099.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2192.0, 1782.0, 0.002004, 0.011367, 0.016964, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2192.0, 1840.0, 0.001859, 0.011245, 0.03521, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2192.0, 1840.0, 0.001995, 0.011437, 0.033768, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1794.0, 2208.0, 0.002049, 0.019073, 0.054854, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1794.0, 2026.0, 0.004879, 0.030837, 0.09544, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1796.0, 2220.0, 0.001408, 0.006842, 0.024408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1796.0, 2220.0, 0.001394, 0.006874, 0.024286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 1999.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 1998.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2153.0, 0.008206, 0.048173, 0.133258, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2153.0, 0.007348, 0.042683, 0.114282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2152.0, 0.007455, 0.049655, 0.13954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 1776.0, 0.007141, 0.033921, 0.09508, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2065.0, 0.0017, 0.0076, 0.0198, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2065.0, 0.0018, 0.00704, 0.0182, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2004.0, 0.0041, 0.0196, 0.0546, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 1989.0, 0.005358, 0.0248, 0.0503, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 1989.0, 0.004066, 0.021045, 0.057736, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2000.0, 2036.0, 0.0139, 0.0491, 0.1352, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2000.0, 1931.0, 0.001403, 0.007678, 0.020786, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 2002.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 2001.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 115.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 1970.0, 0.000812, 0.015612, 1.68775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 1972.0, 0.000816, 0.015984, 1.68775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 1789.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2003.0, 483.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[115.0, 109.0, 0.001236, 0.013293, 1.480528, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2191.0, 1837.0, 0.001635, 0.012705, 0.037662, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2191.0, 1818.0, 0.01022, 0.042629, 0.06611, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2226.0, 2210.0, 0.001173, 0.005248, 0.008748, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2226.0, 2190.0, 0.00036, 0.0073, 0.0134, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2189.0, 2188.0, 0.0023, 0.0078, 0.0138, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2189.0, 1907.0, 0.002424, 0.014193, 0.040774, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2189.0, 2187.0, 0.007996, 0.039339, 0.110062, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2186.0, 2217.0, 0.0055, 0.0238, 0.0364, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2186.0, 1956.0, 0.002, 0.01, 0.016, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2186.0, 2185.0, 0.0028, 0.0141, 0.0216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2219.0, 2218.0, 0.002676, 0.015582, 0.050366, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2219.0, 2218.0, 0.002791, 0.015447, 0.050366, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1796.0, 0.001819, 0.009567, 0.03228, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1796.0, 0.00179, 0.009574, 0.03228, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2219.0, 0.001167, 0.006646, 0.023698, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2219.0, 0.001154, 0.006607, 0.023536, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2215.0, 0.0029, 0.0172, 0.0498, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2215.0, 0.003, 0.0174, 0.0496, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1947.0, 0.00434, 0.02042, 0.09428, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2221.0, 2216.0, 0.0005, 0.00293, 0.008814, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2216.0, 0.0005, 0.00293, 0.008814, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1938.0, 0.001983, 0.0125, 0.038, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2217.0, 0.0026, 0.0159, 0.045, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2217.0, 0.0025, 0.0156, 0.04604, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1956.0, 0.001996, 0.015004, 0.049722, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1956.0, 0.001942, 0.015223, 0.048658, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 2214.0, 0.00705, 0.0366, 0.0638, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1970.0, 122.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1970.0, 2032.0, 0.001038, 0.010782, 0.99978, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1972.0, 112.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1972.0, 1970.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1972.0, 1971.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1972.0, 2034.0, 0.000863, 0.008857, 0.583716, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[122.0, 121.0, 0.000863, 0.008857, 0.583716, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1898.0, 1970.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1898.0, 122.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1898.0, 120.0, 0.001351, 0.015445, 1.51142, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1896.0, 1972.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1896.0, 1897.0, 0.001355, 0.017948, 1.76, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2184.0, 2169.0, 0.002551, 0.012, 0.032826, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2184.0, 2169.0, 0.002288, 0.012288, 0.051244, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2203.0, 2134.0, 0.0149, 0.0858, 0.1412, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2203.0, 1949.0, 0.0105, 0.05925, 0.0525, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2203.0, 2208.0, 0.00447, 0.02537, 0.03784, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2183.0, 2222.0, 0.001446, 0.009469, 0.030074, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2212.0, 1473.0, 0.0218, 0.0638, 0.066, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2212.0, 1831.0, 0.004731, 0.023671, 0.047954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2212.0, 2097.0, 0.003778, 0.017949, 0.05031, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2212.0, 2182.0, 0.0035, 0.0205, 0.0556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2212.0, 2182.0, 0.007552, 0.0302, 0.046742, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2212.0, 1909.0, 0.004017, 0.028224, 0.081516, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2181.0, 57.0, 1e-06, 1e-06, 2e-06, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2181.0, 2209.0, 0.0143, 0.075, 0.1148, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2181.0, 2180.0, 0.0006, 0.0032, 0.005, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2181.0, 2179.0, 0.0052, 0.0259, 0.038, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1770.0, 1912.0, 0.0004, 0.003044, 0.009322, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 1912.0, 0.0004, 0.003044, 0.009322, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2155.0, 0.000856, 0.006515, 0.019094, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2155.0, 0.000856, 0.006515, 0.019094, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2224.0, 0.00164, 0.012482, 0.036582, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2224.0, 0.00164, 0.012482, 0.036582, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2030.0, 0.001344, 0.010229, 0.02998, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 2030.0, 0.001344, 0.010229, 0.02998, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 1940.0, 0.001313, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 1940.0, 0.001313, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1772.0, 1771.0, 0.000697, 0.008904, 0.966246, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1772.0, 1771.0, 0.000697, 0.008904, 0.966246, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1944.0, 42.0, 0.003347, 0.019091, 0.05291, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1944.0, 1888.0, 0.00452, 0.021267, 0.06035, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1944.0, 1888.0, 0.0033, 0.021, 0.061034, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[40.0, 2157.0, 0.002254, 0.015419, 0.044362, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 1985.0, 0.0004, 0.0018, 0.0044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 1985.0, 0.0004, 0.0018, 0.0044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 2193.0, 0.0003, 0.0017, 0.004, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 2193.0, 0.0003, 0.0025, 0.005, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 2090.0, 0.0019, 0.0086, 0.0214, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1988.0, 2087.0, 0.0008, 0.0055, 0.0142, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2226.0, 0.002291, 0.017079, 0.050654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2226.0, 0.00258, 0.018126, 0.05235, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 1856.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2227.0, 0.004044, 0.029321, 0.090328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2227.0, 0.003984, 0.029357, 0.09127, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2074.0, 0.001113, 0.006391, 0.02179, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1924.0, 2074.0, 0.001088, 0.006441, 0.021698, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1813.0, 1928.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1812.0, 1924.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1928.0, 1970.0, 0.0012, 0.015315, 1.662034, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1928.0, 1972.0, 0.0012, 0.015315, 1.662034, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1928.0, 1855.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1928.0, 1790.0, 0.0005, 0.009109, 0.977482, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1928.0, 1790.0, 0.000499, 0.009108, 0.977482, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1928.0, 2034.0, 0.000494, 0.009033, 0.96659, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1928.0, 2024.0, 0.000363, 0.006412, 0.672766, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1912.0, 2155.0, 0.000721, 0.003805, 0.023416, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2177.0, 2175.0, 0.0018, 0.0107, 0.0208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2177.0, 2175.0, 0.0013, 0.0109, 0.0364, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2177.0, 2174.0, 0.003659, 0.01587, 0.045896, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2177.0, 2176.0, 0.001, 0.004, 0.0076, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2177.0, 2176.0, 0.0009, 0.0039, 0.00888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2173.0, 2171.0, 0.0049, 0.0203, 0.0352, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2173.0, 2172.0, 0.0014, 0.0089, 0.0272, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1810.0, 1939.0, 0.000764, 0.005558, 0.06534, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1810.0, 2202.0, 0.001198, 0.009194, 0.095348, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2171.0, 2168.0, 0.002645, 0.016233, 0.122918, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2171.0, 1829.0, 0.000831, 0.007075, 0.049208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2171.0, 2169.0, 0.0006, 0.0048, 0.0144, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2171.0, 2169.0, 0.0007, 0.005, 0.0146, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2171.0, 1941.0, 0.0005, 0.003, 0.0076, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1809.0, 2218.0, 0.000453, 0.005, 0.0074, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1809.0, 2218.0, 0.000453, 0.005, 0.0074, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[53.0, 1909.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[55.0, 1909.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[36.0, 1831.0, 0.001722, 0.010968, 0.017098, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2167.0, 1982.0, 0.0036, 0.0317, 0.0886, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2167.0, 1983.0, 0.00206, 0.01115, 0.01946, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2162.0, 1908.0, 0.000426, 0.002537, 0.00866, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2162.0, 1908.0, 0.00045, 0.002581, 0.008058, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2162.0, 2161.0, 0.001, 0.006138, 0.017238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2162.0, 2161.0, 0.001, 0.00539, 0.01767, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1794.0, 0.004382, 0.027697, 0.085722, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1794.0, 0.003049, 0.028391, 0.081652, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1887.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2166.0, 0.003412, 0.01859, 0.035532, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2209.0, 0.005598, 0.030473, 0.051208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2209.0, 0.005475, 0.032322, 0.077422, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1908.0, 0.005469, 0.034514, 0.10096, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1908.0, 0.005539, 0.034934, 0.100658, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2164.0, 0.00228, 0.015838, 0.046554, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2208.0, 0.005808, 0.044554, 0.131736, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 2026.0, 0.014736, 0.08342, 0.159408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1927.0, 1928.0, 0.001024, 0.01164, 1.045364, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1927.0, 1928.0, 0.00083, 0.011237, 1.038556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1927.0, 1886.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1927.0, 1814.0, 0.00049, 0.005109, 0.49856, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2166.0, 2164.0, 0.0019, 0.0094, 0.0118, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2166.0, 2165.0, 0.0011, 0.006921, 0.0214, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2166.0, 2165.0, 0.001254, 0.006957, 0.020732, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2166.0, 1783.0, 0.018061, 0.104849, 0.16225, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2166.0, 2163.0, 0.02, 0.128, 0.184, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1841.0, 1925.0, 0.002005, 0.015458, 0.048382, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1841.0, 1925.0, 0.001952, 0.015406, 0.048262, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2160.0, 1842.0, 0.009545, 0.050416, 0.0775, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2160.0, 1910.0, 0.001505, 0.00955, 0.029252, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2159.0, 2156.0, 0.0024, 0.0141, 0.0394, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2159.0, 2156.0, 0.002467, 0.012564, 0.036174, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2159.0, 2158.0, 0.0036, 0.0224, 0.0614, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2159.0, 2157.0, 0.0066, 0.0357, 0.056, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2159.0, 2157.0, 0.0066, 0.0357, 0.066724, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1906.0, 2156.0, 0.001131, 0.010327, 0.03263, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1906.0, 2156.0, 0.00134, 0.010137, 0.032934, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2155.0, 2232.0, 0.002, 0.011176, 0.022224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2155.0, 2232.0, 0.002, 0.011176, 0.022224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2155.0, 2154.0, 0.000957, 0.004942, 0.015, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2155.0, 1940.0, 0.0013, 0.0068, 0.06552, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[45.0, 1995.0, 0.007107, 0.034738, 0.060772, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[45.0, 1995.0, 0.004876, 0.023832, 0.041692, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[45.0, 2185.0, 0.002149, 0.010502, 0.018372, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[45.0, 2185.0, 0.00157, 0.007675, 0.013426, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2188.0, 2228.0, 0.0032, 0.0124, 0.033, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2188.0, 2228.0, 0.003, 0.0143, 0.0408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2153.0, 2152.0, 0.0053, 0.0319, 0.0654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1987.0, 2003.0, 0.00057, 0.005567, 0.51967, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2151.0, 2150.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2151.0, 2149.0, 0.0003, 0.0024, 0.0064, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2151.0, 2149.0, 0.0003, 0.0024, 0.0064, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2148.0, 2147.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2148.0, 2146.0, 0.0003, 0.0024, 0.0062, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2145.0, 2143.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2145.0, 2142.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2145.0, 2141.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2145.0, 2144.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2142.0, 1987.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2142.0, 2139.0, 0.0016, 0.0178, 1.672, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2142.0, 2140.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2141.0, 2138.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2137.0, 2142.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2137.0, 2141.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2137.0, 2135.0, 0.0015, 0.0181, 1.6626, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2137.0, 2136.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1807.0, 2106.0, 0.001225, 0.00965, 0.029664, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2156.0, 51.0, 0.00113, 0.008562, 0.02454, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2156.0, 51.0, 0.001024, 0.007755, 0.022224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2156.0, 2130.0, 0.008293, 0.046318, 0.129332, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2175.0, 2207.0, 0.001095, 0.007076, 0.019756, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2175.0, 2207.0, 0.001116, 0.007079, 0.019756, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2175.0, 1784.0, 0.000787, 0.004344, 0.014244, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2175.0, 1784.0, 0.000787, 0.004344, 0.014244, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1947.0, 2220.0, 0.000603, 0.003376, 0.009118, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1947.0, 2220.0, 0.000475, 0.00314, 0.009422, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2209.0, 2134.0, 0.0137, 0.0773, 0.1374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2209.0, 2208.0, 0.00517, 0.0294, 0.04392, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1791.0, 0.000869, 0.007208, 0.024548, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1791.0, 0.000738, 0.007235, 0.024668, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1990.0, 0.001151, 0.007729, 0.026286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1990.0, 0.000871, 0.007813, 0.026216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 48.0, 0.005823, 0.027349, 0.07467, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 48.0, 0.005823, 0.027349, 0.07467, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1842.0, 0.001531, 0.010085, 0.030386, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1842.0, 0.001531, 0.010085, 0.030386, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 2228.0, 0.007567, 0.040931, 0.114362, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2094.0, 2228.0, 0.006829, 0.035599, 0.10737, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2094.0, 2228.0, 0.010092, 0.044787, 0.083766, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1.0, 0.006166, 0.027296, 0.045504, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1937.0, 1792.0, 0.0, 1e-06, 0.0, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1937.0, 2133.0, 0.00124, 0.008152, 0.014254, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1937.0, 2014.0, 0.002055, 0.016456, 0.05077, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1937.0, 2014.0, 0.002055, 0.016456, 0.05077, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1937.0, 1774.0, 0.005207, 0.03944, 0.113034, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1792.0, 2123.0, 0.00124, 0.01052, 0.018254, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1792.0, 2014.0, 0.002055, 0.016456, 0.05077, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1792.0, 1774.0, 0.005207, 0.03944, 0.113034, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1901.0, 1913.0, 0.0037, 0.0294, 0.085666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1802.0, 1913.0, 0.002304, 0.015628, 0.04459, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2152.0, 2132.0, 0.002, 0.0066, 0.0096, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2152.0, 2131.0, 0.002, 0.0084, 0.0176, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2152.0, 2131.0, 0.0027, 0.009, 0.0144, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1820.0, 1821.0, 0.003241, 0.020126, 0.057066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[59.0, 1804.0, 0.0, 0.0001, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[58.0, 1804.0, 0.0, 0.0001, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2227.0, 2226.0, 0.0006, 0.00225, 0.007, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2227.0, 2226.0, 0.0006, 0.00225, 0.007, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2227.0, 1955.0, 0.000528, 0.005104, 0.00836, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2227.0, 1955.0, 0.000528, 0.005104, 0.00836, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2216.0, 2214.0, 0.0072, 0.0325, 0.047, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1854.0, 2128.0, 0.00069, 0.004434, 0.014444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1854.0, 2198.0, 0.002688, 0.016159, 0.048504, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1854.0, 2172.0, 0.000758, 0.004368, 0.015356, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1854.0, 2172.0, 0.000706, 0.004367, 0.015052, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2200.0, 1943.0, 0.0003, 0.0029, 0.00475, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2010.0, 557.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2010.0, 556.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2010.0, 553.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2010.0, 552.0, 1e-06, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2010.0, 2009.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2130.0, 51.0, 0.006325, 0.047909, 0.137306, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2130.0, 2156.0, 0.006231, 0.047431, 0.139012, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2130.0, 2129.0, 0.008403, 0.052574, 0.08514, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2130.0, 2129.0, 0.008106, 0.03814, 0.0886, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2128.0, 1840.0, 0.001822, 0.010859, 0.032462, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2211.0, 2210.0, 0.0043, 0.0204, 0.0302, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[46.0, 1925.0, 0.007438, 0.056343, 0.161476, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[46.0, 2166.0, 0.005702, 0.043196, 0.123798, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[46.0, 1783.0, 0.005678, 0.043008, 0.12326, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2210.0, 1910.0, 0.004774, 0.033037, 0.094882, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2127.0, 2225.0, 0.0016, 0.0087, 0.0092, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2127.0, 1824.0, 0.002094, 0.01628, 0.048262, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1837.0, 43.0, 0.002851, 0.021598, 0.0619, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1837.0, 43.0, 0.002851, 0.021598, 0.0619, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1837.0, 3.0, 0.007298, 0.023277, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1826.0, 1827.0, 0.002963, 0.017781, 0.051432, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2168.0, 2172.0, 0.001353, 0.007979, 0.09775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2126.0, 2177.0, 0.001083, 0.006426, 0.017174, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2125.0, 2133.0, 0.001, 0.0066, 0.01932, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2125.0, 2133.0, 0.0011, 0.0066, 0.0216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2125.0, 2124.0, 0.001048, 0.007655, 0.021428, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2125.0, 2124.0, 0.001064, 0.007566, 0.02158, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1806.0, 1968.0, 0.004027, 0.025987, 0.06444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1806.0, 1968.0, 0.006024, 0.031897, 0.07314, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 1777.0, 0.002361, 0.01109, 0.030276, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 1777.0, 0.002361, 0.01109, 0.030276, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 2036.0, 0.001453, 0.011009, 0.031552, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 2036.0, 0.001453, 0.011009, 0.031552, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 1817.0, 0.002715, 0.020567, 0.058944, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[41.0, 1817.0, 0.002715, 0.020567, 0.058944, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[54.0, 2064.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1800.0, 1944.0, 0.00362, 0.02356, 0.070238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1800.0, 1944.0, 0.00362, 0.02356, 0.070238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1917.0, 1978.0, 0.001756, 0.012722, 0.039038, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1917.0, 1978.0, 0.001756, 0.012768, 0.039174, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2193.0, 2232.0, 0.00036, 0.00247, 0.008304, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2193.0, 2232.0, 0.00036, 0.002473, 0.008404, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1793.0, 1831.0, 0.004018, 0.02119, 0.031322, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1952.0, 1951.0, 0.00445, 0.02678, 0.0424, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1834.0, 1973.0, 0.001166, 0.01489, 1.616022, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1834.0, 1897.0, 0.000188, 0.003424, 0.356704, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1834.0, 1897.0, 0.000184, 0.003403, 0.358824, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1834.0, 1897.0, 0.000222, 0.003421, 0.351524, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1858.0, 1859.0, 0.0011, 0.0097, 0.030288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1858.0, 1859.0, 0.0011, 0.0097, 0.030288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 2126.0, 0.0016, 0.0111, 0.0326, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 2126.0, 0.002435, 0.013008, 0.039056, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 2121.0, 0.0012, 0.0051, 0.017, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 2182.0, 0.01269, 0.070386, 0.213056, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 2120.0, 0.0205, 0.0676, 0.291, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2174.0, 44.0, 0.005062, 0.023775, 0.064912, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2015.0, 2196.0, 0.0006, 0.0031, 0.0436, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1861.0, 2196.0, 0.0006, 0.0031, 0.0436, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2118.0, 1780.0, 0.014222, 0.06951, 0.121602, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2118.0, 1780.0, 0.014222, 0.06951, 0.121602, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2116.0, 2115.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2114.0, 2115.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2113.0, 2115.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 1924.0, 0.024837, 0.137353, 0.21539, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 2118.0, 0.0018, 0.0039, 0.0067, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 1780.0, 0.013636, 0.077335, 0.11541, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 1780.0, 0.013636, 0.077335, 0.11541, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 2117.0, 0.00714, 0.021, 0.0326, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 1992.0, 0.015847, 0.094112, 0.149088, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2119.0, 1992.0, 0.0163, 0.097, 0.1432, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1977.0, 1927.0, 0.000918, 0.012759, 1.2575, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1977.0, 1927.0, 0.000926, 0.012736, 1.256638, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1977.0, 1883.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1977.0, 1976.0, 0.001129, 0.015209, 1.424948, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1977.0, 1902.0, 0.000146, 0.001874, 0.18991, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1977.0, 1903.0, 0.000172, 0.001884, 0.195408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1780.0, 1992.0, 0.004254, 0.024125, 0.036002, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1780.0, 1992.0, 0.004254, 0.024125, 0.036002, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1975.0, 1977.0, 0.001129, 0.015209, 0.142494, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1975.0, 1974.0, 0.0, 0.0001, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2112.0, 2111.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2110.0, 2111.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 1844.0, 0.002676, 0.015397, 0.031688, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2109.0, 2207.0, 0.0017, 0.0107, 0.0284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 2207.0, 0.0006, 0.0105, 0.0286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 1769.0, 0.003999, 0.030444, 0.089226, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 1769.0, 0.003999, 0.030444, 0.089226, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 2005.0, 0.0016, 0.0048, 0.1224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2109.0, 2204.0, 0.001983, 0.011962, 0.03345, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2109.0, 2108.0, 0.0017, 0.0091, 0.0272, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2109.0, 2108.0, 0.002178, 0.011857, 0.128572, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2107.0, 1948.0, 0.01167, 0.052547, 0.12149, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2107.0, 1953.0, 0.0086, 0.0528, 0.15631, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2106.0, 1948.0, 0.004412, 0.025837, 0.072956, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2106.0, 1921.0, 0.0041, 0.0339, 0.104598, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2106.0, 2105.0, 0.005559, 0.034409, 0.034118, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2106.0, 2105.0, 0.006452, 0.030781, 0.04556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1939.0, 0.001728, 0.014502, 0.11525, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1939.0, 0.001774, 0.014573, 0.113328, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2202.0, 2200.0, 0.000613, 0.004558, 0.02771, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2200.0, 0.000609, 0.004555, 0.027656, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1943.0, 0.000486, 0.004698, 0.007696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1943.0, 0.000486, 0.004698, 0.007696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1874.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2223.0, 0.00323, 0.013, 0.04, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2223.0, 0.00323, 0.013, 0.04, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2199.0, 0.00423, 0.0233, 0.06904, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2199.0, 0.002383, 0.018144, 0.053178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2201.0, 0.000809, 0.006324, 0.084454, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 2201.0, 0.0008, 0.0063, 0.01612, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1976.0, 1875.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1976.0, 1974.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1976.0, 1897.0, 0.001027, 0.013427, 1.31672, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1976.0, 1897.0, 0.001027, 0.013427, 1.31672, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1976.0, 1926.0, 0.00054, 0.007314, 0.736074, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1974.0, 1973.0, 0.001798, 0.017107, 0.320912, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1984.0, 2153.0, 0.0013, 0.0098, 0.0296, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1984.0, 2153.0, 0.0013, 0.0098, 0.0298, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2104.0, 2119.0, 0.0099, 0.035083, 0.048204, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2012.0, 2011.0, 0.043836, 0.178923, 0.032564, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2102.0, 1930.0, 0.00553, 0.029104, 0.081816, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2102.0, 1930.0, 0.003466, 0.018151, 0.05141, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2102.0, 2101.0, 0.0019, 0.012, 0.0332, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2102.0, 2100.0, 0.0098, 0.0256, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2146.0, 2149.0, 0.0, 1e-06, 2e-06, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2146.0, 2075.0, 0.004, 0.0362, 0.0958, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2146.0, 2098.0, 0.0042, 0.0213, 0.0612, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2146.0, 2098.0, 0.00376, 0.021467, 0.060712, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2146.0, 1931.0, 0.005604, 0.031448, 0.087188, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2149.0, 2099.0, 0.0023, 0.0112, 0.03, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2149.0, 2099.0, 0.0026, 0.013, 0.03, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2149.0, 1915.0, 0.001405, 0.006673, 0.0208, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2149.0, 1915.0, 0.001368, 0.00666, 0.020638, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2103.0, 1806.0, 0.009481, 0.05461, 0.09703, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2103.0, 1942.0, 0.00216, 0.01062, 0.0171, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2103.0, 1942.0, 0.00216, 0.01062, 0.0171, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2103.0, 1915.0, 0.002927, 0.011569, 0.03306, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2103.0, 1915.0, 0.002199, 0.011585, 0.0324, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1936.0, 2069.0, 0.001533, 0.01167, 0.03418, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1936.0, 2069.0, 0.001405, 0.01136, 0.03412, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1938.0, 2217.0, 0.000413, 0.002459, 0.0076, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[52.0, 2098.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1948.0, 1838.0, 0.004812, 0.029932, 0.088632, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1948.0, 1838.0, 0.004831, 0.030014, 0.0893, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1948.0, 2105.0, 0.004686, 0.03165, 0.96246, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1948.0, 2105.0, 0.004761, 0.03174, 0.945046, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2097.0, 2182.0, 0.0012, 0.0056, 0.0108, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1959.0, 1876.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2164.0, 2179.0, 0.0053, 0.0326, 0.0446, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2134.0, 2096.0, 0.0064, 0.061, 0.0914, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1949.0, 1795.0, 0.001026, 0.009918, 0.016246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1949.0, 1795.0, 0.001026, 0.009918, 0.016246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1949.0, 2211.0, 0.00437, 0.0184, 0.0161, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1788.0, 2098.0, 0.008655, 0.03852, 0.0579, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2187.0, 1991.0, 0.00095, 0.00498, 0.008738, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2187.0, 1842.0, 0.001028, 0.005377, 0.008848, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2187.0, 1842.0, 0.001367, 0.007231, 0.011618, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2187.0, 1774.0, 0.000967, 0.008013, 0.027288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2187.0, 1774.0, 0.000967, 0.008013, 0.027288, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1778.0, 1948.0, 0.001734, 0.013202, 0.038696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1778.0, 1948.0, 0.001734, 0.013202, 0.038696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1778.0, 2105.0, 0.00244, 0.018575, 0.05444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1778.0, 2105.0, 0.00244, 0.018575, 0.05444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2093.0, 2092.0, 0.0021, 0.009, 0.0162, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2093.0, 2092.0, 0.0021, 0.0092, 0.0164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2122.0, 2091.0, 0.0018, 0.0107, 0.0316, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2122.0, 1.0, 0.0025, 0.01318, 0.01978, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2089.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2088.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2090.0, 1993.0, 0.001073, 0.006678, 0.020362, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 1993.0, 0.001068, 0.006721, 0.020362, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2087.0, 0.0007, 0.004, 0.0106, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2087.0, 0.0007, 0.004, 0.0106, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2086.0, 0.0014, 0.0061, 0.0178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2090.0, 2086.0, 0.0015, 0.0062, 0.0178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 2092.0, 0.000577, 0.004153, 0.012844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 2092.0, 0.000577, 0.004153, 0.013046, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 2084.0, 0.0085, 0.0302, 0.0566, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2088.0, 2084.0, 0.0085, 0.0393, 0.0566, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2088.0, 2085.0, 0.0019, 0.0104, 0.0164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 2085.0, 0.0016, 0.008, 0.022, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1779.0, 0.001312, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1779.0, 0.001312, 0.009985, 0.029266, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1859.0, 0.002117, 0.014224, 0.044428, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1859.0, 0.014442, 0.014442, 0.04484, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2083.0, 2082.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2083.0, 2135.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2083.0, 2139.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2083.0, 1771.0, 0.000327, 0.00455, 0.448486, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2135.0, 1966.0, 0.000205, 0.002384, 0.23393, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2135.0, 1966.0, 0.000168, 0.00234, 0.237148, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2135.0, 2081.0, 0.0006, 0.0071, 0.697466, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2080.0, 2135.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2080.0, 2139.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2080.0, 2079.0, 0.0007, 0.0071, 0.6752, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1767.0, 1795.0, 0.0007, 0.003549, 0.011358, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1767.0, 1795.0, 0.0007, 0.003549, 0.011358, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[114.0, 109.0, 0.001236, 0.013293, 1.480528, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[114.0, 1786.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[113.0, 112.0, 0.001641, 0.01764, 1.964682, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[113.0, 1786.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 2205.0, 0.001323, 0.013531, 0.041808, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 2205.0, 0.001323, 0.013531, 0.041808, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 2084.0, 9.8e-05, 0.001366, 0.134654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 2084.0, 9.8e-05, 0.001366, 0.134654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 119.0, 0.003842, 0.035772, 0.102888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 119.0, 0.003842, 0.035772, 0.102888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1929.0, 1932.0, 0.00352, 0.01739, 0.027392, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2099.0, 2075.0, 0.0075, 0.0333, 0.0862, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2099.0, 1932.0, 0.000571, 0.003917, 0.011298, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2099.0, 1932.0, 0.000625, 0.004002, 0.011024, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2192.0, 0.005799, 0.044143, 0.129376, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2192.0, 0.005799, 0.044143, 0.129376, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2197.0, 0.000333, 0.001914, 0.010434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2197.0, 0.000335, 0.001915, 0.010716, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2195.0, 0.000709, 0.004256, 0.014632, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2198.0, 2196.0, 0.001161, 0.006866, 0.02572, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1934.0, 1933.0, 0.006777, 0.036325, 0.099522, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1766.0, 2098.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1968.0, 1948.0, 0.007335, 0.040468, 0.132678, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1968.0, 1948.0, 0.007335, 0.040468, 0.132678, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 1986.0, 0.0014, 0.008, 0.012, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 2133.0, 0.0024, 0.0152, 0.0254, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 2133.0, 0.0028, 0.0165, 0.0256, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 2122.0, 0.0014, 0.008, 0.0134, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 2122.0, 0.0007, 0.0052, 0.0224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2123.0, 2021.0, 0.012484, 0.069281, 0.11486, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2132.0, 2131.0, 0.0015, 0.0066, 0.012, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2178.0, 2191.0, 0.006813, 0.043, 0.06108, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2178.0, 1818.0, 0.001267, 0.006536, 0.0117, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2178.0, 1818.0, 0.001185, 0.006504, 0.010946, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[12.0, 1679.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[12.0, 116.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 18.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 17.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 16.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 15.0, 0.0, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1857.0, 51.0, 0.002531, 0.019174, 0.05495, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1857.0, 2156.0, 0.003173, 0.027163, 0.078504, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1982.0, 1911.0, 0.004746, 0.035379, 0.105292, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1918.0, 1917.0, 0.00248, 0.01851, 0.055088, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1918.0, 1917.0, 0.002438, 0.01845, 0.055446, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1918.0, 2202.0, 0.001864, 0.014205, 0.044768, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1918.0, 2202.0, 0.001869, 0.014081, 0.044908, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1914.0, 2107.0, 0.0036, 0.019, 0.051544, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1914.0, 2058.0, 0.0061, 0.0313, 0.0847, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1914.0, 1953.0, 0.0113, 0.0675, 0.199492, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[49.0, 2171.0, 0.001603, 0.012145, 0.034808, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[49.0, 2169.0, 0.001099, 0.008326, 0.023862, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2218.0, 2185.0, 0.001653, 0.010407, 0.0294, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1966.0, 0.000152, 0.001935, 0.20991, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1966.0, 0.000124, 0.001938, 0.209752, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1848.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1847.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1846.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1849.0, 1845.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2074.0, 2233.0, 0.0045, 0.0226, 0.0614, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 2198.0, 0.003409, 0.020465, 0.11888, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 1829.0, 0.000246, 0.001611, 0.03219, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 1829.0, 0.000222, 0.001538, 0.032516, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 1867.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 1865.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 1840.0, 0.002366, 0.01494, 0.043588, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 2073.0, 0.001, 0.0068, 0.0192, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 2073.0, 0.001, 0.0072, 0.0196, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 2169.0, 0.0016, 0.008, 0.0176, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2172.0, 2169.0, 0.002, 0.0121, 0.0176, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1973.0, 1868.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1973.0, 1866.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1973.0, 1897.0, 0.0014, 0.0163, 1.604962, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1973.0, 1926.0, 0.000371, 0.004039, 0.2452, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 2221.0, 0.002538, 0.018658, 0.057658, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1947.0, 0.000244, 0.001883, 0.006854, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1947.0, 0.000319, 0.001779, 0.007006, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1947.0, 0.000316, 0.001744, 0.006838, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 2216.0, 0.0032, 0.01325, 0.0247, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 2220.0, 0.000283, 0.001786, 0.007918, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 2220.0, 0.000276, 0.001786, 0.00784, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1823.0, 0.006105, 0.032408, 0.092494, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1823.0, 0.006105, 0.032408, 0.092494, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 2214.0, 0.00572, 0.02325, 0.0247, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1799.0, 1970.0, 0.000271, 0.002947, 0.303246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1799.0, 1798.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1799.0, 1897.0, 0.000631, 0.009242, 0.194064, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1799.0, 1969.0, 9.4e-05, 0.000882, 0.09577, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1798.0, 1972.0, 0.00026, 0.00296, 0.303556, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1798.0, 1897.0, 0.000581, 0.009148, 0.197, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1798.0, 1969.0, 9.5e-05, 0.000894, 0.096712, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1776.0, 2066.0, 0.000748, 0.003551, 0.009954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1776.0, 2066.0, 0.000748, 0.003551, 0.009954, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2013.0, 1806.0, 0.004027, 0.025987, 0.06444, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2013.0, 1819.0, 0.000878, 0.008242, 0.022352, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2013.0, 1819.0, 0.001401, 0.008357, 0.023872, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2069.0, 1930.0, 0.003186, 0.016051, 0.046862, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2069.0, 1930.0, 0.003638, 0.018825, 0.052778, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2069.0, 1942.0, 0.001495, 0.008215, 0.023988, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2069.0, 1932.0, 0.003694, 0.020963, 0.05775, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2095.0, 1991.0, 0.0038, 0.0265, 0.0452, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2095.0, 1774.0, 0.002207, 0.016799, 0.049234, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2095.0, 1774.0, 0.002207, 0.016799, 0.049234, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2206.0, 1954.0, 0.000436, 0.003126, 0.010554, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2206.0, 1954.0, 0.00048, 0.003156, 0.010722, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2206.0, 2205.0, 0.0035, 0.0208, 0.0568, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2154.0, 2232.0, 0.001636, 0.007686, 0.020984, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2154.0, 2232.0, 0.001636, 0.007686, 0.020984, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2154.0, 1824.0, 0.001747, 0.011028, 0.02, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2068.0, 2174.0, 0.0053, 0.0356, 0.1608, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1995.0, 2127.0, 0.002277, 0.013038, 0.02106, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1995.0, 2185.0, 0.009767, 0.035062, 0.048936, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1995.0, 2185.0, 0.005959, 0.032066, 0.049696, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1819.0, 2062.0, 0.003176, 0.015785, 0.043182, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1819.0, 1953.0, 0.004039, 0.022981, 0.066948, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1775.0, 1817.0, 0.00056, 0.004262, 0.012492, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1775.0, 1817.0, 0.00056, 0.004262, 0.012492, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2067.0, 2004.0, 0.0011, 0.0053, 0.0164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2067.0, 2066.0, 0.0035, 0.01357, 0.0193, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2205.0, 2130.0, 0.005, 0.0289, 0.081, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2205.0, 2130.0, 0.003152, 0.02578, 0.0731, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2177.0, 0.002603, 0.021498, 0.07278, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2177.0, 0.002582, 0.021425, 0.0731, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1919.0, 0.001405, 0.011326, 0.219716, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1919.0, 0.00139, 0.011124, 0.22341, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2156.0, 0.005768, 0.043001, 0.127542, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2156.0, 0.005768, 0.043001, 0.127542, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2175.0, 0.002549, 0.017938, 0.059848, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2175.0, 0.002488, 0.01794, 0.059848, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2126.0, 0.002403, 0.02124, 0.071276, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2126.0, 0.002353, 0.021196, 0.072128, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1833.0, 0.003269, 0.018545, 0.027674, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1833.0, 0.003269, 0.018545, 0.027674, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1920.0, 1833.0, 0.003269, 0.018545, 0.027674, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1832.0, 0.000607, 0.004514, 0.015152, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 2.0, 0.000607, 0.004504, 0.015044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 1790.0, 0.000544, 0.007352, 0.76844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 1790.0, 0.000544, 0.007352, 0.76844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 1786.0, 0.000733, 0.009358, 1.015624, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 1786.0, 0.000733, 0.009358, 1.015624, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 123.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 2079.0, 0.000508, 0.0044, 0.4396, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 2081.0, 0.000464, 0.00536, 0.5338, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[123.0, 1959.0, 0.000968, 0.01148, 1.1461, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1978.0, 2183.0, 0.0019, 0.0102, 0.0276, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1978.0, 1888.0, 0.0035, 0.0221, 0.064074, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1978.0, 1888.0, 0.0036, 0.0222, 0.064304, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2121.0, 2071.0, 0.0028, 0.0171, 0.0458, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[37.0, 2149.0, 0.001399, 0.00713, 0.021124, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1791.0, 2187.0, 0.000547, 0.004293, 0.012496, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1791.0, 2187.0, 0.000564, 0.003571, 0.010164, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2087.0, 2203.0, 0.01588, 0.0793, 0.1166, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1840.0, 1782.0, 0.002004, 0.011367, 0.016964, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1888.0, 42.0, 0.001897, 0.010818, 0.029982, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2065.0, 2064.0, 0.0047, 0.0232, 0.0596, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2065.0, 1825.0, 0.010653, 0.057707, 0.104974, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2182.0, 1831.0, 0.006864, 0.041913, 0.08442, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2182.0, 2097.0, 0.001925, 0.009143, 0.02563, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2182.0, 2120.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2182.0, 44.0, 0.007721, 0.036266, 0.099012, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2120.0, 1454.0, 0.0152, 0.069, 0.1232, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2120.0, 2068.0, 0.0076, 0.0355, 0.1318, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2120.0, 2124.0, 0.0107, 0.0548, 0.1562, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2120.0, 2063.0, 0.0078, 0.0253, 0.08, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1958.0, 2230.0, 0.000968, 0.01148, 1.2124, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1765.0, 2212.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1765.0, 1909.0, 0.009008, 0.044028, 0.077024, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2102.0, 0.0019, 0.0088, 0.0194, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2102.0, 0.0016, 0.0072, 0.021, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2102.0, 0.001246, 0.007242, 0.0218, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 1942.0, 0.0066, 0.03245, 0.0523, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2061.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2058.0, 0.0101, 0.0509, 0.141, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2060.0, 0.0013, 0.0092, 0.025, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2060.0, 0.00201, 0.01179, 0.0338, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 2059.0, 0.0034, 0.01617, 0.044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 1953.0, 0.0025, 0.014, 0.036, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 1953.0, 0.0025, 0.014, 0.036, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2003.0, 0.001561, 0.014418, 1.393376, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2141.0, 0.000512, 0.008616, 0.84623, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2010.0, 0.000932, 0.01154, 1.07545, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2009.0, 0.001, 0.0116, 1.0912, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2140.0, 0.0007, 0.008796, 0.873706, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 2056.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2206.0, 0.00062, 0.00339, 0.00774, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2206.0, 0.00054, 0.00357, 0.00774, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2205.0, 0.003, 0.0161, 0.0416, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2207.0, 2054.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2052.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2018.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 1784.0, 0.00052, 0.00287, 0.00941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 1784.0, 0.00052, 0.00287, 0.00941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2207.0, 2053.0, 0.0015, 0.0078, 0.022, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2052.0, 2051.0, 0.0013, 0.0078, 0.0226, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2079.0, 315.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2079.0, 2050.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2079.0, 2019.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2079.0, 2081.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2079.0, 2230.0, 0.000544, 0.007352, 0.76844, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2081.0, 307.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2081.0, 2230.0, 0.00054, 0.00738, 0.766086, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 2187.0, 0.00126, 0.007397, 0.019756, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 1916.0, 0.000818, 0.0061, 0.001808, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2124.0, 1916.0, 0.000818, 0.0061, 0.001808, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2124.0, 6.0, 0.000717, 0.002597, 0.003648, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 2121.0, 0.002019, 0.0095, 0.046, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 2014.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 2006.0, 0.0087, 0.0339, 0.2008, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 1774.0, 0.001156, 0.006379, 0.020912, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2124.0, 1774.0, 0.001156, 0.006379, 0.020912, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2014.0, 2174.0, 0.0026, 0.0129, 0.0374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2014.0, 2174.0, 0.0023, 0.0129, 0.0374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2014.0, 2121.0, 0.002312, 0.016324, 0.04676, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2014.0, 2063.0, 0.0081, 0.0314, 0.0662, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 1773.0, 0.000279, 0.003874, 0.381812, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 1773.0, 0.000279, 0.003874, 0.381812, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 2229.0, 0.000612, 0.007548, 0.76969, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 2229.0, 0.000684, 0.007548, 0.761836, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 2024.0, 0.000436, 0.006384, 0.62015, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 2024.0, 0.00044, 0.00638, 0.6202, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2230.0, 2024.0, 0.00044, 0.00638, 0.6202, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2071.0, 2070.0, 0.0004, 0.0025, 0.0666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2071.0, 2070.0, 0.0003, 0.0013, 0.0666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2071.0, 2108.0, 0.0025, 0.0133, 0.0396, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1769.0, 1844.0, 0.003178, 0.024071, 0.068986, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1769.0, 1844.0, 0.003178, 0.024071, 0.068986, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1773.0, 2024.0, 0.000296, 0.004117, 0.40581, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1773.0, 2024.0, 0.000296, 0.004117, 0.40581, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1843.0, 1954.0, 0.000196, 0.001444, 0.005702, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1843.0, 1954.0, 0.00017, 0.001475, 0.00593, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1990.0, 1781.0, 0.002351, 0.017893, 0.052442, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1990.0, 1781.0, 0.002515, 0.019148, 0.05612, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1990.0, 1791.0, 0.001184, 0.005796, 0.016876, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1990.0, 1791.0, 0.000773, 0.005178, 0.014792, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1990.0, 2091.0, 0.002873, 0.014873, 0.026988, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1990.0, 2091.0, 0.001843, 0.012695, 0.028906, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2092.0, 1949.0, 0.000576, 0.005568, 0.00912, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2075.0, 1776.0, 0.003123, 0.014847, 0.041616, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2075.0, 1776.0, 0.003123, 0.014847, 0.041616, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2075.0, 2066.0, 0.003, 0.0162, 0.0458, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2075.0, 2066.0, 0.003, 0.0162, 0.0458, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1909.0, 1831.0, 0.000425, 0.002347, 0.007694, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1909.0, 1831.0, 0.000425, 0.002347, 0.007694, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2004.0, 2000.0, 0.0043, 0.0189, 0.0516, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[50.0, 1894.0, 0.007438, 0.037376, 0.062508, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[50.0, 1894.0, 0.007438, 0.037376, 0.062508, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2180.0, 2166.0, 0.011111, 0.065754, 0.098978, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2180.0, 2134.0, 0.0056, 0.0304, 0.0504, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2131.0, 2000.0, 0.0109, 0.0472, 0.1306, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2131.0, 2064.0, 0.00604, 0.037441, 0.111652, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2131.0, 2064.0, 0.006511, 0.037267, 0.111562, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2131.0, 2065.0, 0.015, 0.0413, 0.0936, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2048.0, 2047.0, 0.0049, 0.021, 0.034, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2048.0, 2214.0, 0.0132, 0.0474, 0.074, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1913.0, 2153.0, 0.0017, 0.0122, 0.03806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1913.0, 2153.0, 0.0017, 0.0123, 0.038104, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1913.0, 2132.0, 0.0015, 0.0104, 0.03276, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1913.0, 2132.0, 0.0014, 0.0105, 0.03257, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1850.0, 2204.0, 0.0007, 0.003549, 0.011358, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1850.0, 2204.0, 0.00068, 0.003595, 0.011282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1935.0, 1934.0, 0.00093, 0.005165, 0.014484, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2046.0, 2010.0, 0.00011, 0.0016, 0.157, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2046.0, 2010.0, 0.000112, 0.001608, 0.1727, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2046.0, 2045.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2045.0, 2010.0, 0.00011, 0.0016, 0.157, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2044.0, 2045.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2058.0, 1933.0, 0.001967, 0.011025, 0.032296, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2058.0, 1934.0, 0.00524, 0.028022, 0.078426, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2084.0, 1779.0, 0.003284, 0.025003, 0.07328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2084.0, 1779.0, 0.003284, 0.025003, 0.07328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2195.0, 2196.0, 0.0006, 0.0034, 0.016282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1764.0, 1831.0, 4.9e-05, 0.000287, 0.001824, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[56.0, 2153.0, 0.003648, 0.013602, 0.02284, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2042.0, 2041.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2040.0, 2041.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2039.0, 2038.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2037.0, 2038.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2006.0, 1769.0, 0.005199, 0.039577, 0.115992, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2028.0, 1907.0, 0.001632, 0.014674, 0.046224, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2028.0, 1955.0, 1e-06, 1e-05, 0.0, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2028.0, 2228.0, 0.0022, 0.016793, 0.049218, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1805.0, 2064.0, 0.004105, 0.025004, 0.073654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1989.0, 2075.0, 0.002775, 0.01195, 0.031086, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1989.0, 2075.0, 0.002042, 0.009724, 0.0056, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2036.0, 1777.0, 0.001686, 0.01625, 0.028548, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2036.0, 1776.0, 0.002319, 0.017657, 0.05175, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2036.0, 1776.0, 0.002319, 0.017657, 0.05175, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2158.0, 2159.0, 0.003785, 0.035893, 0.102126, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2158.0, 1832.0, 0.003733, 0.026363, 0.08693, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2158.0, 2.0, 0.003679, 0.026454, 0.08693, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2063.0, 2068.0, 0.0013, 0.0076, 0.1, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2085.0, 1949.0, 0.001026, 0.009918, 0.016246, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2060.0, 2101.0, 0.001194, 0.006769, 0.02107, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2060.0, 2101.0, 0.00123, 0.00755, 0.0216, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1828.0, 1827.0, 0.002291, 0.013129, 0.037544, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1951.0, 0.000967, 0.005386, 0.015858, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1951.0, 0.00083, 0.005543, 0.015894, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1800.0, 0.0032, 0.0256, 0.050238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1800.0, 0.0032, 0.0256, 0.050238, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1952.0, 0.0053, 0.0287, 0.043366, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1888.0, 0.0046, 0.0265, 0.07574, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1888.0, 0.0049, 0.0281, 0.076512, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1893.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1891.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 2047.0, 0.003, 0.0182, 0.052822, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 2047.0, 0.003, 0.0183, 0.052868, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1827.0, 0.000858, 0.005166, 0.015054, 10.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1894.0, 1827.0, 0.000914, 0.005525, 0.01506, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1897.0, 1895.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1897.0, 1892.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[120.0, 1897.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2047.0, 1917.0, 0.006735, 0.04502, 0.1218, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2047.0, 1978.0, 0.005, 0.0273, 0.0742, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2047.0, 2048.0, 0.011661, 0.047648, 0.068356, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2047.0, 2163.0, 0.0157, 0.0776, 0.1892, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1762.0, 1921.0, 0.004241, 0.030126, 0.085066, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1912.0, 0.0035, 0.0199, 0.055758, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 2167.0, 0.0014, 0.0093, 0.02272, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 2167.0, 0.0026, 0.0129, 0.0206, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 2224.0, 0.0008, 0.00608, 0.018, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 2224.0, 0.0007, 0.0061, 0.01778, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1982.0, 0.004371, 0.036771, 0.102082, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1911.0, 0.000587, 0.005466, 0.015722, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1911.0, 0.001272, 0.011845, 0.034066, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1995.0, 0.0032, 0.0166, 0.0476, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 2035.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1980.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2225.0, 1983.0, 0.005, 0.0147, 0.0374, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 1966.0, 0.000356, 0.005065, 0.51967, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 2003.0, 0.00121, 0.01355, 1.2482, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 1772.0, 0.000317, 0.00405, 0.439468, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2034.0, 1772.0, 0.000309, 0.004298, 0.42362, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2034.0, 2033.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 1981.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 2032.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2034.0, 1771.0, 0.000759, 0.010812, 1.0325, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[121.0, 2034.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1801.0, 2131.0, 0.0037, 0.0294, 0.085666, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2220.0, 2170.0, 0.000467, 0.004897, 0.015144, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2220.0, 2170.0, 0.000467, 0.0049, 0.015136, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2030.0, 1940.0, 0.000667, 0.003612, 0.055194, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2204.0, 1844.0, 0.001053, 0.007978, 0.022864, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2204.0, 1844.0, 0.001053, 0.007978, 0.022864, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2204.0, 2206.0, 0.0023, 0.0127, 0.033, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2233.0, 1992.0, 0.0055, 0.0269, 0.044, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2233.0, 1871.0, 0.0055, 0.0269, 0.044, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2233.0, 2190.0, 0.0017, 0.0128, 0.0398, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2233.0, 2228.0, 0.001919, 0.010339, 0.029802, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2233.0, 2228.0, 0.003985, 0.013988, 0.035304, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2223.0, 2169.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2223.0, 2222.0, 0.003, 0.0199, 0.0546, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2223.0, 2222.0, 0.002477, 0.015386, 0.086506, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1946.0, 2124.0, 0.002181, 0.012442, 0.034482, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1946.0, 1769.0, 0.004399, 0.033488, 0.098148, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2213.0, 2212.0, 0.00872, 0.0415, 0.0603, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1823.0, 1822.0, 0.001557, 0.008831, 0.013178, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1823.0, 1822.0, 0.001557, 0.008831, 0.013178, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1992.0, 47.0, 0.008124, 0.030296, 0.05087, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1992.0, 1871.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[38.0, 1921.0, 0.005421, 0.030248, 0.044896, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1832.0, 2.0, 0.0, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2199.0, 2163.0, 0.012972, 0.060245, 0.0882, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2029.0, 1825.0, 0.002794, 0.015736, 0.030542, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2029.0, 1825.0, 0.002779, 0.016037, 0.030802, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2029.0, 2004.0, 0.0061, 0.0282, 0.0736, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2029.0, 119.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2161.0, 2165.0, 0.002758, 0.017246, 0.05042, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2161.0, 2165.0, 0.00281, 0.017192, 0.050784, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2190.0, 1955.0, 0.0015, 0.005, 0.008, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2059.0, 1933.0, 0.007141, 0.03759, 0.110426, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2059.0, 2060.0, 0.001137, 0.007726, 0.021632, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2066.0, 1777.0, 0.008535, 0.047552, 0.135966, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2066.0, 2036.0, 0.0277, 0.0546, 0.1086, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2066.0, 1817.0, 0.001193, 0.008897, 0.028558, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2066.0, 1817.0, 0.001271, 0.008926, 0.028726, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2214.0, 1822.0, 0.001297, 0.008265, 0.028008, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2214.0, 2048.0, 0.004664, 0.019059, 0.027342, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2228.0, 2188.0, 0.0032, 0.0124, 0.033, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 47.0, 0.002432, 0.009068, 0.015226, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 1907.0, 0.000749, 0.006419, 0.019036, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 1907.0, 0.000404, 0.006082, 0.019234, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 48.0, 0.002281, 0.010715, 0.029254, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 48.0, 0.002281, 0.010715, 0.029254, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 2028.0, 0.003431, 0.018104, 0.05278, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 2028.0, 0.002438, 0.018489, 0.053282, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 2025.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2024.0, 1790.0, 0.000393, 0.006763, 0.725106, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2024.0, 2139.0, 0.0012, 0.0095, 0.8706, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2024.0, 2034.0, 0.0009, 0.0131, 1.2058, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2024.0, 2023.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2024.0, 1771.0, 0.00041, 0.005233, 0.567852, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2024.0, 1771.0, 0.000362, 0.005035, 0.496268, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1816.0, 2003.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1816.0, 1899.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1815.0, 2003.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1815.0, 1899.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1923.0, 1807.0, 0.004043, 0.031502, 0.092992, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1837.0, 0.00419, 0.032116, 0.097538, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1837.0, 0.003923, 0.032344, 0.097258, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 2106.0, 0.005601, 0.039221, 0.120638, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1923.0, 2106.0, 0.00442, 0.04115, 0.118408, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1921.0, 0.008033, 0.074789, 0.215092, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1968.0, 8.3e-05, 0.001479, 0.004712, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1968.0, 6.2e-05, 0.001495, 0.004682, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 2178.0, 0.001489, 0.009279, 0.019006, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 2178.0, 0.0019, 0.008904, 0.019006, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1818.0, 0.000639, 0.003844, 0.011098, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1818.0, 0.000629, 0.00385, 0.011346, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1899.0, 2136.0, 0.000834, 0.010243, 0.944442, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1899.0, 2144.0, 0.000915, 0.009985, 0.950792, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1899.0, 500.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1899.0, 499.0, 0.00067, 0.01333, 1.33542, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1836.0, 1968.0, 0.001023, 0.007793, 0.02284, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1836.0, 1968.0, 0.001023, 0.007793, 0.02284, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1835.0, 1899.0, 3.5e-05, 0.000554, 0.01563, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 2160.0, 0.000808, 0.00615, 0.018024, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 2160.0, 0.000808, 0.00615, 0.018024, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1795.0, 0.002839, 0.021615, 0.06335, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1795.0, 0.002839, 0.021615, 0.06335, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 2210.0, 0.001992, 0.015161, 0.044434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 2210.0, 0.002895, 0.022041, 0.0646, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1844.0, 0.002519, 0.019179, 0.056212, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1768.0, 1994.0, 0.002367, 0.013057, 0.042808, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1994.0, 0.001992, 0.015161, 0.044434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1910.0, 0.001432, 0.010899, 0.031942, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1910.0, 0.001432, 0.010899, 0.031942, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2196.0, 2008.0, 0.002104, 0.008588, 0.01563, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2196.0, 2016.0, 0.002104, 0.008588, 0.01563, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2196.0, 1852.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1926.0, 1853.0, 1e-06, 1e-06, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1830.0, 2159.0, 0.005669, 0.029498, 0.084286, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1830.0, 1831.0, 0.005312, 0.030531, 0.088372, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1830.0, 1831.0, 0.005391, 0.030252, 0.088402, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1830.0, 2097.0, 0.003948, 0.020204, 0.05813, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1983.0, 1950.0, 0.0012, 0.0116, 0.019, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2086.0, 2030.0, 0.00086, 0.004229, 0.012674, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2185.0, 2217.0, 0.0024, 0.0101, 0.0152, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2027.0, 1947.0, 0.000579, 0.003409, 0.008058, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2027.0, 1947.0, 0.000579, 0.00341, 0.00806, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2027.0, 1822.0, 0.003665, 0.023351, 0.069198, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1860.0, 1956.0, 0.000192, 0.001612, 0.007754, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1860.0, 1956.0, 0.00019, 0.001612, 0.008058, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[39.0, 2146.0, 0.005056, 0.02051, 0.02918, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1994.0, 2160.0, 0.003787, 0.015066, 0.02744, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1994.0, 1844.0, 0.006343, 0.034897, 0.072984, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1994.0, 2088.0, 0.003409, 0.018265, 0.06, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1994.0, 2088.0, 0.00339, 0.018097, 0.06, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1774.0, 2125.0, 0.000519, 0.002865, 0.009394, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1774.0, 2125.0, 0.000519, 0.002865, 0.009394, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2053.0, 2051.0, 1e-05, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1900.0, 2196.0, 0.00048, 0.0046, 0.0076, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2091.0, 1781.0, 0.000508, 0.003865, 0.011328, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2091.0, 1787.0, 0.000211, 0.000705, 0.03415, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2091.0, 1.0, 0.0, 1e-06, 2e-06, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1.0, 1781.0, 0.00044, 0.003349, 0.009814, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1.0, 1787.0, 0.000216, 0.000738, 0.035304, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1803.0, 2153.0, 0.004651, 0.032568, 0.093178, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1905.0, 2129.0, 0.004099, 0.034324, 0.09695, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1904.0, 2129.0, 0.004105, 0.025004, 0.073654, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2108.0, 2124.0, 0.004633, 0.02824, 0.08162, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2108.0, 1769.0, 0.003559, 0.027095, 0.07941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2108.0, 1769.0, 0.003559, 0.027095, 0.07941, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2108.0, 1945.0, 0.00096, 0.00928, 0.0152, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1941.0, 1829.0, 0.001096, 0.005395, 0.043434, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2021.0, 2020.0, 0.00781, 0.0352, 0.0262, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2021.0, 2091.0, 0.014, 0.0727, 0.110892, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2163.0, 1783.0, 0.004747, 0.036136, 0.10591, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2163.0, 2026.0, 0.0123, 0.0679, 0.104, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1902.0, 1903.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1859.0, 2204.0, 0.0049, 0.0288, 0.08016, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[2222.0, 1917.0, 0.002438, 0.01471, 0.04222, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1950.0, 2215.0, 0.00095, 0.005619, 0.018094, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1950.0, 2215.0, 0.001591, 0.007644, 0.012924, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1950.0, 2218.0, 0.003325, 0.02037, 0.03325, 100.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[316.0, 315.0, 0.001572, 0.02166, 3.44616, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[310.0, 307.0, 0.001592, 0.021628, 3.43046, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1922.0, 1921.0, 0.0055, 0.0332, 0.048824, 100.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[482.0, 1789.0, 0.001904, 0.030428, 2.94106, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[484.0, 483.0, 0.001926, 0.030303, 2.93952, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[508.0, 1899.0, 0.001544, 0.016148, 1.54645, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[508.0, 1899.0, 0.00134, 0.014248, 1.32665, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[508.0, 482.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[508.0, 484.0, 0.0, 0.0001, 0.0, 400.0, 0.0,0.0,0.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[500.0, 508.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[499.0, 508.0, 0.0, 1e-05, 0.0, 400.0, 0.0,0.0,0.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1685.0, 1869.0, 0.00131, 0.072778, 0.0027, 180.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1706.0, 1985.0, 0.0003, 0.019557, 0.0, 360.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1642.0, 1763.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1747.0, 2181.0, 0.0047, 0.1573, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1746.0, 2181.0, 0.0047, 0.156, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[31.0, 57.0, 0.0047, 0.1573, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[30.0, 57.0, 0.0047, 0.1573, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[23.0, 40.0, 0.002828, 0.1393, 0.0011, 100.0, 0.0,0.0,0.940909, 0.0,1.0,-30.0, 30.0, 0.1 ],
[4.0, 3.0, 0.002083, 0.116667, 0.00156, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1666.0, 1810.0, 0.000508, 0.037, 0.004284, 420.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1665.0, 1810.0, 0.000507, 0.036952, 0.003864, 420.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1745.0, 2171.0, 0.000585, 0.034067, 0.006103, 436.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1744.0, 2171.0, 0.000585, 0.034067, 0.061027, 436.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1743.0, 2171.0, 0.000526, 0.030275, 0.00981, 418.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1742.0, 2171.0, 0.000526, 0.030275, 0.00981, 418.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1664.0, 1809.0, 0.0012, 0.074111, 0.0018, 180.0, 0.0,0.0,1.097727, 0.0,0.0,-30.0, 30.0, 0.1 ],
[26.0, 53.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[28.0, 55.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[19.0, 36.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1741.0, 2162.0, 0.0006, 0.0345, 0.0, 418.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1740.0, 2162.0, 0.0006, 0.0343, 0.0, 418.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1670.0, 1841.0, 0.000544, 0.037838, 0.0148, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1669.0, 1841.0, 0.000544, 0.037838, 0.0148, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1687.0, 1906.0, 0.000791, 0.048433, 0.0033, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1686.0, 1906.0, 0.000791, 0.048433, 0.0033, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1729.0, 1986.0, 0.000659, 0.043486, 0.00189, 430.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1728.0, 2122.0, 0.000659, 0.043486, 0.00189, 430.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1696.0, 1937.0, 0.000802, 0.048833, 0.0051, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1695.0, 1792.0, 0.000802, 0.048833, 0.0051, 370.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1690.0, 1901.0, 0.002669, 0.136, 0.0009, 100.0, 0.0,0.0,1.00625, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1659.0, 1802.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1739.0, 2152.0, 0.0041, 0.0942, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1738.0, 2152.0, 0.001394, 0.0686, 0.005, 240.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1737.0, 2152.0, 0.002018, 0.0757, 0.00184, 240.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1707.0, 2152.0, 0.000659, 0.066286, 0.00819, 430.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1752.0, 2152.0, 0.000659, 0.041543, 0.00945, 430.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[13.0, 1820.0, 0.003265, 0.139, 0.00076, 120.0, 0.0,0.0,0.940909, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1703.0, 1984.0, 0.001884, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1702.0, 1984.0, 0.001871, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1704.0, 1984.0, 0.001876, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1705.0, 1984.0, 0.001867, 0.093333, 4.5e-05, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[34.0, 59.0, 0.0064, 0.1807, 0.0, 75.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[33.0, 58.0, 0.0064, 0.1807, 0.0, 75.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1678.0, 1854.0, 0.000769, 0.050067, 0.00276, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1677.0, 1854.0, 0.000762, 0.0499, 0.00276, 370.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1655.0, 1826.0, 0.000959, 0.192917, 0.00084, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[27.0, 54.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1657.0, 1793.0, 0.00298, 0.1364, 0.0013, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1650.0, 1834.0, 7e-06, 0.00569, 0.01386, 1260.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1648.0, 1834.0, 7e-06, 0.00569, 0.01386, 1260.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[35.0, 1834.0, 7e-06, 0.00569, 0.01386, 1260.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1682.0, 1858.0, 0.000527, 0.04415, 0.0034, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1681.0, 1858.0, 0.000527, 0.04415, 0.0034, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2115.0, 2118.0, 0.0029, 0.0762, 0.0, 300.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2111.0, 2117.0, 0.0045, 0.1801, 0.0, 90.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2104.0, 2012.0, 0.005505, 0.199524, 0.001512, 63.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1736.0, 2104.0, 0.006292, 0.268, 0.00075, 50.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1735.0, 2104.0, 0.006204, 0.268, 0.00075, 50.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1734.0, 2149.0, 0.002101, 0.056458, 0.014304, 240.0, 0.0,0.0,1.1, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1733.0, 2149.0, 0.001332, 0.059167, 0.008592, 240.0, 0.0,0.0,1.1, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1732.0, 2149.0, 0.001465, 0.057917, 0.009744, 240.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1694.0, 1936.0, 0.000531, 0.036378, 0.00407, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1693.0, 1936.0, 0.000531, 0.036378, 0.00407, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[25.0, 52.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1701.0, 1959.0, 0.000326, 0.0237, 0.0072, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1700.0, 1959.0, 0.000326, 0.0237, 0.0072, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1652.0, 1788.0, 0.003869, 0.14, 0.002, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1645.0, 1767.0, 0.0115, 0.2541, 0.0, 400.0, 0.0,0.0,1.025, 0.0,1.0,-30.0, 30.0, 0.1 ],
[24.0, 1767.0, 0.0115, 0.2541, 0.0, 400.0, 0.0,0.0,1.025, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1656.0, 1929.0, 0.002209, 0.100333, 2.4e-05, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[14.0, 1929.0, 0.002431, 0.116667, 6e-05, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1644.0, 1766.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[12.0, 1857.0, 0.000929, 0.054167, 0.00648, 240.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 1857.0, 0.000948, 0.054167, 0.00648, 240.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],
[11.0, 1857.0, 0.003124, 0.133, 0.0022, 100.0, 0.0,0.0,1.04546, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1691.0, 2013.0, 0.004251, 0.1313, 0.0015, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1662.0, 2013.0, 0.001786, 0.099067, 0.003675, 180.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1731.0, 2095.0, 0.001658, 0.068, 0.0046, 240.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1730.0, 2095.0, 0.001598, 0.0681, 0.004, 240.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1649.0, 1775.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[32.0, 1775.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1651.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1653.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1654.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1674.0, 1814.0, 0.0006, 0.0441, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[20.0, 37.0, 0.002851, 0.13, 0.00066, 100.0, 0.0,0.0,1.05852, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1668.0, 2182.0, 0.0029, 0.0694, 0.0107, 720.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1727.0, 2120.0, 0.000367, 0.023333, 0.0321, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1726.0, 2120.0, 0.000367, 0.023333, 0.0321, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1697.0, 1958.0, 0.000117, 0.023367, 0.01176, 720.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1643.0, 1765.0, 0.002379, 0.1292, 0.0029, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1725.0, 2071.0, 0.0013, 0.0643, 0.0, 240.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1724.0, 2071.0, 0.0013, 0.0643, 0.0, 240.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1710.0, 2071.0, 0.0013, 0.0643, 0.0, 240.0, 0.0,0.0,1.06818, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1672.0, 1843.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1671.0, 1843.0, 0.000575, 0.044846, 0.003081, 390.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1723.0, 2011.0, 0.005759, 0.207937, 0.001512, 32.0, 0.0,0.0,1.0375, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1722.0, 2180.0, 0.004, 0.119, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1721.0, 2180.0, 0.004, 0.119, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1720.0, 2180.0, 0.004, 0.119, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1719.0, 2180.0, 0.0054, 0.116, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1676.0, 1850.0, 0.000178, 0.053846, 0.0, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1675.0, 1850.0, 0.000178, 0.053846, 0.0, 260.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1718.0, 2045.0, 0.000218, 0.01863, 0.0, 120.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1717.0, 2046.0, 0.000218, 0.01827, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1692.0, 2045.0, 0.000175, 0.015526, 0.013338, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1663.0, 2045.0, 0.000175, 0.015526, 0.013338, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1709.0, 2195.0, 0.001558, 0.08475, 0.00336, 160.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1708.0, 2195.0, 0.001879, 0.088667, 0.00435, 160.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[5.0, 1764.0, 0.002083, 0.116667, 0.00156, 120.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[29.0, 56.0, 0.002914, 0.127, 0.0012, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2038.0, 2096.0, 0.0022, 0.114, 0.0, 120.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1661.0, 1805.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1699.0, 2229.0, 0.000375, 0.022667, 0.00294, 720.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1698.0, 2229.0, 0.001028, 0.046333, 0.0054, 720.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1714.0, 2158.0, 0.0008, 0.0461, 0.0, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1713.0, 2158.0, 0.0008, 0.0463, 0.0, 370.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1716.0, 2229.0, 0.0008, 0.0451, 0.0, 370.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1715.0, 2229.0, 0.0007, 0.0411, 0.0, 370.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1680.0, 1828.0, 0.002439, 0.111755, 0.000752, 120.0, 0.0,0.0,0.988943, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1641.0, 1762.0, 0.003175, 0.1308, 0.00239, 100.0, 0.0,0.0,1.05852, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1658.0, 1801.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[21.0, 38.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1667.0, 1836.0, 0.000318, 0.02355, 0.00108, 720.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1673.0, 1835.0, 0.000328, 0.023833, 0.00168, 720.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1712.0, 2027.0, 0.0006, 0.0348, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1711.0, 2027.0, 0.0006, 0.0348, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1749.0, 1969.0, 0.000223, 0.0195, 0.004392, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1748.0, 1969.0, 0.000228, 0.019319, 0.004248, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1684.0, 1860.0, 0.000526, 0.037775, 0.0028, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1683.0, 1860.0, 0.000528, 0.0378, 0.00236, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[22.0, 39.0, 0.000706, 0.0772, 0.00092, 100.0, 0.0,0.0,1.05852, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1660.0, 1803.0, 0.003032, 0.14, 0.0013, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1689.0, 1905.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[117.0, 1905.0, 0.002828, 0.141, 1e-05, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[110.0, 1905.0, 0.002841, 0.141, 1e-05, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[108.0, 1905.0, 0.002828, 0.141, 1e-05, 100.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1688.0, 1904.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],
[118.0, 1904.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],
[111.0, 1904.0, 0.00297, 0.137, 0.0027, 100.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],
[107.0, 1904.0, 0.00297, 0.137, 0.0027, 50.0, 0.0,0.0,1.075, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1751.0, 1902.0, 0.000223, 0.0195, 0.004176, 720.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1750.0, 1902.0, 0.000219, 0.019278, 0.00432, 720.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2194.0, 1633.0, 0.002, 0.0983, 0.0, 150.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1635.0, 1633.0, 0.0014, 0.0563, 0.0, 150.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1634.0, 1633.0, 0.0009, -0.003, 0.0, 75.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2194.0, 1631.0, 0.002, 0.0997, 0.0, 150.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1635.0, 1631.0, 0.0014, 0.0567, 0.0, 150.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1632.0, 1631.0, 0.0008, -0.0033, 0.0, 75.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2194.0, 1628.0, 0.001271, 0.096333, 0.00115, 150.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1630.0, 1628.0, 0.001185, 0.057, 0.00115, 150.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1629.0, 1628.0, 0.001033, -0.005, 0.00115, 75.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1965.0, 1587.0, 6.7e-05, 0.018139, 0.00103533, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2231.0, 1587.0, 5.6e-05, -0.00171, 0.00103533, 1002.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1964.0, 1587.0, 0.000397, 0.03773, 0.00103533, 270.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1961.0, 1586.0, 6.4e-05, 0.01821, 0.00103533, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1962.0, 1586.0, 5.9e-05, -0.00176, 0.00103533, 1002.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1963.0, 1586.0, 0.000397, 0.037788, 0.00103533, 270.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2002.0, 1627.0, 8.6e-05, 0.01918, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1999.0, 1627.0, 8.8e-05, -0.00199, 0.0, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1997.0, 1627.0, 0.000652, 0.04874, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2001.0, 1626.0, 8.6e-05, 0.01918, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1998.0, 1626.0, 8.8e-05, -0.00199, 0.0, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1996.0, 1626.0, 0.000652, 0.04874, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1970.0, 1592.0, 6.6e-05, 0.018757, 0.00120233, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1592.0, 5.9e-05, -0.00301, 0.00120233, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1864.0, 1592.0, 0.000397, 0.038328, 0.00120233, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1972.0, 1591.0, 6.6e-05, 0.018757, 0.00126933, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2221.0, 1591.0, 5.9e-05, -0.00301, 0.00126933, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1863.0, 1591.0, 0.000397, 0.038328, 0.00126933, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1772.0, 1556.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 1556.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1759.0, 1556.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1772.0, 1555.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1770.0, 1555.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1758.0, 1555.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,0.0,-30.0, 30.0, 0.1 ],
[1855.0, 1584.0, 8.3e-05, 0.021439, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1856.0, 1584.0, 6.5e-05, -0.00326, 0.0, 400.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1957.0, 1584.0, 0.000454, 0.038229, 0.0, 400.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1813.0, 1570.0, 7.8e-05, 0.018807, 0.001336, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1812.0, 1570.0, 5.7e-05, -0.00212, 0.001336, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1811.0, 1570.0, 0.000428, 0.033328, 0.001336, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1886.0, 1573.0, 6.3e-05, 0.018623, 0.00153633, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1887.0, 1573.0, 6.3e-05, -0.00257, 0.00153633, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1884.0, 1573.0, 0.000381, 0.035269, 0.00153633, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1927.0, 1578.0, 5.8e-05, 0.017275, 0.002004, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1925.0, 1578.0, 6.9e-05, -0.00173, 0.002004, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1885.0, 1578.0, 0.000349, 0.039152, 0.002004, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2143.0, 1624.0, 0.000125, 0.02587, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2150.0, 1624.0, 9.2e-05, -0.00513, 0.0, 750.0, 0.0,0.0,1.07273, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1625.0, 1624.0, 0.000505, 0.04532, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2138.0, 1622.0, 0.000228, 0.02372, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2147.0, 1622.0, 0.000123, -0.00264, 0.0, 750.0, 0.0,0.0,1.06818, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1623.0, 1622.0, 0.000586, 0.02816, 0.0, 240.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1790.0, 1564.0, 9.6e-05, 0.0209, 0.002, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1564.0, 7.9e-05, -0.00277, 0.002, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1565.0, 1564.0, 0.000524, 0.052407, 0.002, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1790.0, 1563.0, 9.6e-05, 0.0209, 0.002, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2094.0, 1563.0, 7.9e-05, -0.00277, 0.002, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1565.0, 1563.0, 0.000524, 0.052407, 0.002, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2152.0, 1619.0, 0.00085, 0.01, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1621.0, 1619.0, 0.0048, 0.1195, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1620.0, 1619.0, 0.0027, 0.1195, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1875.0, 1590.0, 8e-05, 0.01881, 0.0, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1874.0, 1590.0, 0.00277, -0.00232, 0.0, 1002.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1873.0, 1590.0, 0.0004, 0.037, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1974.0, 1572.0, 8e-06, 0.018685, 0.00153333, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2202.0, 1572.0, -1e-05, -0.0033, 0.00153333, 10000.0, 0.0,0.0,1.01932, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1872.0, 1572.0, 0.000442, 0.039535, 0.00153333, 300.0, 0.0,0.0,0.978409, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2082.0, 1618.0, 0.000117, 0.02364, 0.00205, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2089.0, 1618.0, 4.2e-05, -0.00236, 0.00205, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2078.0, 1618.0, 0.000345, 0.031, 0.00205, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2083.0, 1617.0, 6.6e-05, 0.022113, 0.001075, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1617.0, 9e-05, -0.00185, 0.001075, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2077.0, 1617.0, 0.000509, 0.047513, 0.001075, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2080.0, 1616.0, 0.000115, 0.022847, 0.00225, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2088.0, 1616.0, 0.000118, -0.00186, 0.00225, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2076.0, 1616.0, 0.000507, 0.03022, 0.00225, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1786.0, 1562.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 1562.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1755.0, 1562.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1786.0, 1561.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1785.0, 1561.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1754.0, 1561.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1868.0, 1615.0, 0.000105, 0.01782, 0.003375, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1867.0, 1615.0, 5.8e-05, -0.00247, 0.003375, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2072.0, 1615.0, 0.000494, 0.030927, 0.003375, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1866.0, 1614.0, 7.9e-05, 0.019153, 0.00145, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1865.0, 1614.0, 6.4e-05, -0.00314, 0.00145, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2007.0, 1614.0, 0.000335, 0.030553, 0.00145, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1799.0, 1568.0, 7.8e-05, 0.018079, 0.001336, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1568.0, 4.9e-05, -0.00241, 0.001336, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1569.0, 1568.0, 0.000403, 0.038458, 0.001336, 300.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1798.0, 1566.0, 7.4e-05, 0.018598, 0.001837, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1797.0, 1566.0, 5.3e-05, -0.00316, 0.001837, 1002.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1567.0, 1566.0, 0.000378, 0.039316, 0.001837, 300.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2013.0, 1611.0, 0.001709, 0.13125, 0.000972, 120.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1613.0, 1611.0, 0.001024, 0.070417, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1612.0, 1611.0, 0.001075, -0.00625, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2013.0, 1608.0, 0.0021, 0.1588, 0.000972, 120.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1610.0, 1608.0, 0.0012, 0.0852, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1609.0, 1608.0, 0.0013, 0.0063, 0.000972, 120.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1960.0, 1585.0, 7.3e-05, 0.018815, 0.00096667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1585.0, 6e-05, -0.00139, 0.00096667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1881.0, 1585.0, 0.000405, 0.037565, 0.00096667, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[123.0, 1583.0, 7.4e-05, 0.018955, 0.00096667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1920.0, 1583.0, 6.1e-05, -0.00145, 0.00096667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1808.0, 1583.0, 0.000406, 0.037395, 0.00096667, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2056.0, 1607.0, 8.6e-05, 0.012, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2061.0, 1607.0, 8.4e-05, 0.0052, 0.0, 750.0, 0.0,0.0,1.07045, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2055.0, 1607.0, 0.00064, 0.0098, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2057.0, 1588.0, 8.2e-05, 0.01899, 0.0, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2062.0, 1588.0, 9.5e-05, 0.00187, 0.0, 750.0, 0.0,0.0,1.07045, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1967.0, 1588.0, 0.000595, 0.04896, 0.0, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2050.0, 1606.0, 0.000124, 0.026467, 0.003, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2054.0, 1606.0, 8.8e-05, -0.00659, 0.003, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2049.0, 1606.0, 0.000433, 0.03668, 0.003, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2019.0, 1605.0, 6.9e-05, 0.01806, 0.000725, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2018.0, 1605.0, 8.7e-05, -0.00197, 0.000725, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2017.0, 1605.0, 0.000344, 0.03106, 0.000725, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2081.0, 1576.0, 5.9e-05, 0.017137, 0.0009, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2052.0, 1576.0, 7.4e-05, -0.0013, 0.0009, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1880.0, 1576.0, 0.000392, 0.036947, 0.0009, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 1604.0, 8.3e-05, 0.019047, 0.001425, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2124.0, 1604.0, 6.1e-05, -0.00317, 0.001425, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1878.0, 1604.0, 0.000339, 0.031247, 0.001425, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2230.0, 1582.0, 6e-05, 0.017225, 0.00096667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2014.0, 1582.0, 7.3e-05, -0.00129, 0.00096667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1877.0, 1582.0, 0.000392, 0.036925, 0.00096667, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1773.0, 1558.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1769.0, 1558.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1761.0, 1558.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1773.0, 1557.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1769.0, 1557.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1760.0, 1557.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1787.0, 8.0, 0.000881, 0.085611, 0.000444, 180.0, 0.0,0.0,1.0625, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1646.0, 8.0, 0.000767, -0.00617, 0.000444, 180.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[10.0, 8.0, 9.1e-05, 0.051056, 0.000444, 90.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1787.0, 7.0, 0.000881, 0.085611, 0.000444, 180.0, 0.0,0.0,1.0625, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1647.0, 7.0, 0.000767, -0.00617, 0.000444, 180.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[9.0, 7.0, 9.1e-05, 0.051056, 0.000444, 90.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2046.0, 1603.0, 0.0, 0.04475, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1935.0, 1603.0, 0.0, -0.00462, 0.0, 400.0, 0.0,0.0,1.0725, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2043.0, 1603.0, 0.0, 0.07026, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2096.0, 1601.0, 0.0018, 0.1243, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1602.0, 1601.0, 0.0015, 0.0698, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2041.0, 1601.0, 0.0014, -0.0077, 0.0, 400.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2179.0, 1598.0, 0.0063, 0.2671, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1600.0, 1598.0, 0.0058, 0.1401, 0.0, 400.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1599.0, 1598.0, 0.003, -0.0097, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2179.0, 1596.0, 0.0063, 0.2652, 0.0, 400.0, 0.0,0.0,1.1, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1600.0, 1596.0, 0.0059, 0.1419, 0.0, 400.0, 0.0,0.0,1.04545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1597.0, 1596.0, 0.0028, -0.0079, 0.0, 400.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1895.0, 1575.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1893.0, 1575.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1890.0, 1575.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1892.0, 1574.0, 9.1e-05, 0.02099, 0.0, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1891.0, 1574.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1889.0, 1574.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2033.0, 1595.0, 8.5e-05, 0.01857, 0.00183333, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2035.0, 1595.0, 4.7e-05, -0.00287, 0.00183333, 1000.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2031.0, 1595.0, 0.000426, 0.03594, 0.00183333, 300.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1981.0, 1593.0, 7.3e-05, 0.0163, 0.001, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1980.0, 1593.0, 5.4e-05, -0.001, 0.001, 1000.0, 0.0,0.0,1.09773, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1979.0, 1593.0, 0.000377, 0.03705, 0.001, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2023.0, 1594.0, 0.000116, 0.018433, 0.002075, 750.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2025.0, 1594.0, 7.4e-05, -0.00326, 0.002075, 750.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2022.0, 1594.0, 0.000476, 0.032887, 0.002075, 240.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2024.0, 1589.0, 6.4e-05, 0.016337, 0.00120233, 1002.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2228.0, 1589.0, 6.3e-05, -0.0024, 0.00120233, 1002.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1862.0, 1589.0, 0.000244, 0.030978, 0.00120233, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1899.0, 1581.0, 8.5e-05, 0.018221, 0.001275, 750.0, 0.0,0.0,1.072, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1581.0, 8.5e-05, -0.00243, 0.001275, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1879.0, 1581.0, -9e-05, 0.041486, 0.001275, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1899.0, 1579.0, 8.4e-05, 0.018087, 0.00135, 750.0, 0.0,0.0,1.072, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1923.0, 1579.0, 8.4e-05, -0.00222, 0.00135, 750.0, 0.0,0.0,1.07159, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1580.0, 1579.0, -8e-05, 0.04158, 0.00135, 240.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1771.0, 1560.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1560.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1757.0, 1560.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1771.0, 1559.0, 9.1e-05, 0.02099, 0.0, 10000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1768.0, 1559.0, 6.7e-05, -0.00349, 0.0, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1756.0, 1559.0, 0.00037, 0.03445, 0.0, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1853.0, 1571.0, 6.1e-05, 0.01713, 0.00126667, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1852.0, 1571.0, 7.3e-05, -0.00142, 0.00126667, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1851.0, 1571.0, 0.000408, 0.0376, 0.00126667, 330.0, 0.0,0.0,1.0, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1926.0, 1577.0, 5e-05, 0.01767, 0.00133333, 1000.0, 0.0,0.0,1.05, 0.0,1.0,-30.0, 30.0, 0.1 ],
[2196.0, 1577.0, 7e-05, -0.00193, 0.00133333, 1000.0, 0.0,0.0,1.04546, 0.0,1.0,-30.0, 30.0, 0.1 ],
[1882.0, 1577.0, 0.000396, 0.03757, 0.00133333, 330.0, 0.0,0.0,0.954545, 0.0,1.0,-30.0, 30.0, 0.1 ]
])
ppc["gencost"] = array([
[2.0, 0.0, 0.0, 3.0, 0.0, 44.0, 0.0, 66.0, 33.0, 52.8, 26.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 44.0, 0.0, 66.0, 33.0, 52.8, 26.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 50.0, 0.0, 75.0, 37.5, 60.0, 30.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 62.8, 0.0, 94.2, 47.1, 75.36, 37.68 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 30.0, 0.0, 45.0, 22.5, 36.0, 18.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 30.0, 0.0, 45.0, 22.5, 36.0, 18.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 140.0, 0.0, 210.0, 105.0, 168.0, 84.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 400.0, 0.0, 600.0, 300.0, 480.0, 240.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 400.0, 0.0, 600.0, 300.0, 480.0, 240.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 64.0, 0.0, 96.0, 48.0, 76.8, 38.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 64.0, 0.0, 96.0, 48.0, 76.8, 38.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 64.0, 0.0, 96.0, 48.0, 76.8, 38.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 29.0, 0.0, 43.5, 21.75, 34.8, 17.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 29.0, 0.0, 43.5, 21.75, 34.8, 17.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 29.0, 0.0, 43.5, 21.75, 34.8, 17.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 14.4, 0.0, 21.6, 10.8, 17.28, 8.64 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 14.4, 0.0, 21.6, 10.8, 17.28, 8.64 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 16.8, 0.0, 25.2, 12.6, 20.16, 10.08 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 16.8, 0.0, 25.2, 12.6, 20.16, 10.08 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 48.0, 0.0, 72.0, 36.0, 57.6, 28.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 48.0, 0.0, 72.0, 36.0, 57.6, 28.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 40.0, 0.0, 60.0, 30.0, 48.0, 24.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 60.0, 0.0, 90.0, 45.0, 72.0, 36.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 60.0, 0.0, 90.0, 45.0, 72.0, 36.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 84.0, 0.0, 126.0, 63.0, 100.8, 50.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 39.6, 0.0, 59.4, 29.7, 47.52, 23.76 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 80.0, 0.0, 120.0, 60.0, 96.0, 48.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 20.0, 0.0, 30.0, 15.0, 24.0, 12.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 36.0, 0.0, 54.0, 27.0, 43.2, 21.6 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 62.8, 0.0, 94.2, 47.1, 75.36, 37.68 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 62.8, 0.0, 94.2, 47.1, 75.36, 37.68 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 320.0, 0.0, 480.0, 240.0, 384.0, 192.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 320.0, 0.0, 480.0, 240.0, 384.0, 192.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 403.2, 0.0, 604.8, 302.4, 483.84, 241.92 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 403.2, 0.0, 604.8, 302.4, 483.84, 241.92 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 54.0, 0.0, 81.0, 40.5, 64.8, 32.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 54.0, 0.0, 81.0, 40.5, 64.8, 32.4 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 18.0, 0.0, 27.0, 13.5, 21.6, 10.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 20.0, 0.0, 30.0, 15.0, 24.0, 12.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 20.0, 0.0, 30.0, 15.0, 24.0, 12.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 120.0, 0.0, 180.0, 90.0, 144.0, 72.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 208.0, 0.0, 312.0, 156.0, 249.6, 124.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 0.0, 0.0, 12.0, 6.0, 9.6, 4.8 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ],
[2.0, 0.0, 0.0, 3.0, 0.0, 240.0, 0.0, 360.0, 180.0, 288.0, 144.0 ]
])
return ppc | []
| []
| []
| [] | [] | python | null | null | null |
ravager/services/aria/updater.py | import os
from ravager.services.aria import aria2c
from ravager.helpers.humanize import humanize
from ravager.celery_tasks.tasks import app
from ravager.database.tasks import Tasks
from ravager.database.helpers.structs import OpsDataStruct
from ravager.config import TIMEZONE
from telegram import Bot
from datetime import datetime, timedelta
import pytz
import logging
logger = logging.getLogger(__file__)
class Updater:
def __init__(self):
self.gid = None
self.task = None
self.bot = None
@app.task(name="download_updater", bind=True)
def download_updater(self, gid, msg_id, source_msg_id):
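        """Celery task that tracks an aria2 download identified by ``gid`` and keeps
        the Telegram progress message ``msg_id`` updated for the stored task looked
        up via ``source_msg_id``."""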
self.bot = Bot(token=os.environ.get("BOT_TOKEN"))
print(gid)
status = aria2c.tell_status(gid=gid)
print(status)
self.task = OpsDataStruct()
self.task.source_msg_id = str(source_msg_id)
self.task = Tasks(task=self.task).get_task()
chat_id = str(self.task.user_id)
self.task.file_path = status["dir"]
Tasks(task=self.task).set_task()
old = 0
old_time = datetime.now(pytz.timezone(TIMEZONE))
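        # Poll aria2 for the download status until it leaves the "active" state,
        # editing the Telegram progress message at most once every 5 seconds.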
while True:
status = aria2c.tell_status(gid=gid)
current_status = status["status"]
match current_status:
case "active":
total = humanize(int(status["totalLength"]))
completed = humanize(int(status["completedLength"]))
dl_speed = humanize(int(status["downloadSpeed"]))
difference = timedelta(seconds=5)
if total.size > 0:
now = datetime.now(pytz.timezone(TIMEZONE))
percent = (completed.original * 100) // total.original
update_text = f"{percent}% completed | remaining {completed.size:.2f} {completed.unit}/{total.size:.2f} {total.unit} | " \
f"Downloading at {dl_speed.size:.2f} {dl_speed.unit}ps "
len_update_text = len(update_text.encode("utf-8"))
if (percent != old) and (now - old_time > difference) and (len_update_text < 512):
try:
self.bot.edit_message_text(chat_id=chat_id, message_id=msg_id,
text=update_text)
old_time = now
except Exception as e:
logger.error(e)
finally:
old = percent
case "paused":
self.bot.delete_message(chat_id=chat_id, message_id=msg_id)
break
case "waiting":
self.bot.delete_message(chat_id=chat_id, message_id=msg_id)
break
case "error":
self.bot.delete_message(chat_id=chat_id, message_id=msg_id)
break
case "removed":
self.bot.delete_message(chat_id=chat_id, message_id=msg_id)
break
case "complete":
self.bot.delete_message(chat_id=chat_id, message_id=msg_id)
break
case _:
self.bot.delete_message(chat_id=chat_id, message_id=msg_id)
break
| []
| []
| ["BOT_TOKEN"]
| [] | ["BOT_TOKEN"] | python | 1 | 0 | |
tests/create_config.py | import os
from click.testing import CliRunner
from bin.throne import cli as throne
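# CLI smoke tests that create the throne config: API credentials are read from the
# environment (THRONE_USER, THRONE_PASS, SHODAN_KEY) and fed to the click test runner.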
runner = CliRunner()
shodan_key = os.getenv('SHODAN_KEY')
throne_user = os.getenv('THRONE_USER')
throne_pass = os.getenv('THRONE_PASS')
def test_throne_setapi():
print("Testing: throne api setapi")
response = runner.invoke(throne, ["api", "setapi", "-u", f"{throne_user}", "-p", f"{throne_pass}"])
assert response.exit_code == 0
assert "Successfully set throne API key." in response.output
def test_shodan_setapi():
print("Testing: throne shodan setapi")
response = runner.invoke(throne, ["shodan", "setapi"], input=f"{shodan_key}")
assert response.exit_code == 0
assert "Successfully set Shodan API key." in response.output | []
| []
| ["THRONE_USER", "SHODAN_KEY", "THRONE_PASS"]
| [] | ["THRONE_USER", "SHODAN_KEY", "THRONE_PASS"] | python | 3 | 0 | |
docker/configure_workers_and_start.py | #!/usr/bin/env python
# Copyright 2021 The Matrix.org Foundation C.I.C.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script reads environment variables and generates a shared Synapse worker,
# nginx and supervisord configs depending on the workers requested.
#
# The environment variables it reads are:
# * SYNAPSE_SERVER_NAME: The desired server_name of the homeserver.
# * SYNAPSE_REPORT_STATS: Whether to report stats.
# * SYNAPSE_WORKER_TYPES: A comma separated list of worker names as specified in WORKER_CONFIG
# below. Leave empty for no workers, or set to '*' for all possible workers.
# * SYNAPSE_AS_REGISTRATION_DIR: If specified, a directory in which .yaml and .yml files
# will be treated as Application Service registration files.
# * SYNAPSE_TLS_CERT: Path to a TLS certificate in PEM format.
# * SYNAPSE_TLS_KEY: Path to a TLS key. If this and SYNAPSE_TLS_CERT are specified,
# Nginx will be configured to serve TLS on port 8448.
#
# NOTE: According to Complement's ENTRYPOINT expectations for a homeserver image (as defined
# in the project's README), this script may be run multiple times, and functionality should
# continue to work if so.
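#
# Example (illustrative values only, not taken from a real deployment) of the
# environment this script might be invoked with; worker type names must match
# keys of WORKERS_CONFIG below:
#   SYNAPSE_SERVER_NAME=example.com
#   SYNAPSE_REPORT_STATS=no
#   SYNAPSE_WORKER_TYPES=synchrotron,federation_sender,media_repository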
import os
import subprocess
import sys
from pathlib import Path
from typing import Any, Dict, List, Mapping, MutableMapping, NoReturn, Set
import jinja2
import yaml
MAIN_PROCESS_HTTP_LISTENER_PORT = 8080
WORKERS_CONFIG: Dict[str, Dict[str, Any]] = {
"pusher": {
"app": "synapse.app.pusher",
"listener_resources": [],
"endpoint_patterns": [],
"shared_extra_conf": {"start_pushers": False},
"worker_extra_conf": "",
},
"user_dir": {
"app": "synapse.app.user_dir",
"listener_resources": ["client"],
"endpoint_patterns": [
"^/_matrix/client/(api/v1|r0|v3|unstable)/user_directory/search$"
],
"shared_extra_conf": {"update_user_directory": False},
"worker_extra_conf": "",
},
"media_repository": {
"app": "synapse.app.media_repository",
"listener_resources": ["media"],
"endpoint_patterns": [
"^/_matrix/media/",
"^/_synapse/admin/v1/purge_media_cache$",
"^/_synapse/admin/v1/room/.*/media.*$",
"^/_synapse/admin/v1/user/.*/media.*$",
"^/_synapse/admin/v1/media/.*$",
"^/_synapse/admin/v1/quarantine_media/.*$",
],
"shared_extra_conf": {"enable_media_repo": False},
"worker_extra_conf": "enable_media_repo: true",
},
"appservice": {
"app": "synapse.app.generic_worker",
"listener_resources": [],
"endpoint_patterns": [],
"shared_extra_conf": {"notify_appservices_from_worker": "appservice"},
"worker_extra_conf": "",
},
"federation_sender": {
"app": "synapse.app.federation_sender",
"listener_resources": [],
"endpoint_patterns": [],
"shared_extra_conf": {"send_federation": False},
"worker_extra_conf": "",
},
"synchrotron": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client"],
"endpoint_patterns": [
"^/_matrix/client/(v2_alpha|r0|v3)/sync$",
"^/_matrix/client/(api/v1|v2_alpha|r0|v3)/events$",
"^/_matrix/client/(api/v1|r0|v3)/initialSync$",
"^/_matrix/client/(api/v1|r0|v3)/rooms/[^/]+/initialSync$",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"federation_reader": {
"app": "synapse.app.generic_worker",
"listener_resources": ["federation"],
"endpoint_patterns": [
"^/_matrix/federation/(v1|v2)/event/",
"^/_matrix/federation/(v1|v2)/state/",
"^/_matrix/federation/(v1|v2)/state_ids/",
"^/_matrix/federation/(v1|v2)/backfill/",
"^/_matrix/federation/(v1|v2)/get_missing_events/",
"^/_matrix/federation/(v1|v2)/publicRooms",
"^/_matrix/federation/(v1|v2)/query/",
"^/_matrix/federation/(v1|v2)/make_join/",
"^/_matrix/federation/(v1|v2)/make_leave/",
"^/_matrix/federation/(v1|v2)/send_join/",
"^/_matrix/federation/(v1|v2)/send_leave/",
"^/_matrix/federation/(v1|v2)/invite/",
"^/_matrix/federation/(v1|v2)/query_auth/",
"^/_matrix/federation/(v1|v2)/event_auth/",
"^/_matrix/federation/(v1|v2)/exchange_third_party_invite/",
"^/_matrix/federation/(v1|v2)/user/devices/",
"^/_matrix/federation/(v1|v2)/get_groups_publicised$",
"^/_matrix/key/v2/query",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"federation_inbound": {
"app": "synapse.app.generic_worker",
"listener_resources": ["federation"],
"endpoint_patterns": ["/_matrix/federation/(v1|v2)/send/"],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"event_persister": {
"app": "synapse.app.generic_worker",
"listener_resources": ["replication"],
"endpoint_patterns": [],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"background_worker": {
"app": "synapse.app.generic_worker",
"listener_resources": [],
"endpoint_patterns": [],
# This worker cannot be sharded. Therefore there should only ever be one background
# worker, and it should be named background_worker1
"shared_extra_conf": {"run_background_tasks_on": "background_worker1"},
"worker_extra_conf": "",
},
"event_creator": {
"app": "synapse.app.generic_worker",
"listener_resources": ["client"],
"endpoint_patterns": [
"^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/redact",
"^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/send",
"^/_matrix/client/(api/v1|r0|v3|unstable)/rooms/.*/(join|invite|leave|ban|unban|kick)$",
"^/_matrix/client/(api/v1|r0|v3|unstable)/join/",
"^/_matrix/client/(api/v1|r0|v3|unstable)/profile/",
"^/_matrix/client/(v1|unstable/org.matrix.msc2716)/rooms/.*/batch_send",
],
"shared_extra_conf": {},
"worker_extra_conf": "",
},
"frontend_proxy": {
"app": "synapse.app.frontend_proxy",
"listener_resources": ["client", "replication"],
"endpoint_patterns": ["^/_matrix/client/(api/v1|r0|v3|unstable)/keys/upload"],
"shared_extra_conf": {},
"worker_extra_conf": (
"worker_main_http_uri: http://127.0.0.1:%d"
% (MAIN_PROCESS_HTTP_LISTENER_PORT,)
),
},
}
# Templates for sections that may be inserted multiple times in config files
SUPERVISORD_PROCESS_CONFIG_BLOCK = """
[program:synapse_{name}]
command=/usr/local/bin/prefix-log /usr/local/bin/python -m {app} \
--config-path="{config_path}" \
--config-path=/conf/workers/shared.yaml \
--config-path=/conf/workers/{name}.yaml
autorestart=unexpected
priority=500
exitcodes=0
stdout_logfile=/dev/stdout
stdout_logfile_maxbytes=0
stderr_logfile=/dev/stderr
stderr_logfile_maxbytes=0
"""
NGINX_LOCATION_CONFIG_BLOCK = """
location ~* {endpoint} {{
proxy_pass {upstream};
proxy_set_header X-Forwarded-For $remote_addr;
proxy_set_header X-Forwarded-Proto $scheme;
proxy_set_header Host $host;
}}
"""
NGINX_UPSTREAM_CONFIG_BLOCK = """
upstream {upstream_worker_type} {{
{body}
}}
"""
# Utility functions
def log(txt: str) -> None:
"""Log something to the stdout.
Args:
txt: The text to log.
"""
print(txt)
def error(txt: str) -> NoReturn:
"""Log something and exit with an error code.
Args:
txt: The text to log in error.
"""
log(txt)
sys.exit(2)
def convert(src: str, dst: str, **template_vars: object) -> None:
"""Generate a file from a template
Args:
src: Path to the input file.
dst: Path to write to.
template_vars: The arguments to replace placeholder variables in the template with.
"""
# Read the template file
with open(src) as infile:
template = infile.read()
# Generate a string from the template. We disable autoescape to prevent template
# variables from being escaped.
rendered = jinja2.Template(template, autoescape=False).render(**template_vars)
# Write the generated contents to a file
#
# We use append mode in case the files have already been written to by something else
# (for instance, as part of the instructions in a dockerfile).
with open(dst, "a") as outfile:
# In case the existing file doesn't end with a newline
outfile.write("\n")
outfile.write(rendered)
def add_sharding_to_shared_config(
shared_config: dict,
worker_type: str,
worker_name: str,
worker_port: int,
) -> None:
"""Given a dictionary representing a config file shared across all workers,
append sharded worker information to it for the current worker_type instance.
Args:
shared_config: The config dict that all worker instances share (after being converted to YAML)
worker_type: The type of worker (one of those defined in WORKERS_CONFIG).
worker_name: The name of the worker instance.
worker_port: The HTTP replication port that the worker instance is listening on.
"""
# The instance_map config field marks the workers that write to various replication streams
instance_map = shared_config.setdefault("instance_map", {})
# Worker-type specific sharding config
if worker_type == "pusher":
shared_config.setdefault("pusher_instances", []).append(worker_name)
elif worker_type == "federation_sender":
shared_config.setdefault("federation_sender_instances", []).append(worker_name)
elif worker_type == "event_persister":
# Event persisters write to the events stream, so we need to update
# the list of event stream writers
shared_config.setdefault("stream_writers", {}).setdefault("events", []).append(
worker_name
)
# Map of stream writer instance names to host/ports combos
instance_map[worker_name] = {
"host": "localhost",
"port": worker_port,
}
elif worker_type == "media_repository":
# The first configured media worker will run the media background jobs
shared_config.setdefault("media_instance_running_background_jobs", worker_name)
def generate_base_homeserver_config() -> None:
"""Starts Synapse and generates a basic homeserver config, which will later be
modified for worker support.
Raises: CalledProcessError if calling start.py returned a non-zero exit code.
"""
# start.py already does this for us, so just call that.
# note that this script is copied in in the official, monolith dockerfile
os.environ["SYNAPSE_HTTP_PORT"] = str(MAIN_PROCESS_HTTP_LISTENER_PORT)
subprocess.check_output(["/usr/local/bin/python", "/start.py", "migrate_config"])
def generate_worker_files(
environ: Mapping[str, str], config_path: str, data_dir: str
) -> None:
"""Read the desired list of workers from environment variables and generate
shared homeserver, nginx and supervisord configs.
Args:
environ: os.environ instance.
config_path: The location of the generated Synapse main worker config file.
data_dir: The location of the synapse data directory. Where log and
user-facing config files live.
"""
# Note that yaml cares about indentation, so care should be taken to insert lines
# into files at the correct indentation below.
# shared_config is the contents of a Synapse config file that will be shared amongst
# the main Synapse process as well as all workers.
# It is intended mainly for disabling functionality when certain workers are spun up,
# and adding a replication listener.
# First read the original config file and extract the listeners block. Then we'll add
# another listener for replication. Later we'll write out the result to the shared
# config file.
listeners = [
{
"port": 9093,
"bind_address": "127.0.0.1",
"type": "http",
"resources": [{"names": ["replication"]}],
}
]
with open(config_path) as file_stream:
original_config = yaml.safe_load(file_stream)
original_listeners = original_config.get("listeners")
if original_listeners:
listeners += original_listeners
# The shared homeserver config. The contents of which will be inserted into the
# base shared worker jinja2 template.
#
# This config file will be passed to all workers, included Synapse's main process.
shared_config: Dict[str, Any] = {"listeners": listeners}
# The supervisord config. The contents of which will be inserted into the
# base supervisord jinja2 template.
#
# Supervisord will be in charge of running everything, from redis to nginx to Synapse
# and all of its worker processes. Load the config template, which defines a few
# services that are necessary to run.
supervisord_config = ""
# Upstreams for load-balancing purposes. This dict takes the form of a worker type to the
# ports of each worker. For example:
# {
# worker_type: {1234, 1235, ...}}
# }
# and will be used to construct 'upstream' nginx directives.
nginx_upstreams: Dict[str, Set[int]] = {}
# A map of: {"endpoint": "upstream"}, where "upstream" is a str representing what will be
# placed after the proxy_pass directive. The main benefit to representing this data as a
# dict over a str is that we can easily deduplicate endpoints across multiple instances
# of the same worker.
#
# An nginx site config that will be amended to depending on the workers that are
# spun up. To be placed in /etc/nginx/conf.d.
nginx_locations = {}
# Read the desired worker configuration from the environment
worker_types_env = environ.get("SYNAPSE_WORKER_TYPES")
if worker_types_env is None:
# No workers, just the main process
worker_types = []
else:
# Split type names by comma
worker_types = worker_types_env.split(",")
# Create the worker configuration directory if it doesn't already exist
os.makedirs("/conf/workers", exist_ok=True)
# Start worker ports from this arbitrary port
worker_port = 18009
# A counter of worker_type -> int. Used for determining the name for a given
# worker type when generating its config file, as each worker's name is just
# worker_type + instance #
worker_type_counter: Dict[str, int] = {}
# A list of internal endpoints to healthcheck, starting with the main process
# which exists even if no workers do.
healthcheck_urls = ["http://localhost:8080/health"]
# For each worker type specified by the user, create config values
for worker_type in worker_types:
worker_type = worker_type.strip()
worker_config = WORKERS_CONFIG.get(worker_type)
if worker_config:
worker_config = worker_config.copy()
else:
log(worker_type + " is an unknown worker type! It will be ignored")
continue
new_worker_count = worker_type_counter.setdefault(worker_type, 0) + 1
worker_type_counter[worker_type] = new_worker_count
# Name workers by their type concatenated with an incrementing number
# e.g. federation_reader1
worker_name = worker_type + str(new_worker_count)
worker_config.update(
{"name": worker_name, "port": str(worker_port), "config_path": config_path}
)
# Update the shared config with any worker-type specific options
shared_config.update(worker_config["shared_extra_conf"])
healthcheck_urls.append("http://localhost:%d/health" % (worker_port,))
# Check if more than one instance of this worker type has been specified
worker_type_total_count = worker_types.count(worker_type)
if worker_type_total_count > 1:
# Update the shared config with sharding-related options if necessary
add_sharding_to_shared_config(
shared_config, worker_type, worker_name, worker_port
)
# Enable the worker in supervisord
supervisord_config += SUPERVISORD_PROCESS_CONFIG_BLOCK.format_map(worker_config)
# Add nginx location blocks for this worker's endpoints (if any are defined)
for pattern in worker_config["endpoint_patterns"]:
# Determine whether we need to load-balance this worker
if worker_type_total_count > 1:
# Create or add to a load-balanced upstream for this worker
nginx_upstreams.setdefault(worker_type, set()).add(worker_port)
# Upstreams are named after the worker_type
upstream = "http://" + worker_type
else:
upstream = "http://localhost:%d" % (worker_port,)
# Note that this endpoint should proxy to this upstream
nginx_locations[pattern] = upstream
# Write out the worker's logging config file
log_config_filepath = generate_worker_log_config(environ, worker_name, data_dir)
# Then a worker config file
convert(
"/conf/worker.yaml.j2",
"/conf/workers/{name}.yaml".format(name=worker_name),
**worker_config,
worker_log_config_filepath=log_config_filepath,
)
worker_port += 1
# Build the nginx location config blocks
nginx_location_config = ""
for endpoint, upstream in nginx_locations.items():
nginx_location_config += NGINX_LOCATION_CONFIG_BLOCK.format(
endpoint=endpoint,
upstream=upstream,
)
# Determine the load-balancing upstreams to configure
nginx_upstream_config = ""
for upstream_worker_type, upstream_worker_ports in nginx_upstreams.items():
body = ""
for port in upstream_worker_ports:
body += " server localhost:%d;\n" % (port,)
# Add to the list of configured upstreams
nginx_upstream_config += NGINX_UPSTREAM_CONFIG_BLOCK.format(
upstream_worker_type=upstream_worker_type,
body=body,
)
# Finally, we'll write out the config files.
# log config for the master process
master_log_config = generate_worker_log_config(environ, "master", data_dir)
shared_config["log_config"] = master_log_config
# Find application service registrations
appservice_registrations = None
appservice_registration_dir = os.environ.get("SYNAPSE_AS_REGISTRATION_DIR")
if appservice_registration_dir:
# Scan for all YAML files that should be application service registrations.
appservice_registrations = [
str(reg_path.resolve())
for reg_path in Path(appservice_registration_dir).iterdir()
if reg_path.suffix.lower() in (".yaml", ".yml")
]
# Shared homeserver config
convert(
"/conf/shared.yaml.j2",
"/conf/workers/shared.yaml",
shared_worker_config=yaml.dump(shared_config),
appservice_registrations=appservice_registrations,
)
# Nginx config
convert(
"/conf/nginx.conf.j2",
"/etc/nginx/conf.d/matrix-synapse.conf",
worker_locations=nginx_location_config,
upstream_directives=nginx_upstream_config,
tls_cert_path=os.environ.get("SYNAPSE_TLS_CERT"),
tls_key_path=os.environ.get("SYNAPSE_TLS_KEY"),
)
# Supervisord config
os.makedirs("/etc/supervisor", exist_ok=True)
convert(
"/conf/supervisord.conf.j2",
"/etc/supervisor/supervisord.conf",
main_config_path=config_path,
worker_config=supervisord_config,
)
# healthcheck config
convert(
"/conf/healthcheck.sh.j2",
"/healthcheck.sh",
healthcheck_urls=healthcheck_urls,
)
# Ensure the logging directory exists
log_dir = data_dir + "/logs"
if not os.path.exists(log_dir):
os.mkdir(log_dir)
def generate_worker_log_config(
environ: Mapping[str, str], worker_name: str, data_dir: str
) -> str:
"""Generate a log.config file for the given worker.
Returns: the path to the generated file
"""
# Check whether we should write worker logs to disk, in addition to the console
extra_log_template_args = {}
if environ.get("SYNAPSE_WORKERS_WRITE_LOGS_TO_DISK"):
extra_log_template_args["LOG_FILE_PATH"] = "{dir}/logs/{name}.log".format(
dir=data_dir, name=worker_name
)
# Render and write the file
log_config_filepath = "/conf/workers/{name}.log.config".format(name=worker_name)
convert(
"/conf/log.config",
log_config_filepath,
worker_name=worker_name,
**extra_log_template_args,
)
return log_config_filepath
def main(args: List[str], environ: MutableMapping[str, str]) -> None:
config_dir = environ.get("SYNAPSE_CONFIG_DIR", "/data")
config_path = environ.get("SYNAPSE_CONFIG_PATH", config_dir + "/homeserver.yaml")
data_dir = environ.get("SYNAPSE_DATA_DIR", "/data")
# override SYNAPSE_NO_TLS, we don't support TLS in worker mode,
# this needs to be handled by a frontend proxy
environ["SYNAPSE_NO_TLS"] = "yes"
# Generate the base homeserver config if one does not yet exist
if not os.path.exists(config_path):
log("Generating base homeserver config")
generate_base_homeserver_config()
# This script may be run multiple times (mostly by Complement, see note at top of file).
# Don't re-configure workers in this instance.
mark_filepath = "/conf/workers_have_been_configured"
if not os.path.exists(mark_filepath):
# Always regenerate all other config files
generate_worker_files(environ, config_path, data_dir)
# Mark workers as being configured
with open(mark_filepath, "w") as f:
f.write("")
# Start supervisord, which will start Synapse, all of the configured worker
# processes, redis, nginx etc. according to the config we created above.
log("Starting supervisord")
os.execl(
"/usr/local/bin/supervisord",
"supervisord",
"-c",
"/etc/supervisor/supervisord.conf",
)
if __name__ == "__main__":
main(sys.argv, os.environ)
| []
| []
| ["SYNAPSE_TLS_CERT", "SYNAPSE_HTTP_PORT", "SYNAPSE_AS_REGISTRATION_DIR", "SYNAPSE_TLS_KEY"]
| [] | ["SYNAPSE_TLS_CERT", "SYNAPSE_HTTP_PORT", "SYNAPSE_AS_REGISTRATION_DIR", "SYNAPSE_TLS_KEY"] | python | 4 | 0 | |
flink-runtime/src/test/java/org/apache/flink/runtime/testutils/ZooKeeperTestUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.runtime.testutils;
import org.apache.flink.configuration.AkkaOptions;
import org.apache.flink.configuration.CheckpointingOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.HighAvailabilityOptions;
import org.apache.flink.configuration.StateBackendOptions;
import org.apache.flink.runtime.jobmanager.HighAvailabilityMode;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import java.time.Duration;
import static org.apache.flink.util.Preconditions.checkNotNull;
/** ZooKeeper test utilities. */
public class ZooKeeperTestUtils {
private static final Logger LOG = LoggerFactory.getLogger(ZooKeeperTestUtils.class);
/**
* Creates a configuration to operate in {@link HighAvailabilityMode#ZOOKEEPER}.
*
* @param zooKeeperQuorum ZooKeeper quorum to connect to
* @param fsStateHandlePath Base path for file system state backend (for checkpoints and
* recovery)
* @return A new configuration to operate in {@link HighAvailabilityMode#ZOOKEEPER}.
*/
public static Configuration createZooKeeperHAConfig(
String zooKeeperQuorum, String fsStateHandlePath) {
return configureZooKeeperHA(new Configuration(), zooKeeperQuorum, fsStateHandlePath);
}
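
    // Illustrative usage (placeholder values):
    //   Configuration cfg =
    //       ZooKeeperTestUtils.createZooKeeperHAConfig("localhost:2181", "file:///tmp/flink-ha");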
/**
* Sets all necessary configuration keys to operate in {@link HighAvailabilityMode#ZOOKEEPER}.
*
* @param config Configuration to use
* @param zooKeeperQuorum ZooKeeper quorum to connect to
* @param fsStateHandlePath Base path for file system state backend (for checkpoints and
* recovery)
* @return The modified configuration to operate in {@link HighAvailabilityMode#ZOOKEEPER}.
*/
public static Configuration configureZooKeeperHA(
Configuration config, String zooKeeperQuorum, String fsStateHandlePath) {
checkNotNull(config, "Configuration");
checkNotNull(zooKeeperQuorum, "ZooKeeper quorum");
checkNotNull(fsStateHandlePath, "File state handle backend path");
// ZooKeeper recovery mode
config.setString(HighAvailabilityOptions.HA_MODE, "ZOOKEEPER");
config.setString(HighAvailabilityOptions.HA_ZOOKEEPER_QUORUM, zooKeeperQuorum);
int connTimeout = 5000;
if (runsOnCIInfrastructure()) {
            // The regular timeout is too aggressive for Travis and connections are often lost.
LOG.info(
"Detected CI environment: Configuring connection and session timeout of 30 seconds");
connTimeout = 30000;
}
config.setInteger(HighAvailabilityOptions.ZOOKEEPER_CONNECTION_TIMEOUT, connTimeout);
config.setInteger(HighAvailabilityOptions.ZOOKEEPER_SESSION_TIMEOUT, connTimeout);
// File system state backend
config.setString(StateBackendOptions.STATE_BACKEND, "FILESYSTEM");
config.setString(
CheckpointingOptions.CHECKPOINTS_DIRECTORY, fsStateHandlePath + "/checkpoints");
config.setString(HighAvailabilityOptions.HA_STORAGE_PATH, fsStateHandlePath + "/recovery");
config.set(AkkaOptions.ASK_TIMEOUT_DURATION, Duration.ofSeconds(100));
return config;
}
/** @return true, if a CI environment is detected. */
public static boolean runsOnCIInfrastructure() {
return System.getenv().containsKey("CI") || System.getenv().containsKey("TF_BUILD");
}
}
| []
| []
| []
| [] | [] | java | 0 | 0 | |
modules/setting/setting.go | // Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2017 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package setting
import (
"crypto/rand"
"encoding/base64"
"fmt"
"io"
"net"
"net/mail"
"net/url"
"os"
"os/exec"
"path"
"path/filepath"
"regexp"
"runtime"
"strconv"
"strings"
"time"
"code.gitea.io/git"
"code.gitea.io/gitea/modules/log"
_ "code.gitea.io/gitea/modules/minwinsvc" // import minwinsvc for windows services
"code.gitea.io/gitea/modules/user"
"github.com/Unknwon/com"
"github.com/dgrijalva/jwt-go"
_ "github.com/go-macaron/cache/memcache" // memcache plugin for cache
_ "github.com/go-macaron/cache/redis"
"github.com/go-macaron/session"
_ "github.com/go-macaron/session/redis" // redis plugin for store session
"github.com/go-xorm/core"
"github.com/kballard/go-shellquote"
"gopkg.in/ini.v1"
"strk.kbt.io/projects/go/libravatar"
)
// Scheme describes protocol types
type Scheme string
// enumerates all the scheme types
const (
HTTP Scheme = "http"
HTTPS Scheme = "https"
FCGI Scheme = "fcgi"
UnixSocket Scheme = "unix"
)
// LandingPage describes the default page
type LandingPage string
// enumerates all the landing page types
const (
LandingPageHome LandingPage = "/"
LandingPageExplore LandingPage = "/explore"
LandingPageOrganizations LandingPage = "/explore/organizations"
)
// MarkupParser defines the external parser configured in ini
type MarkupParser struct {
Enabled bool
MarkupName string
Command string
FileExtensions []string
IsInputFile bool
}
// enumerates all the policy repository creating
const (
RepoCreatingLastUserVisibility = "last"
RepoCreatingPrivate = "private"
RepoCreatingPublic = "public"
)
// settings
var (
// AppVer settings
AppVer string
AppBuiltWith string
AppName string
AppURL string
AppSubURL string
AppSubURLDepth int // Number of slashes
AppPath string
AppDataPath string
AppWorkPath string
// Server settings
Protocol Scheme
Domain string
HTTPAddr string
HTTPPort string
LocalURL string
RedirectOtherPort bool
PortToRedirect string
OfflineMode bool
DisableRouterLog bool
CertFile string
KeyFile string
StaticRootPath string
EnableGzip bool
LandingPageURL LandingPage
UnixSocketPermission uint32
EnablePprof bool
SSH = struct {
Disabled bool `ini:"DISABLE_SSH"`
StartBuiltinServer bool `ini:"START_SSH_SERVER"`
BuiltinServerUser string `ini:"BUILTIN_SSH_SERVER_USER"`
Domain string `ini:"SSH_DOMAIN"`
Port int `ini:"SSH_PORT"`
ListenHost string `ini:"SSH_LISTEN_HOST"`
ListenPort int `ini:"SSH_LISTEN_PORT"`
RootPath string `ini:"SSH_ROOT_PATH"`
ServerCiphers []string `ini:"SSH_SERVER_CIPHERS"`
ServerKeyExchanges []string `ini:"SSH_SERVER_KEY_EXCHANGES"`
ServerMACs []string `ini:"SSH_SERVER_MACS"`
KeyTestPath string `ini:"SSH_KEY_TEST_PATH"`
KeygenPath string `ini:"SSH_KEYGEN_PATH"`
AuthorizedKeysBackup bool `ini:"SSH_AUTHORIZED_KEYS_BACKUP"`
MinimumKeySizeCheck bool `ini:"-"`
MinimumKeySizes map[string]int `ini:"-"`
ExposeAnonymous bool `ini:"SSH_EXPOSE_ANONYMOUS"`
}{
Disabled: false,
StartBuiltinServer: false,
Domain: "",
Port: 22,
ServerCiphers: []string{"aes128-ctr", "aes192-ctr", "aes256-ctr", "[email protected]", "arcfour256", "arcfour128"},
ServerKeyExchanges: []string{"diffie-hellman-group1-sha1", "diffie-hellman-group14-sha1", "ecdh-sha2-nistp256", "ecdh-sha2-nistp384", "ecdh-sha2-nistp521", "[email protected]"},
ServerMACs: []string{"[email protected]", "hmac-sha2-256", "hmac-sha1", "hmac-sha1-96"},
KeygenPath: "ssh-keygen",
}
LFS struct {
StartServer bool `ini:"LFS_START_SERVER"`
ContentPath string `ini:"LFS_CONTENT_PATH"`
JWTSecretBase64 string `ini:"LFS_JWT_SECRET"`
JWTSecretBytes []byte `ini:"-"`
}
// Security settings
InstallLock bool
SecretKey string
LogInRememberDays int
CookieUserName string
CookieRememberName string
ReverseProxyAuthUser string
MinPasswordLength int
ImportLocalPaths bool
DisableGitHooks bool
// Database settings
UseSQLite3 bool
UseMySQL bool
UseMSSQL bool
UsePostgreSQL bool
UseTiDB bool
// Indexer settings
Indexer struct {
IssuePath string
RepoIndexerEnabled bool
RepoPath string
UpdateQueueLength int
MaxIndexerFileSize int64
}
// Webhook settings
Webhook = struct {
QueueLength int
DeliverTimeout int
SkipTLSVerify bool
Types []string
PagingNum int
}{
QueueLength: 1000,
DeliverTimeout: 5,
SkipTLSVerify: false,
PagingNum: 10,
}
// Repository settings
Repository = struct {
AnsiCharset string
ForcePrivate bool
DefaultPrivate string
MaxCreationLimit int
MirrorQueueLength int
PullRequestQueueLength int
PreferredLicenses []string
DisableHTTPGit bool
UseCompatSSHURI bool
// Repository editor settings
Editor struct {
LineWrapExtensions []string
PreviewableFileModes []string
} `ini:"-"`
// Repository upload settings
Upload struct {
Enabled bool
TempPath string
AllowedTypes []string `delim:"|"`
FileMaxSize int64
MaxFiles int
} `ini:"-"`
// Repository local settings
Local struct {
LocalCopyPath string
LocalWikiPath string
} `ini:"-"`
}{
AnsiCharset: "",
ForcePrivate: false,
DefaultPrivate: RepoCreatingLastUserVisibility,
MaxCreationLimit: -1,
MirrorQueueLength: 1000,
PullRequestQueueLength: 1000,
PreferredLicenses: []string{"Apache License 2.0,MIT License"},
DisableHTTPGit: false,
UseCompatSSHURI: false,
// Repository editor settings
Editor: struct {
LineWrapExtensions []string
PreviewableFileModes []string
}{
LineWrapExtensions: strings.Split(".txt,.md,.markdown,.mdown,.mkd,", ","),
PreviewableFileModes: []string{"markdown"},
},
// Repository upload settings
Upload: struct {
Enabled bool
TempPath string
AllowedTypes []string `delim:"|"`
FileMaxSize int64
MaxFiles int
}{
Enabled: true,
TempPath: "data/tmp/uploads",
AllowedTypes: []string{},
FileMaxSize: 3,
MaxFiles: 5,
},
// Repository local settings
Local: struct {
LocalCopyPath string
LocalWikiPath string
}{
LocalCopyPath: "tmp/local-repo",
LocalWikiPath: "tmp/local-wiki",
},
}
RepoRootPath string
ScriptType = "bash"
// UI settings
UI = struct {
ExplorePagingNum int
IssuePagingNum int
RepoSearchPagingNum int
FeedMaxCommitNum int
ReactionMaxUserNum int
ThemeColorMetaTag string
MaxDisplayFileSize int64
ShowUserEmail bool
Admin struct {
UserPagingNum int
RepoPagingNum int
NoticePagingNum int
OrgPagingNum int
} `ini:"ui.admin"`
User struct {
RepoPagingNum int
} `ini:"ui.user"`
Meta struct {
Author string
Description string
Keywords string
} `ini:"ui.meta"`
}{
ExplorePagingNum: 20,
IssuePagingNum: 10,
RepoSearchPagingNum: 10,
FeedMaxCommitNum: 5,
ReactionMaxUserNum: 10,
ThemeColorMetaTag: `#6cc644`,
MaxDisplayFileSize: 8388608,
Admin: struct {
UserPagingNum int
RepoPagingNum int
NoticePagingNum int
OrgPagingNum int
}{
UserPagingNum: 50,
RepoPagingNum: 50,
NoticePagingNum: 25,
OrgPagingNum: 50,
},
User: struct {
RepoPagingNum int
}{
RepoPagingNum: 15,
},
Meta: struct {
Author string
Description string
Keywords string
}{
Author: "Gitea - Git with a cup of tea",
Description: "Gitea (Git with a cup of tea) is a painless self-hosted Git service written in Go",
Keywords: "go,git,self-hosted,gitea",
},
}
// Markdown settings
Markdown = struct {
EnableHardLineBreak bool
CustomURLSchemes []string `ini:"CUSTOM_URL_SCHEMES"`
FileExtensions []string
}{
EnableHardLineBreak: false,
FileExtensions: strings.Split(".md,.markdown,.mdown,.mkd", ","),
}
// Admin settings
Admin struct {
DisableRegularOrgCreation bool
}
// Picture settings
AvatarUploadPath string
GravatarSource string
GravatarSourceURL *url.URL
DisableGravatar bool
EnableFederatedAvatar bool
LibravatarService *libravatar.Libravatar
// Log settings
LogLevel string
LogRootPath string
LogModes []string
LogConfigs []string
// Attachment settings
AttachmentPath string
AttachmentAllowedTypes string
AttachmentMaxSize int64
AttachmentMaxFiles int
AttachmentEnabled bool
// Time settings
TimeFormat string
// Session settings
SessionConfig session.Options
CSRFCookieName = "_csrf"
// Cron tasks
Cron = struct {
UpdateMirror struct {
Enabled bool
RunAtStart bool
Schedule string
} `ini:"cron.update_mirrors"`
RepoHealthCheck struct {
Enabled bool
RunAtStart bool
Schedule string
Timeout time.Duration
Args []string `delim:" "`
} `ini:"cron.repo_health_check"`
CheckRepoStats struct {
Enabled bool
RunAtStart bool
Schedule string
} `ini:"cron.check_repo_stats"`
ArchiveCleanup struct {
Enabled bool
RunAtStart bool
Schedule string
OlderThan time.Duration
} `ini:"cron.archive_cleanup"`
SyncExternalUsers struct {
Enabled bool
RunAtStart bool
Schedule string
UpdateExisting bool
} `ini:"cron.sync_external_users"`
DeletedBranchesCleanup struct {
Enabled bool
RunAtStart bool
Schedule string
OlderThan time.Duration
} `ini:"cron.deleted_branches_cleanup"`
}{
UpdateMirror: struct {
Enabled bool
RunAtStart bool
Schedule string
}{
Enabled: true,
RunAtStart: false,
Schedule: "@every 10m",
},
RepoHealthCheck: struct {
Enabled bool
RunAtStart bool
Schedule string
Timeout time.Duration
Args []string `delim:" "`
}{
Enabled: true,
RunAtStart: false,
Schedule: "@every 24h",
Timeout: 60 * time.Second,
Args: []string{},
},
CheckRepoStats: struct {
Enabled bool
RunAtStart bool
Schedule string
}{
Enabled: true,
RunAtStart: true,
Schedule: "@every 24h",
},
ArchiveCleanup: struct {
Enabled bool
RunAtStart bool
Schedule string
OlderThan time.Duration
}{
Enabled: true,
RunAtStart: true,
Schedule: "@every 24h",
OlderThan: 24 * time.Hour,
},
SyncExternalUsers: struct {
Enabled bool
RunAtStart bool
Schedule string
UpdateExisting bool
}{
Enabled: true,
RunAtStart: false,
Schedule: "@every 24h",
UpdateExisting: true,
},
DeletedBranchesCleanup: struct {
Enabled bool
RunAtStart bool
Schedule string
OlderThan time.Duration
}{
Enabled: true,
RunAtStart: true,
Schedule: "@every 24h",
OlderThan: 24 * time.Hour,
},
}
// Git settings
Git = struct {
Version string `ini:"-"`
DisableDiffHighlight bool
MaxGitDiffLines int
MaxGitDiffLineCharacters int
MaxGitDiffFiles int
GCArgs []string `delim:" "`
Timeout struct {
Migrate int
Mirror int
Clone int
Pull int
GC int `ini:"GC"`
} `ini:"git.timeout"`
}{
DisableDiffHighlight: false,
MaxGitDiffLines: 1000,
MaxGitDiffLineCharacters: 5000,
MaxGitDiffFiles: 100,
GCArgs: []string{},
Timeout: struct {
Migrate int
Mirror int
Clone int
Pull int
GC int `ini:"GC"`
}{
Migrate: 600,
Mirror: 300,
Clone: 300,
Pull: 300,
GC: 60,
},
}
// Mirror settings
Mirror struct {
DefaultInterval time.Duration
MinInterval time.Duration
}
// API settings
API = struct {
MaxResponseItems int
}{
MaxResponseItems: 50,
}
// I18n settings
Langs []string
Names []string
dateLangs map[string]string
// Highlight settings are loaded in modules/template/highlight.go
// Other settings
ShowFooterBranding bool
ShowFooterVersion bool
ShowFooterTemplateLoadTime bool
// Global setting objects
Cfg *ini.File
CustomPath string // Custom directory path
CustomConf string
CustomPID string
ProdMode bool
RunUser string
IsWindows bool
HasRobotsTxt bool
InternalToken string // internal access token
IterateBufferSize int
ExternalMarkupParsers []MarkupParser
// UILocation is the location on the UI, so that we can display the time on UI.
// Currently only show the default time.Local, it could be added to app.ini after UI is ready
UILocation = time.Local
)
// DateLang transforms standard language locale name to corresponding value in datetime plugin.
func DateLang(lang string) string {
name, ok := dateLangs[lang]
if ok {
return name
}
return "en"
}
func getAppPath() (string, error) {
var appPath string
var err error
if IsWindows && filepath.IsAbs(os.Args[0]) {
appPath = filepath.Clean(os.Args[0])
} else {
appPath, err = exec.LookPath(os.Args[0])
}
if err != nil {
return "", err
}
appPath, err = filepath.Abs(appPath)
if err != nil {
return "", err
}
	// Note: we don't use path.Dir here because it does not handle the case
	// in which the path starts with two "/" in Windows: "//psf/Home/..."
return strings.Replace(appPath, "\\", "/", -1), err
}
func getWorkPath(appPath string) string {
workPath := ""
giteaWorkPath := os.Getenv("GITEA_WORK_DIR")
if len(giteaWorkPath) > 0 {
workPath = giteaWorkPath
} else {
i := strings.LastIndex(appPath, "/")
if i == -1 {
workPath = appPath
} else {
workPath = appPath[:i]
}
}
return strings.Replace(workPath, "\\", "/", -1)
}
func init() {
IsWindows = runtime.GOOS == "windows"
log.NewLogger(0, "console", `{"level": 0}`)
var err error
if AppPath, err = getAppPath(); err != nil {
log.Fatal(4, "Failed to get app path: %v", err)
}
AppWorkPath = getWorkPath(AppPath)
}
func forcePathSeparator(path string) {
if strings.Contains(path, "\\") {
log.Fatal(4, "Do not use '\\' or '\\\\' in paths, instead, please use '/' in all places")
}
}
// IsRunUserMatchCurrentUser returns false if configured run user does not match
// actual user that runs the app. The first return value is the actual user name.
// This check is ignored under Windows since SSH remote login is not the main
// method to login on Windows.
func IsRunUserMatchCurrentUser(runUser string) (string, bool) {
if IsWindows {
return "", true
}
currentUser := user.CurrentUsername()
return currentUser, runUser == currentUser
}
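// createPIDFile writes the current process ID to the given path, creating parent directories as needed.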
func createPIDFile(pidPath string) {
currentPid := os.Getpid()
if err := os.MkdirAll(filepath.Dir(pidPath), os.ModePerm); err != nil {
log.Fatal(4, "Failed to create PID folder: %v", err)
}
file, err := os.Create(pidPath)
if err != nil {
log.Fatal(4, "Failed to create PID file: %v", err)
}
defer file.Close()
if _, err := file.WriteString(strconv.FormatInt(int64(currentPid), 10)); err != nil {
log.Fatal(4, "Failed to write PID information: %v", err)
}
}
// NewContext initializes configuration context.
// NOTE: do not print any log except error.
func NewContext() {
Cfg = ini.Empty()
CustomPath = os.Getenv("GITEA_CUSTOM")
if len(CustomPath) == 0 {
CustomPath = path.Join(AppWorkPath, "custom")
} else if !filepath.IsAbs(CustomPath) {
CustomPath = path.Join(AppWorkPath, CustomPath)
}
if len(CustomPID) > 0 {
createPIDFile(CustomPID)
}
if len(CustomConf) == 0 {
CustomConf = path.Join(CustomPath, "conf/app.ini")
} else if !filepath.IsAbs(CustomConf) {
CustomConf = path.Join(CustomPath, CustomConf)
}
if com.IsFile(CustomConf) {
if err := Cfg.Append(CustomConf); err != nil {
log.Fatal(4, "Failed to load custom conf '%s': %v", CustomConf, err)
}
} else {
log.Warn("Custom config '%s' not found, ignore this if you're running first time", CustomConf)
}
Cfg.NameMapper = ini.AllCapsUnderscore
homeDir, err := com.HomeDir()
if err != nil {
log.Fatal(4, "Failed to get home directory: %v", err)
}
homeDir = strings.Replace(homeDir, "\\", "/", -1)
LogLevel = getLogLevel("log", "LEVEL", "Info")
LogRootPath = Cfg.Section("log").Key("ROOT_PATH").MustString(path.Join(AppWorkPath, "log"))
forcePathSeparator(LogRootPath)
sec := Cfg.Section("server")
AppName = Cfg.Section("").Key("APP_NAME").MustString("Gitea: Git with a cup of tea")
Protocol = HTTP
if sec.Key("PROTOCOL").String() == "https" {
Protocol = HTTPS
CertFile = sec.Key("CERT_FILE").String()
KeyFile = sec.Key("KEY_FILE").String()
} else if sec.Key("PROTOCOL").String() == "fcgi" {
Protocol = FCGI
} else if sec.Key("PROTOCOL").String() == "unix" {
Protocol = UnixSocket
UnixSocketPermissionRaw := sec.Key("UNIX_SOCKET_PERMISSION").MustString("666")
UnixSocketPermissionParsed, err := strconv.ParseUint(UnixSocketPermissionRaw, 8, 32)
if err != nil || UnixSocketPermissionParsed > 0777 {
log.Fatal(4, "Failed to parse unixSocketPermission: %s", UnixSocketPermissionRaw)
}
UnixSocketPermission = uint32(UnixSocketPermissionParsed)
}
Domain = sec.Key("DOMAIN").MustString("localhost")
HTTPAddr = sec.Key("HTTP_ADDR").MustString("0.0.0.0")
HTTPPort = sec.Key("HTTP_PORT").MustString("3000")
defaultAppURL := string(Protocol) + "://" + Domain
if (Protocol == HTTP && HTTPPort != "80") || (Protocol == HTTPS && HTTPPort != "443") {
defaultAppURL += ":" + HTTPPort
}
AppURL = sec.Key("ROOT_URL").MustString(defaultAppURL)
AppURL = strings.TrimRight(AppURL, "/") + "/"
// Check if the application has a sub-URL.
url, err := url.Parse(AppURL)
if err != nil {
log.Fatal(4, "Invalid ROOT_URL '%s': %s", AppURL, err)
}
// Suburl should start with '/' and end without '/', such as '/{subpath}'.
// This value is empty if site does not have sub-url.
AppSubURL = strings.TrimSuffix(url.Path, "/")
AppSubURLDepth = strings.Count(AppSubURL, "/")
// If Domain differs from the AppURL domain, update it to AppURL's domain
// TODO: Can be replaced with url.Hostname() when minimal GoLang version is 1.8
urlHostname := strings.SplitN(url.Host, ":", 2)[0]
if urlHostname != Domain && net.ParseIP(urlHostname) == nil {
Domain = urlHostname
}
var defaultLocalURL string
switch Protocol {
case UnixSocket:
defaultLocalURL = "http://unix/"
case FCGI:
defaultLocalURL = AppURL
default:
defaultLocalURL = string(Protocol) + "://"
if HTTPAddr == "0.0.0.0" {
defaultLocalURL += "localhost"
} else {
defaultLocalURL += HTTPAddr
}
defaultLocalURL += ":" + HTTPPort + "/"
}
LocalURL = sec.Key("LOCAL_ROOT_URL").MustString(defaultLocalURL)
RedirectOtherPort = sec.Key("REDIRECT_OTHER_PORT").MustBool(false)
PortToRedirect = sec.Key("PORT_TO_REDIRECT").MustString("80")
OfflineMode = sec.Key("OFFLINE_MODE").MustBool()
DisableRouterLog = sec.Key("DISABLE_ROUTER_LOG").MustBool()
StaticRootPath = sec.Key("STATIC_ROOT_PATH").MustString(AppWorkPath)
AppDataPath = sec.Key("APP_DATA_PATH").MustString(path.Join(AppWorkPath, "data"))
EnableGzip = sec.Key("ENABLE_GZIP").MustBool()
EnablePprof = sec.Key("ENABLE_PPROF").MustBool(false)
switch sec.Key("LANDING_PAGE").MustString("home") {
case "explore":
LandingPageURL = LandingPageExplore
case "organizations":
LandingPageURL = LandingPageOrganizations
default:
LandingPageURL = LandingPageHome
}
if len(SSH.Domain) == 0 {
SSH.Domain = Domain
}
SSH.RootPath = path.Join(homeDir, ".ssh")
serverCiphers := sec.Key("SSH_SERVER_CIPHERS").Strings(",")
if len(serverCiphers) > 0 {
SSH.ServerCiphers = serverCiphers
}
serverKeyExchanges := sec.Key("SSH_SERVER_KEY_EXCHANGES").Strings(",")
if len(serverKeyExchanges) > 0 {
SSH.ServerKeyExchanges = serverKeyExchanges
}
serverMACs := sec.Key("SSH_SERVER_MACS").Strings(",")
if len(serverMACs) > 0 {
SSH.ServerMACs = serverMACs
}
SSH.KeyTestPath = os.TempDir()
if err = Cfg.Section("server").MapTo(&SSH); err != nil {
log.Fatal(4, "Failed to map SSH settings: %v", err)
}
SSH.KeygenPath = sec.Key("SSH_KEYGEN_PATH").MustString("ssh-keygen")
SSH.Port = sec.Key("SSH_PORT").MustInt(22)
SSH.ListenPort = sec.Key("SSH_LISTEN_PORT").MustInt(SSH.Port)
// When SSH is disabled, the start builtin server value is ignored.
if SSH.Disabled {
SSH.StartBuiltinServer = false
}
if !SSH.Disabled && !SSH.StartBuiltinServer {
if err := os.MkdirAll(SSH.RootPath, 0700); err != nil {
log.Fatal(4, "Failed to create '%s': %v", SSH.RootPath, err)
} else if err = os.MkdirAll(SSH.KeyTestPath, 0644); err != nil {
log.Fatal(4, "Failed to create '%s': %v", SSH.KeyTestPath, err)
}
}
SSH.MinimumKeySizeCheck = sec.Key("MINIMUM_KEY_SIZE_CHECK").MustBool()
SSH.MinimumKeySizes = map[string]int{}
minimumKeySizes := Cfg.Section("ssh.minimum_key_sizes").Keys()
for _, key := range minimumKeySizes {
if key.MustInt() != -1 {
SSH.MinimumKeySizes[strings.ToLower(key.Name())] = key.MustInt()
}
}
SSH.AuthorizedKeysBackup = sec.Key("SSH_AUTHORIZED_KEYS_BACKUP").MustBool(true)
SSH.ExposeAnonymous = sec.Key("SSH_EXPOSE_ANONYMOUS").MustBool(false)
sec = Cfg.Section("server")
if err = sec.MapTo(&LFS); err != nil {
log.Fatal(4, "Failed to map LFS settings: %v", err)
}
LFS.ContentPath = sec.Key("LFS_CONTENT_PATH").MustString(filepath.Join(AppDataPath, "lfs"))
if !filepath.IsAbs(LFS.ContentPath) {
LFS.ContentPath = filepath.Join(AppWorkPath, LFS.ContentPath)
}
if LFS.StartServer {
if err := os.MkdirAll(LFS.ContentPath, 0700); err != nil {
log.Fatal(4, "Failed to create '%s': %v", LFS.ContentPath, err)
}
LFS.JWTSecretBytes = make([]byte, 32)
n, err := base64.RawURLEncoding.Decode(LFS.JWTSecretBytes, []byte(LFS.JWTSecretBase64))
if err != nil || n != 32 {
//Generate new secret and save to config
_, err := io.ReadFull(rand.Reader, LFS.JWTSecretBytes)
if err != nil {
log.Fatal(4, "Error reading random bytes: %v", err)
}
LFS.JWTSecretBase64 = base64.RawURLEncoding.EncodeToString(LFS.JWTSecretBytes)
// Save secret
cfg := ini.Empty()
if com.IsFile(CustomConf) {
// Keeps custom settings if there is already something.
if err := cfg.Append(CustomConf); err != nil {
log.Error(4, "Failed to load custom conf '%s': %v", CustomConf, err)
}
}
cfg.Section("server").Key("LFS_JWT_SECRET").SetValue(LFS.JWTSecretBase64)
if err := os.MkdirAll(filepath.Dir(CustomConf), os.ModePerm); err != nil {
log.Fatal(4, "Failed to create '%s': %v", CustomConf, err)
}
if err := cfg.SaveTo(CustomConf); err != nil {
log.Fatal(4, "Error saving generated JWT Secret to custom config: %v", err)
return
}
}
//Disable LFS client hooks if installed for the current OS user
//Needs at least git v2.1.2
binVersion, err := git.BinVersion()
if err != nil {
log.Fatal(4, "Error retrieving git version: %v", err)
}
splitVersion := strings.SplitN(binVersion, ".", 4)
majorVersion, err := strconv.ParseUint(splitVersion[0], 10, 64)
if err != nil {
log.Fatal(4, "Error parsing git major version: %v", err)
}
minorVersion, err := strconv.ParseUint(splitVersion[1], 10, 64)
if err != nil {
log.Fatal(4, "Error parsing git minor version: %v", err)
}
revisionVersion, err := strconv.ParseUint(splitVersion[2], 10, 64)
if err != nil {
log.Fatal(4, "Error parsing git revision version: %v", err)
}
if !((majorVersion > 2) || (majorVersion == 2 && minorVersion > 1) ||
(majorVersion == 2 && minorVersion == 1 && revisionVersion >= 2)) {
LFS.StartServer = false
log.Error(4, "LFS server support needs at least Git v2.1.2")
} else {
git.GlobalCommandArgs = append(git.GlobalCommandArgs, "-c", "filter.lfs.required=",
"-c", "filter.lfs.smudge=", "-c", "filter.lfs.clean=")
}
}
sec = Cfg.Section("security")
InstallLock = sec.Key("INSTALL_LOCK").MustBool(false)
SecretKey = sec.Key("SECRET_KEY").MustString("!#@FDEWREWR&*(")
LogInRememberDays = sec.Key("LOGIN_REMEMBER_DAYS").MustInt(7)
CookieUserName = sec.Key("COOKIE_USERNAME").MustString("gitea_awesome")
CookieRememberName = sec.Key("COOKIE_REMEMBER_NAME").MustString("gitea_incredible")
ReverseProxyAuthUser = sec.Key("REVERSE_PROXY_AUTHENTICATION_USER").MustString("X-WEBAUTH-USER")
MinPasswordLength = sec.Key("MIN_PASSWORD_LENGTH").MustInt(6)
ImportLocalPaths = sec.Key("IMPORT_LOCAL_PATHS").MustBool(false)
DisableGitHooks = sec.Key("DISABLE_GIT_HOOKS").MustBool(false)
InternalToken = sec.Key("INTERNAL_TOKEN").String()
if len(InternalToken) == 0 {
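// No token configured yet: sign a JWT with a freshly generated random key and persist it to the custom config below.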
secretBytes := make([]byte, 32)
_, err := io.ReadFull(rand.Reader, secretBytes)
if err != nil {
log.Fatal(4, "Error reading random bytes: %v", err)
}
secretKey := base64.RawURLEncoding.EncodeToString(secretBytes)
now := time.Now()
InternalToken, err = jwt.NewWithClaims(jwt.SigningMethodHS256, jwt.MapClaims{
"nbf": now.Unix(),
}).SignedString([]byte(secretKey))
if err != nil {
log.Fatal(4, "Error generate internal token: %v", err)
}
// Save secret
cfgSave := ini.Empty()
if com.IsFile(CustomConf) {
// Keeps custom settings if there is already something.
if err := cfgSave.Append(CustomConf); err != nil {
log.Error(4, "Failed to load custom conf '%s': %v", CustomConf, err)
}
}
cfgSave.Section("security").Key("INTERNAL_TOKEN").SetValue(InternalToken)
if err := os.MkdirAll(filepath.Dir(CustomConf), os.ModePerm); err != nil {
log.Fatal(4, "Failed to create '%s': %v", CustomConf, err)
}
if err := cfgSave.SaveTo(CustomConf); err != nil {
log.Fatal(4, "Error saving generated JWT Secret to custom config: %v", err)
}
}
IterateBufferSize = Cfg.Section("database").Key("ITERATE_BUFFER_SIZE").MustInt(50)
sec = Cfg.Section("attachment")
AttachmentPath = sec.Key("PATH").MustString(path.Join(AppDataPath, "attachments"))
if !filepath.IsAbs(AttachmentPath) {
AttachmentPath = path.Join(AppWorkPath, AttachmentPath)
}
AttachmentAllowedTypes = strings.Replace(sec.Key("ALLOWED_TYPES").MustString("image/jpeg,image/png,application/zip,application/gzip"), "|", ",", -1)
AttachmentMaxSize = sec.Key("MAX_SIZE").MustInt64(4)
AttachmentMaxFiles = sec.Key("MAX_FILES").MustInt(5)
AttachmentEnabled = sec.Key("ENABLE").MustBool(true)
TimeFormatKey := Cfg.Section("time").Key("FORMAT").MustString("RFC1123")
TimeFormat = map[string]string{
"ANSIC": time.ANSIC,
"UnixDate": time.UnixDate,
"RubyDate": time.RubyDate,
"RFC822": time.RFC822,
"RFC822Z": time.RFC822Z,
"RFC850": time.RFC850,
"RFC1123": time.RFC1123,
"RFC1123Z": time.RFC1123Z,
"RFC3339": time.RFC3339,
"RFC3339Nano": time.RFC3339Nano,
"Kitchen": time.Kitchen,
"Stamp": time.Stamp,
"StampMilli": time.StampMilli,
"StampMicro": time.StampMicro,
"StampNano": time.StampNano,
}[TimeFormatKey]
// When the TimeFormatKey does not exist in the map above, e.g. '2006-01-02 15:04:05'
if len(TimeFormat) == 0 {
TimeFormat = TimeFormatKey
TestTimeFormat, _ := time.Parse(TimeFormat, TimeFormat)
if TestTimeFormat.Format(time.RFC3339) != "2006-01-02T15:04:05Z" {
log.Fatal(4, "Can't create time properly, please check your time format has 2006, 01, 02, 15, 04 and 05")
}
log.Trace("Custom TimeFormat: %s", TimeFormat)
}
RunUser = Cfg.Section("").Key("RUN_USER").MustString(user.CurrentUsername())
// Does not check run user when the install lock is off.
if InstallLock {
currentUser, match := IsRunUserMatchCurrentUser(RunUser)
if !match {
log.Fatal(4, "Expect user '%s' but current user is: %s", RunUser, currentUser)
}
}
SSH.BuiltinServerUser = Cfg.Section("server").Key("BUILTIN_SSH_SERVER_USER").MustString(RunUser)
// Determine and create root git repository path.
sec = Cfg.Section("repository")
Repository.DisableHTTPGit = sec.Key("DISABLE_HTTP_GIT").MustBool()
Repository.UseCompatSSHURI = sec.Key("USE_COMPAT_SSH_URI").MustBool()
Repository.MaxCreationLimit = sec.Key("MAX_CREATION_LIMIT").MustInt(-1)
RepoRootPath = sec.Key("ROOT").MustString(path.Join(homeDir, "gitea-repositories"))
forcePathSeparator(RepoRootPath)
if !filepath.IsAbs(RepoRootPath) {
RepoRootPath = filepath.Join(AppWorkPath, RepoRootPath)
} else {
RepoRootPath = filepath.Clean(RepoRootPath)
}
ScriptType = sec.Key("SCRIPT_TYPE").MustString("bash")
if err = Cfg.Section("repository").MapTo(&Repository); err != nil {
log.Fatal(4, "Failed to map Repository settings: %v", err)
} else if err = Cfg.Section("repository.editor").MapTo(&Repository.Editor); err != nil {
log.Fatal(4, "Failed to map Repository.Editor settings: %v", err)
} else if err = Cfg.Section("repository.upload").MapTo(&Repository.Upload); err != nil {
log.Fatal(4, "Failed to map Repository.Upload settings: %v", err)
} else if err = Cfg.Section("repository.local").MapTo(&Repository.Local); err != nil {
log.Fatal(4, "Failed to map Repository.Local settings: %v", err)
}
if !filepath.IsAbs(Repository.Upload.TempPath) {
Repository.Upload.TempPath = path.Join(AppWorkPath, Repository.Upload.TempPath)
}
sec = Cfg.Section("picture")
AvatarUploadPath = sec.Key("AVATAR_UPLOAD_PATH").MustString(path.Join(AppDataPath, "avatars"))
forcePathSeparator(AvatarUploadPath)
if !filepath.IsAbs(AvatarUploadPath) {
AvatarUploadPath = path.Join(AppWorkPath, AvatarUploadPath)
}
switch source := sec.Key("GRAVATAR_SOURCE").MustString("gravatar"); source {
case "duoshuo":
GravatarSource = "http://gravatar.duoshuo.com/avatar/"
case "gravatar":
GravatarSource = "https://secure.gravatar.com/avatar/"
case "libravatar":
GravatarSource = "https://seccdn.libravatar.org/avatar/"
default:
GravatarSource = source
}
DisableGravatar = sec.Key("DISABLE_GRAVATAR").MustBool()
EnableFederatedAvatar = sec.Key("ENABLE_FEDERATED_AVATAR").MustBool(!InstallLock)
if OfflineMode {
DisableGravatar = true
EnableFederatedAvatar = false
}
if DisableGravatar {
EnableFederatedAvatar = false
}
if EnableFederatedAvatar || !DisableGravatar {
GravatarSourceURL, err = url.Parse(GravatarSource)
if err != nil {
log.Fatal(4, "Failed to parse Gravatar URL(%s): %v",
GravatarSource, err)
}
}
if EnableFederatedAvatar {
LibravatarService = libravatar.New()
if GravatarSourceURL.Scheme == "https" {
LibravatarService.SetUseHTTPS(true)
LibravatarService.SetSecureFallbackHost(GravatarSourceURL.Host)
} else {
LibravatarService.SetUseHTTPS(false)
LibravatarService.SetFallbackHost(GravatarSourceURL.Host)
}
}
if err = Cfg.Section("ui").MapTo(&UI); err != nil {
log.Fatal(4, "Failed to map UI settings: %v", err)
} else if err = Cfg.Section("markdown").MapTo(&Markdown); err != nil {
log.Fatal(4, "Failed to map Markdown settings: %v", err)
} else if err = Cfg.Section("admin").MapTo(&Admin); err != nil {
log.Fatal(4, "Fail to map Admin settings: %v", err)
} else if err = Cfg.Section("cron").MapTo(&Cron); err != nil {
log.Fatal(4, "Failed to map Cron settings: %v", err)
} else if err = Cfg.Section("git").MapTo(&Git); err != nil {
log.Fatal(4, "Failed to map Git settings: %v", err)
} else if err = Cfg.Section("api").MapTo(&API); err != nil {
log.Fatal(4, "Failed to map API settings: %v", err)
}
sec = Cfg.Section("mirror")
Mirror.MinInterval = sec.Key("MIN_INTERVAL").MustDuration(10 * time.Minute)
Mirror.DefaultInterval = sec.Key("DEFAULT_INTERVAL").MustDuration(8 * time.Hour)
if Mirror.MinInterval.Minutes() < 1 {
log.Warn("Mirror.MinInterval is too low")
Mirror.MinInterval = 1 * time.Minute
}
if Mirror.DefaultInterval < Mirror.MinInterval {
log.Warn("Mirror.DefaultInterval is less than Mirror.MinInterval")
Mirror.DefaultInterval = time.Hour * 8
}
Langs = Cfg.Section("i18n").Key("LANGS").Strings(",")
if len(Langs) == 0 {
Langs = defaultLangs
}
Names = Cfg.Section("i18n").Key("NAMES").Strings(",")
if len(Names) == 0 {
Names = defaultLangNames
}
dateLangs = Cfg.Section("i18n.datelang").KeysHash()
ShowFooterBranding = Cfg.Section("other").Key("SHOW_FOOTER_BRANDING").MustBool(false)
ShowFooterVersion = Cfg.Section("other").Key("SHOW_FOOTER_VERSION").MustBool(true)
ShowFooterTemplateLoadTime = Cfg.Section("other").Key("SHOW_FOOTER_TEMPLATE_LOAD_TIME").MustBool(true)
UI.ShowUserEmail = Cfg.Section("ui").Key("SHOW_USER_EMAIL").MustBool(true)
HasRobotsTxt = com.IsFile(path.Join(CustomPath, "robots.txt"))
extensionReg := regexp.MustCompile(`\.\w`)
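// Collect user-defined external markup renderers from [markup.*] child sections; each needs a non-empty name, at least one valid file extension, and a render command.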
for _, sec := range Cfg.Section("markup").ChildSections() {
name := strings.TrimPrefix(sec.Name(), "markup.")
if name == "" {
log.Warn("name is empty, markup " + sec.Name() + "ignored")
continue
}
extensions := sec.Key("FILE_EXTENSIONS").Strings(",")
var exts = make([]string, 0, len(extensions))
for _, extension := range extensions {
if !extensionReg.MatchString(extension) {
log.Warn(sec.Name() + " file extension " + extension + " is invalid. Extension ignored")
} else {
exts = append(exts, extension)
}
}
if len(exts) == 0 {
log.Warn(sec.Name() + " file extension is empty, markup " + name + " ignored")
continue
}
command := sec.Key("RENDER_COMMAND").MustString("")
if command == "" {
log.Warn(" RENDER_COMMAND is empty, markup " + name + " ignored")
continue
}
ExternalMarkupParsers = append(ExternalMarkupParsers, MarkupParser{
Enabled: sec.Key("ENABLED").MustBool(false),
MarkupName: name,
FileExtensions: exts,
Command: command,
IsInputFile: sec.Key("IS_INPUT_FILE").MustBool(false),
})
}
}
// Service settings
var Service struct {
ActiveCodeLives int
ResetPwdCodeLives int
RegisterEmailConfirm bool
DisableRegistration bool
ShowRegistrationButton bool
RequireSignInView bool
EnableNotifyMail bool
EnableReverseProxyAuth bool
EnableReverseProxyAutoRegister bool
EnableCaptcha bool
DefaultKeepEmailPrivate bool
DefaultAllowCreateOrganization bool
DefaultEnableTimetracking bool
DefaultAllowOnlyContributorsToTrackTime bool
NoReplyAddress string
// OpenID settings
EnableOpenIDSignIn bool
EnableOpenIDSignUp bool
OpenIDWhitelist []*regexp.Regexp
OpenIDBlacklist []*regexp.Regexp
}
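// newService loads [service] and [openid] settings into the Service struct.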
func newService() {
sec := Cfg.Section("service")
Service.ActiveCodeLives = sec.Key("ACTIVE_CODE_LIVE_MINUTES").MustInt(180)
Service.ResetPwdCodeLives = sec.Key("RESET_PASSWD_CODE_LIVE_MINUTES").MustInt(180)
Service.DisableRegistration = sec.Key("DISABLE_REGISTRATION").MustBool()
Service.ShowRegistrationButton = sec.Key("SHOW_REGISTRATION_BUTTON").MustBool(!Service.DisableRegistration)
Service.RequireSignInView = sec.Key("REQUIRE_SIGNIN_VIEW").MustBool()
Service.EnableReverseProxyAuth = sec.Key("ENABLE_REVERSE_PROXY_AUTHENTICATION").MustBool()
Service.EnableReverseProxyAutoRegister = sec.Key("ENABLE_REVERSE_PROXY_AUTO_REGISTRATION").MustBool()
Service.EnableCaptcha = sec.Key("ENABLE_CAPTCHA").MustBool()
Service.DefaultKeepEmailPrivate = sec.Key("DEFAULT_KEEP_EMAIL_PRIVATE").MustBool()
Service.DefaultAllowCreateOrganization = sec.Key("DEFAULT_ALLOW_CREATE_ORGANIZATION").MustBool(true)
Service.DefaultEnableTimetracking = sec.Key("DEFAULT_ENABLE_TIMETRACKING").MustBool(true)
Service.DefaultAllowOnlyContributorsToTrackTime = sec.Key("DEFAULT_ALLOW_ONLY_CONTRIBUTORS_TO_TRACK_TIME").MustBool(true)
Service.NoReplyAddress = sec.Key("NO_REPLY_ADDRESS").MustString("noreply.example.org")
sec = Cfg.Section("openid")
Service.EnableOpenIDSignIn = sec.Key("ENABLE_OPENID_SIGNIN").MustBool(!InstallLock)
Service.EnableOpenIDSignUp = sec.Key("ENABLE_OPENID_SIGNUP").MustBool(!Service.DisableRegistration && Service.EnableOpenIDSignIn)
pats := sec.Key("WHITELISTED_URIS").Strings(" ")
if len(pats) != 0 {
Service.OpenIDWhitelist = make([]*regexp.Regexp, len(pats))
for i, p := range pats {
Service.OpenIDWhitelist[i] = regexp.MustCompilePOSIX(p)
}
}
pats = sec.Key("BLACKLISTED_URIS").Strings(" ")
if len(pats) != 0 {
Service.OpenIDBlacklist = make([]*regexp.Regexp, len(pats))
for i, p := range pats {
Service.OpenIDBlacklist[i] = regexp.MustCompilePOSIX(p)
}
}
}
var logLevels = map[string]string{
"Trace": "0",
"Debug": "1",
"Info": "2",
"Warn": "3",
"Error": "4",
"Critical": "5",
}
func getLogLevel(section string, key string, defaultValue string) string {
validLevels := []string{"Trace", "Debug", "Info", "Warn", "Error", "Critical"}
return Cfg.Section(section).Key(key).In(defaultValue, validLevels)
}
func newLogService() {
log.Info("Gitea v%s%s", AppVer, AppBuiltWith)
LogModes = strings.Split(Cfg.Section("log").Key("MODE").MustString("console"), ",")
LogConfigs = make([]string, len(LogModes))
useConsole := false
for i := 0; i < len(LogModes); i++ {
LogModes[i] = strings.TrimSpace(LogModes[i])
if LogModes[i] == "console" {
useConsole = true
}
}
if !useConsole {
log.DelLogger("console")
}
for i, mode := range LogModes {
sec, err := Cfg.GetSection("log." + mode)
if err != nil {
sec, _ = Cfg.NewSection("log." + mode)
}
// Log level.
levelName := getLogLevel("log."+mode, "LEVEL", LogLevel)
level, ok := logLevels[levelName]
if !ok {
log.Fatal(4, "Unknown log level: %s", levelName)
}
// Generate log configuration.
switch mode {
case "console":
LogConfigs[i] = fmt.Sprintf(`{"level":%s}`, level)
case "file":
logPath := sec.Key("FILE_NAME").MustString(path.Join(LogRootPath, "gitea.log"))
if err = os.MkdirAll(path.Dir(logPath), os.ModePerm); err != nil {
panic(err.Error())
}
LogConfigs[i] = fmt.Sprintf(
`{"level":%s,"filename":"%s","rotate":%v,"maxlines":%d,"maxsize":%d,"daily":%v,"maxdays":%d}`, level,
logPath,
sec.Key("LOG_ROTATE").MustBool(true),
sec.Key("MAX_LINES").MustInt(1000000),
1<<uint(sec.Key("MAX_SIZE_SHIFT").MustInt(28)),
sec.Key("DAILY_ROTATE").MustBool(true),
sec.Key("MAX_DAYS").MustInt(7))
case "conn":
LogConfigs[i] = fmt.Sprintf(`{"level":%s,"reconnectOnMsg":%v,"reconnect":%v,"net":"%s","addr":"%s"}`, level,
sec.Key("RECONNECT_ON_MSG").MustBool(),
sec.Key("RECONNECT").MustBool(),
sec.Key("PROTOCOL").In("tcp", []string{"tcp", "unix", "udp"}),
sec.Key("ADDR").MustString(":7020"))
case "smtp":
LogConfigs[i] = fmt.Sprintf(`{"level":%s,"username":"%s","password":"%s","host":"%s","sendTos":["%s"],"subject":"%s"}`, level,
sec.Key("USER").MustString("[email protected]"),
sec.Key("PASSWD").MustString("******"),
sec.Key("HOST").MustString("127.0.0.1:25"),
strings.Replace(sec.Key("RECEIVERS").MustString("[email protected]"), ",", "\",\"", -1),
sec.Key("SUBJECT").MustString("Diagnostic message from serve"))
case "database":
LogConfigs[i] = fmt.Sprintf(`{"level":%s,"driver":"%s","conn":"%s"}`, level,
sec.Key("DRIVER").String(),
sec.Key("CONN").String())
}
log.NewLogger(Cfg.Section("log").Key("BUFFER_LEN").MustInt64(10000), mode, LogConfigs[i])
log.Info("Log Mode: %s(%s)", strings.Title(mode), levelName)
}
}
// NewXORMLogService initializes xorm logger service
func NewXORMLogService(disableConsole bool) {
logModes := strings.Split(Cfg.Section("log").Key("MODE").MustString("console"), ",")
var logConfigs string
for _, mode := range logModes {
mode = strings.TrimSpace(mode)
if disableConsole && mode == "console" {
continue
}
sec, err := Cfg.GetSection("log." + mode)
if err != nil {
sec, _ = Cfg.NewSection("log." + mode)
}
// Log level.
levelName := getLogLevel("log."+mode, "LEVEL", LogLevel)
level, ok := logLevels[levelName]
if !ok {
log.Fatal(4, "Unknown log level: %s", levelName)
}
// Generate log configuration.
switch mode {
case "console":
logConfigs = fmt.Sprintf(`{"level":%s}`, level)
case "file":
logPath := sec.Key("FILE_NAME").MustString(path.Join(LogRootPath, "xorm.log"))
if err = os.MkdirAll(path.Dir(logPath), os.ModePerm); err != nil {
panic(err.Error())
}
logPath = path.Join(filepath.Dir(logPath), "xorm.log")
logConfigs = fmt.Sprintf(
`{"level":%s,"filename":"%s","rotate":%v,"maxlines":%d,"maxsize":%d,"daily":%v,"maxdays":%d}`, level,
logPath,
sec.Key("LOG_ROTATE").MustBool(true),
sec.Key("MAX_LINES").MustInt(1000000),
1<<uint(sec.Key("MAX_SIZE_SHIFT").MustInt(28)),
sec.Key("DAILY_ROTATE").MustBool(true),
sec.Key("MAX_DAYS").MustInt(7))
case "conn":
logConfigs = fmt.Sprintf(`{"level":%s,"reconnectOnMsg":%v,"reconnect":%v,"net":"%s","addr":"%s"}`, level,
sec.Key("RECONNECT_ON_MSG").MustBool(),
sec.Key("RECONNECT").MustBool(),
sec.Key("PROTOCOL").In("tcp", []string{"tcp", "unix", "udp"}),
sec.Key("ADDR").MustString(":7020"))
case "smtp":
logConfigs = fmt.Sprintf(`{"level":%s,"username":"%s","password":"%s","host":"%s","sendTos":"%s","subject":"%s"}`, level,
sec.Key("USER").MustString("[email protected]"),
sec.Key("PASSWD").MustString("******"),
sec.Key("HOST").MustString("127.0.0.1:25"),
sec.Key("RECEIVERS").MustString("[]"),
sec.Key("SUBJECT").MustString("Diagnostic message from serve"))
case "database":
logConfigs = fmt.Sprintf(`{"level":%s,"driver":"%s","conn":"%s"}`, level,
sec.Key("DRIVER").String(),
sec.Key("CONN").String())
}
log.NewXORMLogger(Cfg.Section("log").Key("BUFFER_LEN").MustInt64(10000), mode, logConfigs)
if !disableConsole {
log.Info("XORM Log Mode: %s(%s)", strings.Title(mode), levelName)
}
var lvl core.LogLevel
switch levelName {
case "Trace", "Debug":
lvl = core.LOG_DEBUG
case "Info":
lvl = core.LOG_INFO
case "Warn":
lvl = core.LOG_WARNING
case "Error", "Critical":
lvl = core.LOG_ERR
}
log.XORMLogger.SetLevel(lvl)
}
if len(logConfigs) == 0 {
log.DiscardXORMLogger()
}
}
// Cache represents cache settings
type Cache struct {
Adapter string
Interval int
Conn string
TTL time.Duration
}
var (
// CacheService the global cache
CacheService *Cache
)
func newCacheService() {
sec := Cfg.Section("cache")
CacheService = &Cache{
Adapter: sec.Key("ADAPTER").In("memory", []string{"memory", "redis", "memcache"}),
}
switch CacheService.Adapter {
case "memory":
CacheService.Interval = sec.Key("INTERVAL").MustInt(60)
case "redis", "memcache":
CacheService.Conn = strings.Trim(sec.Key("HOST").String(), "\" ")
default:
log.Fatal(4, "Unknown cache adapter: %s", CacheService.Adapter)
}
CacheService.TTL = sec.Key("ITEM_TTL").MustDuration(16 * time.Hour)
log.Info("Cache Service Enabled")
}
func newSessionService() {
SessionConfig.Provider = Cfg.Section("session").Key("PROVIDER").In("memory",
[]string{"memory", "file", "redis", "mysql"})
SessionConfig.ProviderConfig = strings.Trim(Cfg.Section("session").Key("PROVIDER_CONFIG").MustString(path.Join(AppDataPath, "sessions")), "\" ")
if SessionConfig.Provider == "file" && !filepath.IsAbs(SessionConfig.ProviderConfig) {
SessionConfig.ProviderConfig = path.Join(AppWorkPath, SessionConfig.ProviderConfig)
}
SessionConfig.CookieName = Cfg.Section("session").Key("COOKIE_NAME").MustString("i_like_gitea")
SessionConfig.CookiePath = AppSubURL
SessionConfig.Secure = Cfg.Section("session").Key("COOKIE_SECURE").MustBool(false)
SessionConfig.Gclifetime = Cfg.Section("session").Key("GC_INTERVAL_TIME").MustInt64(86400)
SessionConfig.Maxlifetime = Cfg.Section("session").Key("SESSION_LIFE_TIME").MustInt64(86400)
log.Info("Session Service Enabled")
}
// Mailer represents mail service.
type Mailer struct {
// Mailer
QueueLength int
Name string
From string
FromName string
FromEmail string
SendAsPlainText bool
// SMTP sender
Host string
User, Passwd string
DisableHelo bool
HeloHostname string
SkipVerify bool
UseCertificate bool
CertFile, KeyFile string
// Sendmail sender
UseSendmail bool
SendmailPath string
SendmailArgs []string
}
var (
// MailService the global mailer
MailService *Mailer
)
func newMailService() {
sec := Cfg.Section("mailer")
// Check mailer setting.
if !sec.Key("ENABLED").MustBool() {
return
}
MailService = &Mailer{
QueueLength: sec.Key("SEND_BUFFER_LEN").MustInt(100),
Name: sec.Key("NAME").MustString(AppName),
SendAsPlainText: sec.Key("SEND_AS_PLAIN_TEXT").MustBool(false),
Host: sec.Key("HOST").String(),
User: sec.Key("USER").String(),
Passwd: sec.Key("PASSWD").String(),
DisableHelo: sec.Key("DISABLE_HELO").MustBool(),
HeloHostname: sec.Key("HELO_HOSTNAME").String(),
SkipVerify: sec.Key("SKIP_VERIFY").MustBool(),
UseCertificate: sec.Key("USE_CERTIFICATE").MustBool(),
CertFile: sec.Key("CERT_FILE").String(),
KeyFile: sec.Key("KEY_FILE").String(),
UseSendmail: sec.Key("USE_SENDMAIL").MustBool(),
SendmailPath: sec.Key("SENDMAIL_PATH").MustString("sendmail"),
}
MailService.From = sec.Key("FROM").MustString(MailService.User)
if sec.HasKey("ENABLE_HTML_ALTERNATIVE") {
log.Warn("ENABLE_HTML_ALTERNATIVE is deprecated, use SEND_AS_PLAIN_TEXT")
MailService.SendAsPlainText = !sec.Key("ENABLE_HTML_ALTERNATIVE").MustBool(false)
}
parsed, err := mail.ParseAddress(MailService.From)
if err != nil {
log.Fatal(4, "Invalid mailer.FROM (%s): %v", MailService.From, err)
}
MailService.FromName = parsed.Name
MailService.FromEmail = parsed.Address
if MailService.UseSendmail {
MailService.SendmailArgs, err = shellquote.Split(sec.Key("SENDMAIL_ARGS").String())
if err != nil {
log.Error(4, "Failed to parse Sendmail args: %v", CustomConf, err)
}
}
log.Info("Mail Service Enabled")
}
func newRegisterMailService() {
if !Cfg.Section("service").Key("REGISTER_EMAIL_CONFIRM").MustBool() {
return
} else if MailService == nil {
log.Warn("Register Mail Service: Mail Service is not enabled")
return
}
Service.RegisterEmailConfirm = true
log.Info("Register Mail Service Enabled")
}
func newNotifyMailService() {
if !Cfg.Section("service").Key("ENABLE_NOTIFY_MAIL").MustBool() {
return
} else if MailService == nil {
log.Warn("Notify Mail Service: Mail Service is not enabled")
return
}
Service.EnableNotifyMail = true
log.Info("Notify Mail Service Enabled")
}
func newWebhookService() {
sec := Cfg.Section("webhook")
Webhook.QueueLength = sec.Key("QUEUE_LENGTH").MustInt(1000)
Webhook.DeliverTimeout = sec.Key("DELIVER_TIMEOUT").MustInt(5)
Webhook.SkipTLSVerify = sec.Key("SKIP_TLS_VERIFY").MustBool()
Webhook.Types = []string{"gitea", "gogs", "slack", "discord", "dingtalk"}
Webhook.PagingNum = sec.Key("PAGING_NUM").MustInt(10)
}
// NewServices initializes the services
func NewServices() {
newService()
newLogService()
NewXORMLogService(false)
newCacheService()
newSessionService()
newMailService()
newRegisterMailService()
newNotifyMailService()
newWebhookService()
}
| [
"\"GITEA_WORK_DIR\"",
"\"GITEA_CUSTOM\""
]
| []
| [
"GITEA_CUSTOM",
"GITEA_WORK_DIR"
]
| [] | ["GITEA_CUSTOM", "GITEA_WORK_DIR"] | go | 2 | 0 | |
examples/pwr_run/checkpointing/final/high_overhead/job73.py | """
#Trains a ResNet on the CIFAR10 dataset.
"""
from __future__ import print_function
import keras
from keras.layers import Dense, Conv2D, BatchNormalization, Activation
from keras.layers import AveragePooling2D, Input, Flatten
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras.callbacks import ReduceLROnPlateau, TensorBoard
from keras.preprocessing.image import ImageDataGenerator
from keras.regularizers import l2
from keras import backend as K
from keras.models import Model
from keras.datasets import cifar10
from keras.applications.vgg16 import VGG16
from keras.applications.vgg19 import VGG19
from keras import models, layers, optimizers
from datetime import datetime
import tensorflow as tf
import numpy as np
import os
import pdb
import sys
import argparse
import time
import signal
import glob
import json
import send_signal
parser = argparse.ArgumentParser(description='Tensorflow Cifar10 Training')
parser.add_argument('--tc', metavar='TESTCASE', type=str, help='specific testcase name')
parser.add_argument('--resume', dest='resume', action='store_true', help='if True, resume training from a checkpoint')
parser.add_argument('--gpu_num', metavar='GPU_NUMBER', type=str, help='select which gpu to use')
parser.add_argument('--node', metavar='HOST_NODE', type=str, help='node of the host (scheduler)')
parser.set_defaults(resume=False)
args = parser.parse_args()
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]=args.gpu_num
# Training parameters
batch_size = 128
args_lr = 0.002
args_model = 'vgg19'
epoch_begin_time = 0
job_name = sys.argv[0].split('.')[0]
save_files = '/scratch/li.baol/checkpoint_final4/' + job_name + '*'
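# glob pattern matching any checkpoint file previously saved by this job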
total_epochs = 67
starting_epoch = 0
# first step is to update the PID
pid = os.getpid()
message = job_name + ' pid ' + str(pid) # 'job50 pid 3333'
send_signal.send(args.node, 10002, message)
if args.resume:
save_file = glob.glob(save_files)[0]
# epochs = int(save_file.split('/')[4].split('_')[1].split('.')[0])
starting_epoch = int(save_file.split('/')[4].split('.')[0].split('_')[-1])
data_augmentation = True
num_classes = 10
# Subtracting pixel mean improves accuracy
subtract_pixel_mean = True
n = 3
# Model name, depth and version
model_type = args.tc #'P100_resnet50_he_256_1'
# Load the CIFAR10 data.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Normalize data.
x_train = x_train.astype('float32') / 255
x_test = x_test.astype('float32') / 255
# If subtract pixel mean is enabled
if subtract_pixel_mean:
x_train_mean = np.mean(x_train, axis=0)
x_train -= x_train_mean
x_test -= x_train_mean
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
print('y_train shape:', y_train.shape)
# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
if args.resume:
print('resume from checkpoint')
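# fixed delay before restoring the checkpoint (appears to model restore overhead in this benchmark)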
time.sleep(100)
message = job_name + ' b_end'
send_signal.send(args.node, 10002, message)
model = keras.models.load_model(save_file)
message = job_name + ' c_end'
send_signal.send(args.node, 10002, message)
else:
print('train from start')
model = models.Sequential()
if '16' in args_model:
base_model = VGG16(weights=None, include_top=False, input_shape=(32, 32, 3), pooling=None)
elif '19' in args_model:
base_model = VGG19(weights=None, include_top=False, input_shape=(32, 32, 3), pooling=None)
#base_model.summary()
#pdb.set_trace()
model.add(base_model)
model.add(layers.Flatten())
model.add(layers.BatchNormalization())
model.add(layers.Dense(128, activation='relu'))#, kernel_initializer='he_uniform'))
#model.add(layers.Dropout(0.2))
model.add(layers.BatchNormalization())
model.add(layers.Dense(64, activation='relu'))#, kernel_initializer='he_uniform'))
#model.add(layers.Dropout(0.2))
model.add(layers.BatchNormalization())
model.add(layers.Dense(10, activation='softmax'))#, kernel_initializer='he_uniform'))
model.compile(loss='categorical_crossentropy',
optimizer=Adam(lr=args_lr),
metrics=['accuracy'])
#model.summary()
print(model_type)
#pdb.set_trace()
current_epoch = 0
################### connects interrupt signal to the process #####################
def terminateProcess(signalNumber, frame):
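# SIGTERM handler: report wasted epoch time, checkpoint the model, notify the scheduler, then exit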
# first record the wasted epoch time
global epoch_begin_time
if epoch_begin_time == 0:
epoch_waste_time = 0
else:
epoch_waste_time = int(time.time() - epoch_begin_time)
message = job_name + ' waste ' + str(epoch_waste_time) # 'job50 waste 100'
if epoch_waste_time > 0:
send_signal.send(args.node, 10002, message)
print('checkpointing the model triggered by kill -15 signal')
# delete whatever checkpoint that already exists
for f in glob.glob(save_files):
os.remove(f)
model.save('/scratch/li.baol/checkpoint_final4/' + job_name + '_' + str(current_epoch) + '.h5')
print('(SIGTERM) terminating the process')
message = job_name + ' checkpoint'
send_signal.send(args.node, 10002, message)
sys.exit()
signal.signal(signal.SIGTERM, terminateProcess)
#################################################################################
logdir = '/scratch/li.baol/tsrbrd_log/job_runs/' + model_type + '/' + job_name
tensorboard_callback = TensorBoard(log_dir=logdir)#, update_freq='batch')
first_epoch_start = 0
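# Keras callback that reports per-epoch progress, first-epoch duration, and checkpoint eligibility to the scheduler node over the signalling channel (port 10002)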
class PrintEpoch(keras.callbacks.Callback):
def on_epoch_begin(self, epoch, logs=None):
global current_epoch, first_epoch_start
#remaining_epochs = epochs - epoch
current_epoch = epoch
print('current epoch ' + str(current_epoch))
global epoch_begin_time
epoch_begin_time = time.time()
if epoch == starting_epoch and args.resume:
first_epoch_start = time.time()
message = job_name + ' d_end'
send_signal.send(args.node, 10002, message)
elif epoch == starting_epoch:
first_epoch_start = time.time()
if epoch == starting_epoch:
# send signal to indicate checkpoint is qualified
message = job_name + ' ckpt_qual'
send_signal.send(args.node, 10002, message)
def on_epoch_end(self, epoch, logs=None):
if epoch == starting_epoch:
first_epoch_time = int(time.time() - first_epoch_start)
message = job_name + ' 1st_epoch ' + str(first_epoch_time)
send_signal.send(args.node, 10002, message)
progress = round((epoch+1) / round(total_epochs/2), 2)
message = job_name + ' completion ' + str(progress)
send_signal.send(args.node, 10002, message)
my_callback = PrintEpoch()
callbacks = [tensorboard_callback, my_callback]
#[checkpoint, lr_reducer, lr_scheduler, tensorboard_callback]
# Run training
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=round(total_epochs/2),
validation_data=(x_test, y_test),
shuffle=True,
callbacks=callbacks,
initial_epoch=starting_epoch,
verbose=1
)
# Score trained model.
scores = model.evaluate(x_test, y_test, verbose=1)
print('Test loss:', scores[0])
print('Test accuracy:', scores[1])
# send signal to indicate job has finished
message = job_name + ' finish'
send_signal.send(args.node, 10002, message)
| []
| []
| [
"CUDA_DEVICE_ORDER",
"CUDA_VISIBLE_DEVICES"
]
| [] | ["CUDA_DEVICE_ORDER", "CUDA_VISIBLE_DEVICES"] | python | 2 | 0 | |
test/artifact_bucket_test.go | // +build e2e
/*
Copyright 2018 Knative Authors LLC
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package test
import (
"fmt"
"io/ioutil"
"os"
"testing"
"time"
knativetest "github.com/knative/pkg/test"
"github.com/tektoncd/pipeline/pkg/apis/pipeline/v1alpha1"
tb "github.com/tektoncd/pipeline/test/builder"
corev1 "k8s.io/api/core/v1"
metav1 "k8s.io/apimachinery/pkg/apis/meta/v1"
)
const (
helloworldResourceName = "helloworldgit"
addFileTaskName = "add-file-to-resource-task"
readFileTaskName = "read-new-file-task"
bucketTestPipelineName = "bucket-test-pipeline"
bucketTestPipelineRunName = "bucket-test-pipeline-run"
systemNamespace = "tekton-pipelines"
bucketSecretName = "bucket-secret"
bucketSecretKey = "bucket-secret-key"
)
// TestStorageBucketPipelineRun is an integration test that will verify a pipeline
// can use a bucket for temporary storage of artifacts shared between tasks
func TestStorageBucketPipelineRun(t *testing.T) {
configFilePath := os.Getenv("GCP_SERVICE_ACCOUNT_KEY_PATH")
if configFilePath == "" {
t.Skip("GCP_SERVICE_ACCOUNT_KEY_PATH variable is not set.")
}
c, namespace := setup(t)
t.Parallel()
knativetest.CleanupOnInterrupt(func() { tearDown(t, c, namespace) }, t.Logf)
defer tearDown(t, c, namespace)
bucketName := fmt.Sprintf("build-pipeline-test-%s-%d", namespace, time.Now().Unix())
t.Logf("Creating Secret %s", bucketSecretName)
if _, err := c.KubeClient.Kube.CoreV1().Secrets(namespace).Create(getBucketSecret(t, configFilePath, namespace)); err != nil {
t.Fatalf("Failed to create Secret `%s`: %s", bucketSecretName, err)
}
defer deleteBucketSecret(c, t, namespace)
t.Logf("Creating GCS bucket %s", bucketName)
createbuckettask := tb.Task("createbuckettask", namespace, tb.TaskSpec(
tb.TaskVolume("bucket-secret-volume", tb.VolumeSource(corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
SecretName: bucketSecretName,
},
})),
tb.Step("step1", "google/cloud-sdk:alpine",
tb.Command("/bin/bash"),
tb.Args("-c", fmt.Sprintf("gcloud auth activate-service-account --key-file /var/secret/bucket-secret/bucket-secret-key && gsutil mb gs://%s", bucketName)),
tb.VolumeMount("bucket-secret-volume", fmt.Sprintf("/var/secret/%s", bucketSecretName)),
tb.EnvVar("CREDENTIALS", fmt.Sprintf("/var/secret/%s/%s", bucketSecretName, bucketSecretKey)),
),
),
)
t.Logf("Creating Task %s", "createbuckettask")
if _, err := c.TaskClient.Create(createbuckettask); err != nil {
t.Fatalf("Failed to create Task `%s`: %s", "createbuckettask", err)
}
createbuckettaskrun := tb.TaskRun("createbuckettaskrun", namespace,
tb.TaskRunSpec(tb.TaskRunTaskRef("createbuckettask")))
t.Logf("Creating TaskRun %s", "createbuckettaskrun")
if _, err := c.TaskRunClient.Create(createbuckettaskrun); err != nil {
t.Fatalf("Failed to create TaskRun `%s`: %s", "createbuckettaskrun", err)
}
if err := WaitForTaskRunState(c, "createbuckettaskrun", TaskRunSucceed("createbuckettaskrun"), "TaskRunSuccess"); err != nil {
t.Errorf("Error waiting for TaskRun %s to finish: %s", "createbuckettaskrun", err)
}
defer runTaskToDeleteBucket(c, t, namespace, bucketName, bucketSecretName, bucketSecretKey)
originalConfigMap, err := c.KubeClient.Kube.CoreV1().ConfigMaps(systemNamespace).Get(v1alpha1.BucketConfigName, metav1.GetOptions{})
if err != nil {
t.Fatalf("Failed to get ConfigMap `%s`: %s", v1alpha1.BucketConfigName, err)
}
originalConfigMapData := originalConfigMap.Data
t.Logf("Creating ConfigMap %s", v1alpha1.BucketConfigName)
configMapData := map[string]string{
v1alpha1.BucketLocationKey: fmt.Sprintf("gs://%s", bucketName),
v1alpha1.BucketServiceAccountSecretName: bucketSecretName,
v1alpha1.BucketServiceAccountSecretKey: bucketSecretKey,
}
updateConfigMap(c.KubeClient, systemNamespace, v1alpha1.BucketConfigName, configMapData)
defer resetConfigMap(c, systemNamespace, v1alpha1.BucketConfigName, originalConfigMapData)
t.Logf("Creating Git PipelineResource %s", helloworldResourceName)
helloworldResource := tb.PipelineResource(helloworldResourceName, namespace, tb.PipelineResourceSpec(
v1alpha1.PipelineResourceTypeGit,
tb.PipelineResourceSpecParam("Url", "https://github.com/pivotal-nader-ziada/gohelloworld"),
tb.PipelineResourceSpecParam("Revision", "master"),
),
)
if _, err := c.PipelineResourceClient.Create(helloworldResource); err != nil {
t.Fatalf("Failed to create Pipeline Resource `%s`: %s", helloworldResourceName, err)
}
t.Logf("Creating Task %s", addFileTaskName)
addFileTask := tb.Task(addFileTaskName, namespace, tb.TaskSpec(
tb.TaskInputs(tb.InputsResource(helloworldResourceName, v1alpha1.PipelineResourceTypeGit)),
tb.TaskOutputs(tb.OutputsResource(helloworldResourceName, v1alpha1.PipelineResourceTypeGit)),
tb.Step("addfile", "ubuntu", tb.Command("/bin/bash"),
tb.Args("-c", "echo stuff > /workspace/helloworldgit/newfile"),
),
))
if _, err := c.TaskClient.Create(addFileTask); err != nil {
t.Fatalf("Failed to create Task `%s`: %s", addFileTaskName, err)
}
t.Logf("Creating Task %s", readFileTaskName)
readFileTask := tb.Task(readFileTaskName, namespace, tb.TaskSpec(
tb.TaskInputs(tb.InputsResource(helloworldResourceName, v1alpha1.PipelineResourceTypeGit)),
tb.Step("readfile", "ubuntu", tb.Command("/bin/bash"),
tb.Args("-c", "cat /workspace/helloworldgit/newfile"),
),
))
if _, err := c.TaskClient.Create(readFileTask); err != nil {
t.Fatalf("Failed to create Task `%s`: %s", readFileTaskName, err)
}
t.Logf("Creating Pipeline %s", bucketTestPipelineName)
bucketTestPipeline := tb.Pipeline(bucketTestPipelineName, namespace, tb.PipelineSpec(
tb.PipelineDeclaredResource("source-repo", "git"),
tb.PipelineTask("addfile", addFileTaskName,
tb.PipelineTaskInputResource("helloworldgit", "source-repo"),
tb.PipelineTaskOutputResource("helloworldgit", "source-repo"),
),
tb.PipelineTask("readfile", readFileTaskName,
tb.PipelineTaskInputResource("helloworldgit", "source-repo", tb.From("addfile")),
),
))
if _, err := c.PipelineClient.Create(bucketTestPipeline); err != nil {
t.Fatalf("Failed to create Pipeline `%s`: %s", bucketTestPipelineName, err)
}
t.Logf("Creating PipelineRun %s", bucketTestPipelineRunName)
bucketTestPipelineRun := tb.PipelineRun(bucketTestPipelineRunName, namespace, tb.PipelineRunSpec(
bucketTestPipelineName,
tb.PipelineRunResourceBinding("source-repo", tb.PipelineResourceBindingRef(helloworldResourceName)),
))
if _, err := c.PipelineRunClient.Create(bucketTestPipelineRun); err != nil {
t.Fatalf("Failed to create PipelineRun `%s`: %s", bucketTestPipelineRunName, err)
}
// Verify status of PipelineRun (wait for it)
if err := WaitForPipelineRunState(c, bucketTestPipelineRunName, timeout, PipelineRunSucceed(bucketTestPipelineRunName), "PipelineRunCompleted"); err != nil {
t.Errorf("Error waiting for PipelineRun %s to finish: %s", bucketTestPipelineRunName, err)
taskruns, err := c.TaskRunClient.List(metav1.ListOptions{})
if err != nil {
t.Errorf("Error getting TaskRun list for PipelineRun %s %s", bucketTestPipelineRunName, err)
}
for _, tr := range taskruns.Items {
if tr.Status.PodName != "" {
CollectBuildLogs(c, tr.Status.PodName, namespace, t.Logf)
}
}
t.Fatalf("PipelineRun execution failed")
}
}
// updateConfigMap updates the config map for specified @name with values. We can't use the one from knativetest because
// it assumes that Data is already a non-nil map, and by default, it isn't!
func updateConfigMap(client *knativetest.KubeClient, name string, configName string, values map[string]string) error {
configMap, err := client.GetConfigMap(name).Get(configName, metav1.GetOptions{})
if err != nil {
return err
}
if configMap.Data == nil {
configMap.Data = make(map[string]string)
}
for key, value := range values {
configMap.Data[key] = value
}
_, err = client.GetConfigMap(name).Update(configMap)
return err
}
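// getBucketSecret builds the Kubernetes Secret holding the GCP service-account key used to access the test bucket.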
func getBucketSecret(t *testing.T, configFilePath, namespace string) *corev1.Secret {
t.Helper()
f, err := ioutil.ReadFile(configFilePath)
if err != nil {
t.Fatalf("Failed to read json key file %s at path %s", err, configFilePath)
}
return &corev1.Secret{
ObjectMeta: metav1.ObjectMeta{
Namespace: namespace,
Name: bucketSecretName,
},
StringData: map[string]string{
bucketSecretKey: string(f),
},
}
}
func deleteBucketSecret(c *clients, t *testing.T, namespace string) {
if err := c.KubeClient.Kube.CoreV1().Secrets(namespace).Delete(bucketSecretName, &metav1.DeleteOptions{}); err != nil {
t.Fatalf("Failed to delete Secret `%s`: %s", bucketSecretName, err)
}
}
func resetConfigMap(c *clients, namespace, configName string, values map[string]string) error {
return updateConfigMap(c.KubeClient, namespace, configName, values)
}
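// runTaskToDeleteBucket runs a cleanup Task that authenticates with the bucket secret and removes the temporary GCS bucket via gsutil.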
func runTaskToDeleteBucket(c *clients, t *testing.T, namespace, bucketName, bucketSecretName, bucketSecretKey string) {
deletelbuckettask := tb.Task("deletelbuckettask", namespace, tb.TaskSpec(
tb.TaskVolume("bucket-secret-volume", tb.VolumeSource(corev1.VolumeSource{
Secret: &corev1.SecretVolumeSource{
SecretName: bucketSecretName,
},
})),
tb.Step("step1", "google/cloud-sdk:alpine",
tb.Command("/bin/bash"),
tb.Args("-c", fmt.Sprintf("gcloud auth activate-service-account --key-file /var/secret/bucket-secret/bucket-secret-key && gsutil rm -r gs://%s", bucketName)),
tb.VolumeMount("bucket-secret-volume", fmt.Sprintf("/var/secret/%s", bucketSecretName)),
tb.EnvVar("CREDENTIALS", fmt.Sprintf("/var/secret/%s/%s", bucketSecretName, bucketSecretKey)),
),
),
)
t.Logf("Creating Task %s", "deletelbuckettask")
if _, err := c.TaskClient.Create(deletelbuckettask); err != nil {
t.Fatalf("Failed to create Task `%s`: %s", "deletelbuckettask", err)
}
deletelbuckettaskrun := tb.TaskRun("deletelbuckettaskrun", namespace,
tb.TaskRunSpec(tb.TaskRunTaskRef("deletelbuckettask")))
t.Logf("Creating TaskRun %s", "deletelbuckettaskrun")
if _, err := c.TaskRunClient.Create(deletelbuckettaskrun); err != nil {
t.Fatalf("Failed to create TaskRun `%s`: %s", "deletelbuckettaskrun", err)
}
if err := WaitForTaskRunState(c, "deletelbuckettaskrun", TaskRunSucceed("deletelbuckettaskrun"), "TaskRunSuccess"); err != nil {
t.Errorf("Error waiting for TaskRun %s to finish: %s", "deletelbuckettaskrun", err)
}
}
| [
"\"GCP_SERVICE_ACCOUNT_KEY_PATH\""
]
| []
| [
"GCP_SERVICE_ACCOUNT_KEY_PATH"
]
| [] | ["GCP_SERVICE_ACCOUNT_KEY_PATH"] | go | 1 | 0 | |
docs/conf.py | # -*- coding: utf-8 -*-
#
# discord.py documentation build configuration file, created by
# sphinx-quickstart on Fri Aug 21 05:43:30 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import re
on_rtd = os.getenv('READTHEDOCS') == 'True'
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.extlinks',
]
if on_rtd:
extensions.append('sphinxcontrib.napoleon')
else:
extensions.append('sphinx.ext.napoleon')
autodoc_member_order = 'bysource'
extlinks = {
'issue': ('https://github.com/Rapptz/discord.py/issues/%s', 'issue '),
}
rst_prolog = """
.. |coro| replace:: This function is a |corourl|_.
.. |corourl| replace:: *coroutine*
.. _corourl: https://docs.python.org/3/library/asyncio-task.html#coroutine
"""
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'discord.py'
copyright = u'2015, Rapptz'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = ''
with open('../discord/__init__.py') as f:
version = re.search(r'^__version__\s*=\s*[\'"]([^\'"]*)[\'"]', f.read(), re.MULTILINE).group(1)
# The full version, including alpha/beta/rc tags.
release = version
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'sphinx_rtd_theme'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {
# }
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'discord.pydoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'discord.py.tex', u'discord.py Documentation',
u'Rapptz', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'discord.py', u'discord.py Documentation',
[u'Rapptz'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'discord.py', u'discord.py Documentation',
u'Rapptz', 'discord.py', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| []
| []
| [
"READTHEDOCS"
]
| [] | ["READTHEDOCS"] | python | 1 | 0 | |
mysql_test_slave_status.py | #! /usr/bin/env python
"""Slave Status/Monitor
Usage:
mysql_slave_status.py [options]
Options:
-h, --help Show this screen
-v, --verbose Verbose output
-V, --verbose-pretty Pretty print output
--version Show version
Exit Codes:
0 all is okay
1 sql thread is dead
2 slave isn't running
3 behind
-1 error
"""
from __future__ import print_function
import json
import sys
import os
from datetime import datetime
import MySQLdb
from docopt import docopt
__version__ = '0.1.1'
'''
0 master
1 slave is broken
2 slave is running
select COUNT(1) SlaveThreads from information_schema.processlist where user = 'system user'
GRANT SELECT ON `performance_schema`.`threads` TO 'slavemon'@'localhost';
grant REPLICATION CLIENT on *.* to 'slavemon'@'localhost';
'''
def master_status(values):
'''
File
Position
Binlog_Do_DB
Executed_Gtid_Set
'''
file, position = (None,)*2
try:
file = values['File']
position = values['Position']
except KeyError, e:
# this may mean not a master or no rows
print(e)
def slave_status(values):
(slave_running, slave_io_running, seconds_behind,
last_error_no, last_error, last_error_time, last_error_time_diff) = (None,)*7
date = datetime.strftime(datetime.now(), '%Y/%m/%d %H:%M:%S')
try:
if values['Slave_IO_Running'] == 'Yes' and values['Slave_SQL_Running'] == 'Yes' :
slave_running = 1
slave_io_running = 1
seconds_behind = int(values['Seconds_Behind_Master'])
else:
slave_running = 0
slave_io_running = 1 if values['Slave_IO_Running'] == 'Yes' else 0
last_error_no = values['Last_Errno']
if last_error_no > 0L:
last_error = values['Last_Error']
last_error_time_str = values['Last_SQL_Error_Timestamp']
last_error_time_diff = (datetime.now() -
datetime.strptime(last_error_time_str,
'%y%m%d %H:%M:%S')).seconds
last_error_time = datetime.strftime(
datetime.strptime(last_error_time_str,
'%y%m%d %H:%M:%S'),
'%Y/%m/%d %H:%M:%S')
except KeyError, e:
print(e)
return ((slave_running, slave_io_running, seconds_behind),
(last_error_no,last_error,last_error_time,
last_error_time_diff), date)
def pretty_print_status(status):
if status[0][0:2] == (1,1):
print('Slave is running and {0} seconds behind'.format(status[0][2]))
elif status[0][0:2] == (0,0):
print('Slave is not running')
elif status[0][0:2] == (1,0) or status[1][0]>0L:
print('Error No: {0}\n'.format(status[1][0]))
print('{0}\n'.format(status[1][1]))
print('Error occured at {0} and has been down for {1} seconds\n'.format(*status[1][2:]))
def calc_exit_status(status):
''' calculate exit status for shell exit
0 all is okay
1 sql thread is dead
2 slave isn't running
3 behind
-1 error
'''
assert isinstance(status, (tuple, long)), \
'Bad status passed to calc_exit_status'
if isinstance(status, tuple):
if status[2] > cnf["max_seconds_behind"]:
exit_status = 3
elif status[1]==1 and status[0]==0:
exit_status = 1
elif status[1]==0 and status[0]==1:
exit_status = 2
elif status[1]==0 and status[0]==0:
exit_status = 2
else:
exit_status = 0
else:
if status == 2:
exit_status = 0
elif status == 1:
exit_status = 1
else:
exit_status = 2
return exit_status
def main(**args):
global cnf
user = os.environ['LOGNAME']
if user == 'root':
passwd_file = os.path.join('/', user, '.mysql_passwd')
else:
passwd_file = os.path.join('/home', user, '.mysql_passwd')
with open(passwd_file) as data_file:
cnf = json.load(data_file)
#host = cnf['host']
try:
db = MySQLdb.connect(host=cnf['host'],
user='slavemon',
passwd=cnf["passwords"]['slavemon'],
db='',
port=cnf['port']
)
except KeyError, e:
print("The key {0} doesn't exist in config file".format(str(e)))
print("Exiting ...")
sys.exit(-1)
except Exception, e:
print(e)
sys.exit(-1)
c = db.cursor()
dc = db.cursor(cursorclass=MySQLdb.cursors.DictCursor)
try:
#n = c.execute("select COUNT(1) SlaveThreads from information_schema.processlist where user = 'system user'")
n = c.execute("select count(*) from performance_schema.threads where name like 'thread/sql/slave%'")
count_values = c.fetchone()
n1 = dc.execute("show slave status")
slave_values = dc.fetchone()
n = dc.execute("show master status")
master_values = dc.fetchone()
except MySQLdb.Error, e:
print('MySQL ERROR: {0}: {1}'.format(*e))
if e[0] == 1142:
print('You may need to grant select on performance_schema.threads to the user')
print(" grant select on performance_schema.threads to {0}@'127.0.0.1'".format('slavemon'))
else:
status = slave_status(slave_values)
exit_status = calc_exit_status(status[0])
if args['--verbose']:
print(status)
elif args['--verbose-pretty']:
pretty_print_status(status)
dc.close()
c.close()
db.close()
return exit_status, status[0][2]
if __name__ == '__main__':
args = docopt(__doc__, version=__version__)
exit_status, sec_behind = main(**args)
if not(args['--verbose'] or args['--verbose-pretty']):
print('Exiting Status: {0} Seconds Behind: {1}'.format(exit_status, sec_behind))
sys.exit(exit_status)
#print(arguments)
| []
| []
| [
"LOGNAME"
]
| [] | ["LOGNAME"] | python | 1 | 0 | |
climatecontrol/core.py | """Climate parser."""
import logging
from contextlib import contextmanager
from copy import deepcopy
from itertools import chain
from pathlib import Path
from pprint import pformat
from typing import (
Any,
Callable,
Iterable,
Iterator,
List,
Mapping,
MutableMapping,
MutableSequence,
Optional,
Sequence,
Tuple,
TypeVar,
Union,
)
import wrapt
from climatecontrol.constants import REMOVED
from climatecontrol.env_parser import EnvParser
from climatecontrol.file_loaders import FileLoader, iter_load
from climatecontrol.fragment import Fragment, FragmentPath
from climatecontrol.logtools import DEFAULT_LOG_SETTINGS, logging_config
from climatecontrol.processors import (
replace_from_content_vars,
replace_from_env_vars,
replace_from_file_vars,
)
from climatecontrol.utils import merge_nested
try:
import click
except ImportError:
click = None # type: ignore
logger = logging.getLogger(__name__)
T = TypeVar("T", bound=wrapt.ObjectProxy)
class ObjectProxy(wrapt.ObjectProxy):
"""Simple object proxy with added representation of wrapped object."""
def __repr__(self) -> str:
return repr(self.__wrapped__)
class SettingsItem(ObjectProxy):
"""Object proxy for representing a nested settings item.
An object proxy acts like the underlying object but adds functionality on top.
In this case the SettingsItem object enforces immutability, because mutating a
settings item would not be reflected in the underlying :class:`Climate` data
and could therefore behave unexpectedly across reloads and updates.
The settings item object also ensures that any "nested" objects are
represented as :class:`SettingsItem` objects.
Examples:
>>> climate = Climate()
>>> s = SettingsItem({'a': 5, 'b': {'c': 6}}, climate, FragmentPath([]))
>>> s
{'a': 5, 'b': {'c': 6}}
>>> s.a
5
>>> s.b
{'c': 6}
>>> type(s.b)
<class 'climatecontrol.core.SettingsItem'>
"""
def __init__(self, wrapped, climate: "Climate", path: FragmentPath) -> None:
super().__init__(wrapped)
self._self_climate = climate
self._self_path = path
def __repr__(self) -> str:
self._self_climate.ensure_initialized()
return super().__repr__()
def __getattr__(self, key):
self._self_climate.ensure_initialized()
try:
result = getattr(self.__wrapped__, key)
except AttributeError as e:
try:
result = self.__wrapped__[key]
except (TypeError, KeyError):
raise e
if self._self_is_mutable(result):
return type(self)(
result,
self._self_climate,
type(self._self_path)(list(self._self_path) + [key]),
)
return result
def __deepcopy__(self: T, memo: dict) -> T:
return type(self)(
deepcopy(self.__wrapped__, memo), self._self_climate, self._self_path
)
def __setattr__(self, key: str, value) -> None:
is_proxy_key = hasattr(key, "startswith") and key.startswith("_self_")
if not is_proxy_key:
raise TypeError(f"{type(self)} does not support attribute assignment")
super().__setattr__(key, value)
def __delattr__(self, key: str) -> None:
is_proxy_key = hasattr(key, "startswith") and key.startswith("_self_")
if not is_proxy_key:
raise TypeError(f"{type(self)} does not support attribute deletion")
super().__delattr__(key)
def __getitem__(self, key):
self._self_climate.ensure_initialized()
result = self.__wrapped__.__getitem__(key)
if self._self_is_mutable(result):
return type(self)(
result,
self._self_climate,
type(self._self_path)(list(self._self_path) + [key]),
)
return result
def __setitem__(self, key, value) -> None:
raise TypeError(f"{type(self)} does not support item assignment")
def __delitem__(self, key) -> None:
raise TypeError(f"{type(self)} does not support item deletion")
@classmethod
def _self_is_mutable(cls, value: Any) -> bool:
return isinstance(value, (MutableMapping, MutableSequence))
class Climate:
"""A Climate instance allows settings to be loaded from a settings file or environment variables.
Attributes:
settings_files: If set, a sequence of paths to settings files (json, yaml or toml
format) from which all settings are loaded. The files are
loaded one after another with variables set in later files
overwriting values set in previous files.
env_parser: `EnvParser` object handling the parsing of environment variables
parser: If given, defines a custom function to further process the
result of the settings. The function should take a single
nested dictionary argument (the settings map) as an argument
and output a nested dictionary.
Args:
settings_files: See attribute
parser: See attribute
**env_parser_kwargs: Arguments passed to :class:`EnvParser` constructor.
Example:
>>> import os
>>> os.environ['MY_APP_VALUE0'] = 'test0'
>>> os.environ['MY_APP_SECTION1__SUB1'] = 'test1'
>>> os.environ['MY_APP_SECTION2__SUB2'] = 'test2'
>>> os.environ['MY_APP_SECTION2__SUB3'] = 'test3'
>>> climate = Climate(prefix='MY_APP')
>>> dict(climate.settings)
{'value0': 'test0', 'section1': {'sub1': 'test1'}, 'section2': {'sub2': 'test2', 'sub3': 'test3'}}
See Also:
EnvParser
"""
settings_files: List[Union[str, Path]]
_combined_fragment: Fragment
_updates: List
_fragments: List[Fragment]
_data: Any
_initialized: bool
_processors: Tuple[Callable[[Fragment], Iterator[Fragment]], ...] = (
replace_from_file_vars,
replace_from_env_vars,
replace_from_content_vars,
)
def __init__(
self,
settings_files: Union[str, Path, Sequence[Union[str, Path]]] = (),
parser: Optional[Callable[[Mapping], Mapping]] = None,
**env_parser_kwargs: Any,
) -> None:
"""Initialize settings object."""
self.env_parser = EnvParser(**(env_parser_kwargs or {}))
self.parser = parser
if isinstance(settings_files, (str, Path)):
self.settings_files = [settings_files]
else:
self.settings_files = list(settings_files)
self._updates = []
self._fragments = []
self._initialized = False
# We use an object proxy here so that the reference to the object is always the same.
# Note that instead of assigning _data directly, we reinitialize it using self._set_data(new_obj).
self._data = ObjectProxy(None)
self._combined_fragment = Fragment(None)
def __repr__(self) -> str:
return self.__class__.__qualname__ + "[\n{}\n]".format(pformat(self._data))
@property
def parser(self) -> Optional[Callable[[Mapping], Mapping]]:
"""Return settings parser function."""
return self._parse
@parser.setter
def parser(self, value: Optional[Callable[[Mapping], Mapping]]) -> None:
"""Set the settings parser function."""
self._parse = value
@property
def settings(self) -> Any:
"""Return a settings item proxy for easy access to settings hierarchy."""
self.ensure_initialized()
return SettingsItem(self._data, self, FragmentPath())
@property
def inferred_settings_files(self) -> List[Path]:
"""Infer settings files from current directory and parent directories.
1. Search upward until a repository root is found (symbolized by a git repository or another project root marker such as setup.py or pyproject.toml)
2. Along the directories from the project root down to the current directory, search for the following files:
* Files matching the pattern: `*<prefix>*settings*<loadable filetype>`
* Files matching the pattern above but within subdirectories named `*<prefix>*settings*`
* Files matching the pattern above in any recursive subdirectories of the subdirectory mentioned above
Note that the prefix is lower cased even if it is given as upper or mixed case.
Given a filestructure:
::
|-- myuser/
|-- unused_climatecontrol_settings.yaml
|-- myrepo/
|-- .git/
|-- base-climatecontrol-settings.json
|-- climatecontrol_settings/
|-- 01.toml
|-- 02.yml
|-- 0/
|-- settings.yml
|-- 1/
|-- settings.json
|-- myproject/
|-- climatecontrol.general.settings.yaml
|-- mysubproject/
|-- .climatecontrol.settings.yaml
and assuming the current working directory is `myuser/myproject/mysubproject`, the inferred settings files would be:
::
myuser/myrepo/base-climatecontrol-settings.json
myuser/myrepo/climatecontrol_settings/01.toml
myuser/myrepo/climatecontrol_settings/02.yml
myuser/myrepo/climatecontrol_settings/0/settings.yml
myuser/myrepo/climatecontrol_settings/1/settings.json
myuser/myproject/climatecontrol.general.settings.yaml
myuser/mysubproject/.climatecontrol.settings.yaml
"""
prefix = self.env_parser.prefix.strip(self.env_parser.split_char).lower()
base_pattern = f"*{prefix}*settings"
extensions = [
ext
for loader in FileLoader.registered_loaders
for ext in loader.valid_file_extensions
]
def find_settings_files(path: Path, glob_pattern: str, recursive=False):
glob = path.rglob if recursive else path.glob
filepaths = []
for ext in extensions:
for filepath in glob(f"{glob_pattern}{ext}"):
if filepath.is_file():
filepaths.append(filepath)
return sorted(filepaths)
# Find all directories between current directory and project root
search_directories: List[Path] = []
project_root_candidates = [
".git",
".hg",
"setup.py",
"requirements.txt",
"environment.yml",
"environment.yaml",
"pyproject.toml",
]
current_path: Path = Path(".")
while True:
search_directories.append(current_path)
new_current_path = current_path / ".."
if (
any(
(current_path / candidate).exists()
for candidate in project_root_candidates
)
or not new_current_path.is_dir()
or new_current_path.resolve() == current_path.resolve()
):
break
current_path = new_current_path
# Iterate over all directories and find files
filepaths: List[Path] = []
for directory in reversed(search_directories):
filepaths.extend(find_settings_files(directory, base_pattern))
for sub_dir in directory.glob(base_pattern):
if not sub_dir.is_dir():
continue
# Use all files with valid file extensions if already in settings directory.
filepaths.extend(find_settings_files(sub_dir, "*", recursive=True))
return filepaths
@property
def update_log(self) -> str:
"""Log of all each loaded settings variable."""
def iter_fragment_lines(fragment: Fragment) -> Iterator[str]:
for leaf in fragment.iter_leaves():
action = "removed" if leaf.value == REMOVED else "loaded"
yield action + " " + ".".join(
str(p) for p in leaf.path
) + " from " + str(leaf.source)
lines = chain.from_iterable(
iter_fragment_lines(fragment) for fragment in self._fragments
)
result = "\n".join(lines)
return result
def clear(self) -> None:
"""Remove all data and reset to initial state."""
self._updates.clear()
self._fragments.clear()
self._initialized = False # next access should reload all fragments
def ensure_initialized(self):
"""Ensure that object is initialized and reload if it is not."""
if not self._initialized:
self.reload()
def reload(self) -> None:
"""Reload data from all sources.
Updates that were applied manually (through code) are not discarded. Use
:meth:`clear` for that.
"""
parsed, combined, fragments = self._stateless_reload(self._updates)
self._set_state(parsed, combined, fragments, self._updates)
def update(
self, update_data: Mapping = None, path: Union[str, int, Sequence] = None
) -> None:
"""Update settings using a patch dictionary.
Args:
update_data: Updates for settings. This is equivalent to `dict.update` except
that the update is recursive for nested dictionaries.
Example:
>>> import os
>>> os.environ['CLIMATECONTROL_VALUE'] = 'test'
>>> climate = Climate()
>>> dict(climate.settings)
{'value': 'test'}
>>>
>>> # now update the settings
>>> climate.update({'new_value': 'new'})
>>> climate.settings.value
'test'
>>> climate.settings.new_value
'new'
Alternatively a path can be specified that will be expanded:
>>> climate.update('test', 'level_1.level_2.0.inlist')
>>> climate.settings.level_1.level_2[0].inlist
'test'
"""
if path is not None:
update_data = FragmentPath.from_spec(path).expand(update_data)
if not self._initialized:
new_updates = (
self._updates + [update_data] if update_data else self._updates
)
parsed, combined, fragments = self._stateless_reload(new_updates)
self._set_state(parsed, combined, fragments, new_updates)
return
if not update_data:
return
# we can start directly from the previously consolidated fragment
base_fragments: List[Fragment] = [self._combined_fragment]
new_updates = [update_data]
update_fragments = list(self._iter_update_fragments(new_updates))
combined = self._combine_fragments(chain(base_fragments, update_fragments))
expanded = combined.expand_value_with_path()
clean_removed_items(expanded)
parsed = self.parse(expanded)
fragments = self._fragments + update_fragments
updates = self._updates + new_updates
self._set_state(parsed, combined, fragments, updates)
def parse(self, data: Any) -> Any:
"""Parse data into settings.
Args:
data: Raw mapping to be parsed
Returns:
Parsed data that has run through all preparsers and the `Climate`.
"""
if self._parse:
return self._parse(data)
else:
return data
def setup_logging(self, logging_section: str = "logging") -> None:
"""Initialize logging.
Uses the ``'logging'`` section from the global ``SETTINGS`` object if
available. Otherwise uses sane defaults provided by the
``climatecontrol`` package.
"""
logging_settings = DEFAULT_LOG_SETTINGS
try:
logging_settings_update = getattr(self.settings, logging_section)
except (KeyError, TypeError, AttributeError):
logging_settings_update = None
if logging_settings_update:
logging_settings = merge_nested(logging_settings, logging_settings_update)
logging_config.dictConfig(logging_settings)
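# Usage sketch (hypothetical settings content): if the loaded settings contain a
# "logging" section holding a standard logging dictConfig fragment, e.g.
#   {"root": {"level": "DEBUG"}}
# then Climate(...).setup_logging() merges it into the package defaults before
# calling dictConfig.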
def click_settings_file_option(self, **kw) -> Callable[..., Any]:
"""See :func:`cli_utils.click_settings_file_option`."""
from climatecontrol import cli_utils
return cli_utils.click_settings_file_option(self, **kw)
@contextmanager
def temporary_changes(self):
"""Open a context where any changes to the settings are rolled back on context exit.
This context manager can be used for testing or to temporarily change
settings.
Example:
>>> from climatecontrol.core import Climate
>>> climate = Climate()
>>> climate.update({'a': 1})
>>> with climate.temporary_changes():
... climate.update({'a': 2})
... assert climate.settings['a'] == 2
>>> assert climate.settings['a'] == 1
"""
archived_data = deepcopy(self._data.__wrapped__)
archived_settings = {
k: deepcopy(getattr(self, k))
for k in [
"settings_files",
"_updates",
"_fragments",
"_combined_fragment",
]
}
yield self
# reinstate all saved data after context block is finished
self._set_data(archived_data)
for k, v in archived_settings.items():
setattr(self, k, v)
def _set_state(
self, parsed: Any, combined: Fragment, fragments: List[Fragment], updates: list
):
"""Set all relevant state fields related to loading of settings on object."""
self._fragments = fragments
self._combined_fragment = combined
self._set_data(parsed)
self._updates = updates
self._initialized = True
def _set_data(self, value: Any) -> None:
self._data.__init__(value)
def _stateless_reload(self, updates: list) -> Tuple[Any, Fragment, List[Fragment]]:
"""Calculate result of reload but do not use any object state.
Obtain updates from input instead of from :attrib:`_updates` and return
the results as output instead of storing them in state.
Args:
updates: List of updates.
Returns:
Tuple consisting of: the parsed result object, the combined final
fragment, and the list of fragments that were used to reach this
result.
"""
base_fragments = self._iter_base_fragments()
update_fragments = self._iter_update_fragments(updates)
fragments = list(chain(base_fragments, update_fragments))
combined = self._combine_fragments(fragments)
expanded = combined.expand_value_with_path()
clean_removed_items(expanded)
parsed = self.parse(expanded)
return parsed, combined, fragments
def _process_fragment(self, fragment: Fragment) -> Iterator[Fragment]:
"""Preprocess a settings fragment and return the new version."""
for process in self._processors:
for new_fragment in process(fragment):
yield new_fragment
# recursively process new fragments as well
yield from self._process_fragment(new_fragment)
def _iter_process_fragments(
self, fragments: Iterable[Fragment]
) -> Iterator[Fragment]:
for fragment in fragments:
yield fragment
yield from self._process_fragment(fragment)
def _iter_update_fragments(self, updates: Sequence[Mapping] = ()):
fragments = (
Fragment(value=update_data, source="external")
for update_data in updates
if update_data
)
yield from self._iter_process_fragments(fragments)
def _iter_base_fragments(self) -> Iterator[Fragment]:
"""Iterate through all relevant fragments."""
fragments = chain(self._iter_load_files(), self.env_parser.iter_load())
yield from self._iter_process_fragments(fragments)
def _combine_fragments(self, fragments: Iterable[Fragment]) -> Fragment:
"""Combine the fragments into one final fragment."""
combined_fragment: Optional[Fragment] = None
for fragment in fragments:
if combined_fragment is None:
combined_fragment = fragment
else:
combined_fragment = combined_fragment.merge(fragment)
if not combined_fragment:
combined_fragment = Fragment({})
return combined_fragment
def _iter_load_files(self) -> Iterator[Fragment]:
for inferred_entry in self.inferred_settings_files:
yield from iter_load(inferred_entry)
for entry in self.settings_files:
yield from iter_load(entry)
def clean_removed_items(obj):
"""Remove all keys that contain a removed key indicated by a :data:``REMOVED`` object."""
items: Iterable[Tuple[Any, Any]]
if isinstance(obj, MutableMapping):
items = obj.items()
elif isinstance(obj, MutableSequence):
items = enumerate(obj)
else:
return
keys_to_remove = []
for key, value in items:
if value == REMOVED:
keys_to_remove.append(key)
else:
clean_removed_items(value)
for key in keys_to_remove:
del obj[key]
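# Illustrative example (hypothetical data): given
#   obj = {'a': 1, 'b': REMOVED, 'c': [2, REMOVED]}
# calling clean_removed_items(obj) mutates obj in place, leaving
#   {'a': 1, 'c': [2]}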
| []
| []
| [
"CLIMATECONTROL_VALUE",
"MY_APP_VALUE0",
"MY_APP_SECTION2__SUB2",
"MY_APP_SECTION1__SUB1",
"MY_APP_SECTION2__SUB3"
]
| [] | ["CLIMATECONTROL_VALUE", "MY_APP_VALUE0", "MY_APP_SECTION2__SUB2", "MY_APP_SECTION1__SUB1", "MY_APP_SECTION2__SUB3"] | python | 5 | 0 | |
engine/cmd_start.go | package main
import (
"context"
"encoding/json"
"fmt"
"os"
"os/signal"
"sort"
"strings"
"syscall"
"time"
"github.com/ovh/cds/engine/api"
"github.com/ovh/cds/engine/api/observability"
"github.com/ovh/cds/engine/api/services"
"github.com/ovh/cds/engine/cdn"
"github.com/ovh/cds/engine/elasticsearch"
"github.com/ovh/cds/engine/hatchery/kubernetes"
"github.com/ovh/cds/engine/hatchery/local"
"github.com/ovh/cds/engine/hatchery/marathon"
"github.com/ovh/cds/engine/hatchery/openstack"
"github.com/ovh/cds/engine/hatchery/swarm"
"github.com/ovh/cds/engine/hatchery/vsphere"
"github.com/ovh/cds/engine/hooks"
"github.com/ovh/cds/engine/migrateservice"
"github.com/ovh/cds/engine/repositories"
"github.com/ovh/cds/engine/service"
"github.com/ovh/cds/engine/ui"
"github.com/ovh/cds/engine/vcs"
"github.com/ovh/cds/sdk"
"github.com/ovh/cds/sdk/log"
"github.com/spf13/cobra"
)
func init() {
startCmd.Flags().StringVar(&flagStartConfigFile, "config", "", "config file")
startCmd.Flags().StringVar(&flagStartRemoteConfig, "remote-config", "", "(optional) consul configuration store")
startCmd.Flags().StringVar(&flagStartRemoteConfigKey, "remote-config-key", "cds/config.api.toml", "(optional) consul configuration store key")
startCmd.Flags().StringVar(&flagStartVaultAddr, "vault-addr", "", "(optional) Vault address to fetch secrets from vault (example: https://vault.mydomain.net:8200)")
startCmd.Flags().StringVar(&flagStartVaultToken, "vault-token", "", "(optional) Vault token to fetch secrets from vault")
}
var (
flagStartConfigFile string
flagStartRemoteConfig string
flagStartRemoteConfigKey string
flagStartVaultAddr string
flagStartVaultToken string
)
var startCmd = &cobra.Command{
Use: "start",
Short: "Start CDS",
Long: `
Start CDS Engine Services
#### API
This is the core component of CDS.
#### Hatcheries
They are the components responsible for spawning workers. Supported integrations/orchestrators are:
* Local machine
* Kubernetes
* Marathon
* Openstack
* Docker Swarm
* vSphere
#### Hooks
This component operates CDS workflow hooks
#### Repositories
This component operates CDS workflow repositories
#### VCS
This component operates CDS VCS connectivity
#### CDN
This component operates CDS CDN to handle storage
Start all of this with a single command:
$ engine start [api] [cdn] [hatchery:local] [hatchery:marathon] [hatchery:openstack] [hatchery:swarm] [hatchery:vsphere] [elasticsearch] [hooks] [vcs] [repositories] [migrate] [ui]
All the services are using the same configuration file format.
You have to specify where the toml configuration is. It can be a local file, provided by consul or vault.
You can also complement or override the toml file with environment variables.
See $ engine config command for more details.
`,
Run: func(cmd *cobra.Command, args []string) {
if len(args) == 0 {
args = strings.Split(os.Getenv("CDS_SERVICE"), " ")
}
if len(args) == 0 {
cmd.Help() // nolint
return
}
// Initialize config
conf := configImport(args, flagStartConfigFile, flagStartRemoteConfig, flagStartRemoteConfigKey, flagStartVaultAddr, flagStartVaultToken, false)
ctx, cancel := context.WithCancel(context.Background())
// initialize context
defer cancel()
// gracefully shutdown all
c := make(chan os.Signal, 1)
signal.Notify(c, os.Interrupt, syscall.SIGTERM)
go func() {
<-c
signal.Stop(c)
cancel()
}()
type serviceConf struct {
arg string
service service.Service
cfg interface{}
}
var (
serviceConfs []serviceConf
names []string
types []string
)
for _, a := range args {
fmt.Printf("Starting service %s\n", a)
switch a {
case services.TypeAPI:
if conf.API == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: api.New(), cfg: *conf.API})
names = append(names, conf.API.Name)
types = append(types, services.TypeAPI)
case services.TypeUI:
if conf.UI == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: ui.New(), cfg: *conf.UI})
names = append(names, conf.UI.Name)
types = append(types, services.TypeUI)
case "migrate":
if conf.DatabaseMigrate == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: migrateservice.New(), cfg: *conf.DatabaseMigrate})
names = append(names, conf.DatabaseMigrate.Name)
types = append(types, services.TypeDBMigrate)
case services.TypeHatchery + ":local":
if conf.Hatchery.Local == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: local.New(), cfg: *conf.Hatchery.Local})
names = append(names, conf.Hatchery.Local.Name)
types = append(types, services.TypeHatchery)
case services.TypeHatchery + ":kubernetes":
if conf.Hatchery.Kubernetes == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: kubernetes.New(), cfg: *conf.Hatchery.Kubernetes})
names = append(names, conf.Hatchery.Kubernetes.Name)
types = append(types, services.TypeHatchery)
case services.TypeHatchery + ":marathon":
if conf.Hatchery.Marathon == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: marathon.New(), cfg: *conf.Hatchery.Marathon})
names = append(names, conf.Hatchery.Marathon.Name)
types = append(types, services.TypeHatchery)
case services.TypeHatchery + ":openstack":
if conf.Hatchery.Openstack == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: openstack.New(), cfg: *conf.Hatchery.Openstack})
names = append(names, conf.Hatchery.Openstack.Name)
types = append(types, services.TypeHatchery)
case services.TypeHatchery + ":swarm":
if conf.Hatchery.Swarm == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: swarm.New(), cfg: *conf.Hatchery.Swarm})
names = append(names, conf.Hatchery.Swarm.Name)
types = append(types, services.TypeHatchery)
case services.TypeHatchery + ":vsphere":
if conf.Hatchery.VSphere == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: vsphere.New(), cfg: *conf.Hatchery.VSphere})
names = append(names, conf.Hatchery.VSphere.Name)
types = append(types, services.TypeHatchery)
case services.TypeHooks:
if conf.Hooks == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: hooks.New(), cfg: *conf.Hooks})
names = append(names, conf.Hooks.Name)
types = append(types, services.TypeHooks)
case services.TypeCDN:
if conf.CDN == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: cdn.New(), cfg: *conf.CDN})
names = append(names, conf.CDN.Name)
types = append(types, services.TypeCDN)
case services.TypeVCS:
if conf.VCS == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: vcs.New(), cfg: *conf.VCS})
names = append(names, conf.VCS.Name)
types = append(types, services.TypeVCS)
case services.TypeRepositories:
if conf.Repositories == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: repositories.New(), cfg: *conf.Repositories})
names = append(names, conf.Repositories.Name)
types = append(types, services.TypeRepositories)
case "elasticsearch":
if conf.ElasticSearch == nil {
sdk.Exit("Unable to start: missing service %s configuration", a)
}
serviceConfs = append(serviceConfs, serviceConf{arg: a, service: elasticsearch.New(), cfg: *conf.ElasticSearch})
names = append(names, conf.ElasticSearch.Name)
types = append(types, services.TypeElasticsearch)
default:
fmt.Printf("Error: service '%s' unknown\n", a)
os.Exit(1)
}
}
//Initialize logs
logConf := log.Conf{
Level: conf.Log.Level,
GraylogProtocol: conf.Log.Graylog.Protocol,
GraylogHost: conf.Log.Graylog.Host,
GraylogPort: fmt.Sprintf("%d", conf.Log.Graylog.Port),
GraylogExtraKey: conf.Log.Graylog.ExtraKey,
GraylogExtraValue: conf.Log.Graylog.ExtraValue,
GraylogFieldCDSVersion: sdk.VERSION,
GraylogFieldCDSOS: sdk.GOOS,
GraylogFieldCDSArch: sdk.GOARCH,
GraylogFieldCDSServiceName: strings.Join(names, "_"),
GraylogFieldCDSServiceType: strings.Join(types, "_"),
}
log.Initialize(ctx, &logConf)
// Sort the slice of services we have to start to be sure to start the API first
sort.Slice(serviceConfs, func(i, j int) bool {
return serviceConfs[i].arg < serviceConfs[j].arg
})
//Configure the services
for _, s := range serviceConfs {
if err := s.service.ApplyConfiguration(s.cfg); err != nil {
sdk.Exit("Unable to init service %s: %v", s.arg, err)
}
log.Info(ctx, "%s> %s configuration applied", s.arg, s.service.Name())
if srv, ok := s.service.(service.BeforeStart); ok {
if err := srv.BeforeStart(ctx); err != nil {
sdk.Exit("Unable to start service %s: %v", s.arg, err)
}
}
c, err := observability.Init(ctx, conf.Telemetry, s.service)
if err != nil {
sdk.Exit("Unable to start tracing exporter: %v", err)
}
go start(c, s.service, s.cfg, s.arg)
// Stupid trick: when the API is starting, wait a bit before starting the others
if s.arg == "API" || s.arg == "api" {
time.Sleep(2 * time.Second)
}
}
//Wait for the end
<-ctx.Done()
if ctx.Err() != nil {
fmt.Printf("Exiting (%v)\n", ctx.Err())
}
},
}
func start(c context.Context, s service.Service, cfg interface{}, serviceName string) {
if err := serve(c, s, serviceName, cfg); err != nil {
sdk.Exit("Service has been stopped: %s %+v", serviceName, err)
}
}
func serve(c context.Context, s service.Service, serviceName string, cfg interface{}) error {
ctx, cancel := context.WithCancel(c)
defer cancel()
x, err := s.Init(cfg)
if err != nil {
return err
}
// first signin
if err := s.Start(ctx, x); err != nil {
log.Error(ctx, "%s> Unable to start service: %v", serviceName, err)
return err
}
var srvConfig sdk.ServiceConfig
b, _ := json.Marshal(cfg)
json.Unmarshal(b, &srvConfig) // nolint
// then register
if err := s.Register(c, srvConfig); err != nil {
log.Error(ctx, "%s> Unable to register: %v", serviceName, err)
return err
}
log.Info(ctx, "%s> Service registered", serviceName)
// finally start the heartbeat goroutine
go func() {
if err := s.Heartbeat(ctx, s.Status); err != nil {
log.Error(ctx, "%v", err)
cancel()
}
}()
go func() {
if err := s.Serve(ctx); err != nil {
log.Error(ctx, "%s> Serve: %+v", serviceName, err)
cancel()
}
}()
<-ctx.Done()
if ctx.Err() != nil {
log.Error(ctx, "%s> Service exiting with err: %+v", serviceName, ctx.Err())
} else {
log.Info(ctx, "%s> Service exiting", serviceName)
}
return ctx.Err()
}
| [
"\"CDS_SERVICE\""
]
| []
| [
"CDS_SERVICE"
]
| [] | ["CDS_SERVICE"] | go | 1 | 0 | |
config.py | import os
basedir = os.path.abspath(os.path.dirname(__file__))
class Config:
SECRET_KEY=os.environ.get('SECRET_KEY') or 'fhvnbmgmgkh9ttolslso'
WEATHER_API_KEY=os.environ.get('WEATHER_API_KEY') or 'a778d9642410a11ed2cbd17c20c246bc'
SQLALCHEMY_DATABASE_URI = os.environ.get('DATABASE_URL') or \
'sqlite:///' + os.path.join(basedir, 'app.db')
class ProdConfig(Config):
pass
class DevConfig(Config):
DEBUG=True
config_options={
'development': DevConfig,
'production': ProdConfig
}
| []
| []
| [
"WEATHER_API_KEY",
"SECRET_KEY",
"DATABASE_URL"
]
| [] | ["WEATHER_API_KEY", "SECRET_KEY", "DATABASE_URL"] | python | 3 | 0 | |
mlrun/platforms/other.py | # Copyright 2018 Iguazio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# this file is based on the code from kubeflow pipelines git
import os
from .iguazio import mount_v3io
def mount_pvc(pvc_name, volume_name="pipeline", volume_mount_path="/mnt/pipeline"):
"""
Modifier function to apply to a Container Op to simplify volume, volume mount addition and
enable better reuse of volumes, volume claims across container ops.
Usage:
train = train_op(...)
train.apply(mount_pvc('claim-name', 'pipeline', '/mnt/pipeline'))
"""
def _mount_pvc(task):
from kubernetes import client as k8s_client
local_pvc = k8s_client.V1PersistentVolumeClaimVolumeSource(claim_name=pvc_name)
return task.add_volume(
k8s_client.V1Volume(name=volume_name, persistent_volume_claim=local_pvc)
).add_volume_mount(
k8s_client.V1VolumeMount(mount_path=volume_mount_path, name=volume_name)
)
return _mount_pvc
def auto_mount(pvc_name="", volume_mount_path="", volume_name=None):
"""choose the mount based on env variables and params
volume will be selected by the following order:
- k8s PVC volume when both pvc_name and volume_mount_path are set
- iguazio v3io volume when V3IO_ACCESS_KEY and V3IO_USERNAME env vars are set
- k8s PVC volume when env var is set: MLRUN_PVC_MOUNT=<pvc-name>:<mount-path>
"""
if pvc_name and volume_mount_path:
return mount_pvc(
pvc_name=pvc_name,
volume_mount_path=volume_mount_path,
volume_name=volume_name or "pvc",
)
if "V3IO_ACCESS_KEY" in os.environ:
return mount_v3io(name=volume_name or "v3io")
if "MLRUN_PVC_MOUNT" in os.environ:
mount = os.environ.get("MLRUN_PVC_MOUNT")
items = mount.split(":")
if len(items) != 2:
raise ValueError("MLRUN_PVC_MOUNT should include <pvc-name>:<mount-path>")
return mount_pvc(
pvc_name=items[0],
volume_mount_path=items[1],
volume_name=volume_name or "pvc",
)
raise ValueError("failed to auto mount, need to set env vars")
def mount_secret(secret_name, mount_path, volume_name="secret", items=None):
"""Modifier function to mount kubernetes secret as files(s)
:param secret_name: k8s secret name
:param mount_path: path to mount inside the container
:param volume_name: unique volume name
:param items: If unspecified, each key-value pair in the Data field
of the referenced Secret will be projected into the
volume as a file whose name is the key and content is
the value.
If specified, the listed keys will be projected into
the specified paths, and unlisted keys will not be
present.
"""
def _mount_secret(task):
from kubernetes import client as k8s_client
vol = k8s_client.V1SecretVolumeSource(secret_name=secret_name, items=items)
return task.add_volume(
k8s_client.V1Volume(name=volume_name, secret=vol)
).add_volume_mount(
k8s_client.V1VolumeMount(mount_path=mount_path, name=volume_name)
)
return _mount_secret
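# Usage sketch (hypothetical secret name and mount path):
#   train.apply(mount_secret('my-credentials', '/secrets'))
# projects every key of the k8s secret 'my-credentials' as a file under
# /secrets inside the container; pass items=[...] to project selected keys only.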
| []
| []
| [
"MLRUN_PVC_MOUNT"
]
| [] | ["MLRUN_PVC_MOUNT"] | python | 1 | 0 | |
examples/pwr_run/checkpointing/final_3level/final4_3level_obsolete/job71.py | """
#Trains a VGG16/VGG19 on the CIFAR10 dataset.
"""
from __future__ import print_function
import keras
from keras.layers import Dense, Conv2D, BatchNormalization, Activation
from keras.layers import AveragePooling2D, Input, Flatten
from keras.optimizers import Adam
from keras.callbacks import ModelCheckpoint, LearningRateScheduler
from keras.callbacks import ReduceLROnPlateau, TensorBoard
from keras.preprocessing.image import ImageDataGenerator
from keras.regularizers import l2
from keras import backend as K
from keras.models import Model
from keras.datasets import cifar10
from keras.applications.vgg16 import VGG16
from keras.applications.vgg19 import VGG19
from keras import models, layers, optimizers
from datetime import datetime
import tensorflow as tf
import numpy as np
import os
import pdb
import sys
import argparse
import time
import signal
import glob
import json
import send_signal
parser = argparse.ArgumentParser(description='Tensorflow Cifar10 Training')
parser.add_argument('--tc', metavar='TESTCASE', type=str, help='specific testcase name')
parser.add_argument('--resume', dest='resume', action='store_true', help='if True, resume training from a checkpoint')
parser.add_argument('--gpu_num', metavar='GPU_NUMBER', type=str, help='select which gpu to use')
parser.add_argument('--node', metavar='HOST_NODE', type=str, help='node of the host (scheduler)')
parser.set_defaults(resume=False)
args = parser.parse_args()
os.environ["CUDA_DEVICE_ORDER"]="PCI_BUS_ID"
os.environ["CUDA_VISIBLE_DEVICES"]=args.gpu_num
# Training parameters
batch_size = 64
args_lr = 0.0008
args_model = 'vgg16'
epoch_begin_time = 0
job_name = sys.argv[0].split('.')[0]
save_files = '/scratch/li.baol/checkpoint_final4_3level/' + job_name + '*'
total_epochs = 49
starting_epoch = 0
# first step is to update the PID
pid = os.getpid()
message = job_name + ' pid ' + str(pid) # 'job50 pid 3333'
send_signal.send(args.node, 10002, message)
if args.resume:
save_file = glob.glob(save_files)[0]
# epochs = int(save_file.split('/')[4].split('_')[1].split('.')[0])
starting_epoch = int(save_file.split('/')[4].split('.')[0].split('_')[-1])
data_augmentation = True
num_classes = 10
# Subtracting pixel mean improves accuracy
subtract_pixel_mean = True
n = 3
# Model name, depth and version
model_type = args.tc #'P100_resnet50_he_256_1'
# Load the CIFAR10 data.
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
# Normalize data.
x_train = x_train.astype('float32') / 255
x_test = x_test.astype('float32') / 255
# If subtract pixel mean is enabled
if subtract_pixel_mean:
x_train_mean = np.mean(x_train, axis=0)
x_train -= x_train_mean
x_test -= x_train_mean
print('x_train shape:', x_train.shape)
print(x_train.shape[0], 'train samples')
print(x_test.shape[0], 'test samples')
print('y_train shape:', y_train.shape)
# Convert class vectors to binary class matrices.
y_train = keras.utils.to_categorical(y_train, num_classes)
y_test = keras.utils.to_categorical(y_test, num_classes)
if args.resume:
print('resume from checkpoint')
message = job_name + ' b_end'
send_signal.send(args.node, 10002, message)
model = keras.models.load_model(save_file)
message = job_name + ' c_end'
send_signal.send(args.node, 10002, message)
else:
print('train from start')
model = models.Sequential()
if '16' in args_model:
base_model = VGG16(weights=None, include_top=False, input_shape=(32, 32, 3), pooling=None)
elif '19' in args_model:
base_model = VGG19(weights=None, include_top=False, input_shape=(32, 32, 3), pooling=None)
#base_model.summary()
#pdb.set_trace()
model.add(base_model)
model.add(layers.Flatten())
model.add(layers.BatchNormalization())
model.add(layers.Dense(128, activation='relu'))#, kernel_initializer='he_uniform'))
#model.add(layers.Dropout(0.2))
model.add(layers.BatchNormalization())
model.add(layers.Dense(64, activation='relu'))#, kernel_initializer='he_uniform'))
#model.add(layers.Dropout(0.2))
model.add(layers.BatchNormalization())
model.add(layers.Dense(10, activation='softmax'))#, kernel_initializer='he_uniform'))
model.compile(loss='categorical_crossentropy',
optimizer=Adam(lr=args_lr),
metrics=['accuracy'])
#model.summary()
print(model_type)
#pdb.set_trace()
current_epoch = 0
################### connects interrupt signal to the process #####################
def terminateProcess(signalNumber, frame):
# first record the wasted epoch time
global epoch_begin_time
if epoch_begin_time == 0:
epoch_waste_time = 0
else:
epoch_waste_time = int(time.time() - epoch_begin_time)
message = job_name + ' waste ' + str(epoch_waste_time) # 'job50 waste 100'
if epoch_waste_time > 0:
send_signal.send(args.node, 10002, message)
print('checkpointing the model triggered by kill -15 signal')
# delete whatever checkpoint that already exists
for f in glob.glob(save_files):
os.remove(f)
model.save('/scratch/li.baol/checkpoint_final4_3level/' + job_name + '_' + str(current_epoch) + '.h5')
print ('(SIGTERM) terminating the process')
message = job_name + ' checkpoint'
send_signal.send(args.node, 10002, message)
sys.exit()
signal.signal(signal.SIGTERM, terminateProcess)
#################################################################################
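# Illustrative scheduler interaction (hypothetical argument values): the job is
# first launched as
#   python job71.py --tc <testcase> --gpu_num 0 --node <scheduler-host>
# and, after the scheduler checkpoints it via SIGTERM, it can be relaunched with
# --resume appended so training continues from the saved epoch.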
logdir = '/scratch/li.baol/tsrbrd_log/job_runs/' + model_type + '/' + job_name
tensorboard_callback = TensorBoard(log_dir=logdir)#, update_freq='batch')
first_epoch_start = 0
class PrintEpoch(keras.callbacks.Callback):
def on_epoch_begin(self, epoch, logs=None):
global current_epoch, first_epoch_start
#remaining_epochs = epochs - epoch
current_epoch = epoch
print('current epoch ' + str(current_epoch))
global epoch_begin_time
epoch_begin_time = time.time()
if epoch == starting_epoch and args.resume:
first_epoch_start = time.time()
message = job_name + ' d_end'
send_signal.send(args.node, 10002, message)
elif epoch == starting_epoch:
first_epoch_start = time.time()
if epoch == starting_epoch:
# send signal to indicate checkpoint is qualified
message = job_name + ' ckpt_qual'
send_signal.send(args.node, 10002, message)
def on_epoch_end(self, epoch, logs=None):
if epoch == starting_epoch:
first_epoch_time = int(time.time() - first_epoch_start)
message = job_name + ' 1st_epoch ' + str(first_epoch_time)
send_signal.send(args.node, 10002, message)
progress = round((epoch+1) / round(total_epochs/2), 2)
message = job_name + ' completion ' + str(progress)
send_signal.send(args.node, 10002, message)
my_callback = PrintEpoch()
callbacks = [tensorboard_callback, my_callback]
#[checkpoint, lr_reducer, lr_scheduler, tensorboard_callback]
# Run training
model.fit(x_train, y_train,
batch_size=batch_size,
epochs=round(total_epochs/2),
validation_data=(x_test, y_test),
shuffle=True,
callbacks=callbacks,
initial_epoch=starting_epoch,
verbose=1
)
# Score trained model.
scores = model.evaluate(x_test, y_test, verbose=1)
print('Test loss:', scores[0])
print('Test accuracy:', scores[1])
# send signal to indicate job has finished
message = job_name + ' finish'
send_signal.send(args.node, 10002, message)
| []
| []
| [
"CUDA_DEVICE_ORDER",
"CUDA_VISIBLE_DEVICES"
]
| [] | ["CUDA_DEVICE_ORDER", "CUDA_VISIBLE_DEVICES"] | python | 2 | 0 | |
Modules/SpeechRecognize.py | import os
import requests
from vk_api.bot_longpoll import VkBotEvent, VkBotEventType, DotDict
from vk_api.utils import get_random_id
from FusionBotMODULES import Fusion, ModuleManager
def find_audio_in_fwd_messages(msg):
if msg.attachments:
if msg.attachments[0]["type"] == "audio_message":
return msg.attachments[0]
if "reply_message" in msg:
return find_audio_in_fwd_messages(DotDict(msg.reply_message))
elif msg.fwd_messages:
for message in msg.fwd_messages:
res = find_audio_in_fwd_messages(DotDict(message))
if res:
return res
def recognize_audio(audio_attachment):
url = audio_attachment["audio_message"]["link_ogg"]
file = requests.get(url, allow_redirects=True)
response = requests.post("https://stt.api.cloud.yandex.net/speech/v1/stt:recognize", data=file.content, headers={
'Authorization': 'Api-Key ' + os.environ.get("yandex_api_token"),
})
code = int(response.status_code)
if code == 200:
return response.json()["result"]
else:
return response.json()
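# Illustrative use (assumes a valid Yandex SpeechKit key in the yandex_api_token
# environment variable and a VK audio_message attachment dict):
#   result = recognize_audio(attachment)
# a str result is the recognized text; a dict result is treated as an error body
# with 'error_code' and 'error_message', as the handlers below assume.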
class Module(ModuleManager.Module):
name = "SpeechRecognition"
description = "Модуль распознавания голосовых сообщений"
def run(self, client: Fusion, registry):
class RecognizeCommand(ModuleManager.Command):
name = "recognize"
description = "Распознать пересланное голосовое сообщение"
args = "<пересланное голосовое сообщение>"
def run(self, event: VkBotEvent, args, keys):
client.get_api().messages.setActivity(type="typing", peer_id=event.obj.peer_id)
audio_attachment = find_audio_in_fwd_messages(event.obj)
if not audio_attachment:
return False
res = recognize_audio(audio_attachment)
text = None
if type(res) == dict:
error = res
text = "Произошла неизвестная ошибка.\n\nКод:%s\nОписание:%s" % (
error["error_code"], error["error_message"]
)
elif type(res) == str:
text = "Распознано голосовое сообщение:\n\n%s" % res
client.get_api().messages.send(
message=text,
peer_id=event.obj.peer_id,
random_id=get_random_id(),
)
return True
client.module_manager.add_command(RecognizeCommand(), self)
def on_event(self, client: Fusion, event: VkBotEvent):
if event.type == VkBotEventType.MESSAGE_NEW:
if event.obj.attachments:
attachment = event.obj.attachments[0]
if attachment["type"] == "audio_message":
client.get_api().messages.setActivity(type="typing", peer_id=event.obj.peer_id)
res = recognize_audio(attachment)
text = None
if type(res) == dict:
error = res
text = "Произошла неизвестная ошибка.\n\nКод:%s\nОписание:%s" % (
error["error_code"], error["error_message"])
elif type(res) == str:
text = "Распознано голосовое сообщение:\n\n%s" % res
client.get_api().messages.send(
message=text,
peer_id=event.obj.peer_id,
random_id=get_random_id(),
reply_to=event.obj.id,
)
| []
| []
| [
"yandex_api_token"
]
| [] | ["yandex_api_token"] | python | 1 | 0 | |
wireleap_tun/main.go | // Copyright (c) 2021 Wireleap
package main
import (
"io/ioutil"
"log"
"net"
"os"
"os/signal"
"path"
"runtime"
"strconv"
"syscall"
"github.com/fsnotify/fsnotify"
"github.com/vishvananda/netlink"
"github.com/wireleap/client/wireleap_tun/tun"
"net/http"
_ "net/http/pprof"
)
func main() {
if err := syscall.Seteuid(0); err != nil {
log.Fatal("could not gain privileges; check if setuid flag is set?")
}
sh := os.Getenv("WIRELEAP_HOME")
h2caddr := os.Getenv("WIRELEAP_ADDR_H2C")
tunaddr := os.Getenv("WIRELEAP_ADDR_TUN")
if sh == "" || h2caddr == "" || tunaddr == "" {
log.Fatal("Running wireleap_tun separately from wireleap is not supported. Please use `sudo wireleap tun start`.")
}
t, err := tun.New()
if err != nil {
log.Fatalf("could not create tun device: %s", err)
}
rlim := syscall.Rlimit{Cur: 65535, Max: 65535}
if err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rlim); err != nil {
log.Fatalf("could not set RLIMIT_NOFILE to %+v", rlim)
}
routes, err := getroutes(sh)
if err != nil {
log.Fatalf("could not get routes: %s", err)
}
watcher, err := fsnotify.NewWatcher()
if err != nil {
log.Fatalf("could not set up file watcher: %s", err)
}
defer watcher.Close()
err = watcher.Add(path.Join(sh, "bypass.json"))
if err != nil {
log.Fatalf("could not add bypass.json to file watcher: %s", err)
}
link, err := netlink.LinkByName(t.Name())
if err != nil {
log.Fatalf("could not get link for %s: %s", t.Name(), err)
}
err = netlink.LinkSetTxQLen(link, 1000)
if err != nil {
log.Fatalf("could not set link txqueue length for %s to %d: %s", t.Name(), 1000, err)
}
err = netlink.LinkSetUp(link)
if err != nil {
log.Fatalf("could not set %s up: %s", link, err)
}
tunhost, _, err := net.SplitHostPort(tunaddr)
if err != nil {
log.Fatalf("could not parse WIRELEAP_ADDR_TUN `%s`: %s", tunaddr, err)
}
addr, err := netlink.ParseAddr(tunhost + "/31")
if err != nil {
log.Fatalf("could not parse address of %s: %s", tunaddr, err)
}
err = netlink.AddrAdd(link, addr)
if err != nil {
log.Fatalf("could not set address of %s to %s: %s", link, addr, err)
}
// avoid clobbering the default route by being just a _little_ bit more specific
for _, r := range append([]netlink.Route{{
// lower half of all v4 addresses
LinkIndex: link.Attrs().Index,
Dst: &net.IPNet{IP: net.IPv4(0, 0, 0, 0), Mask: net.CIDRMask(1, net.IPv4len*8)},
}, {
// upper half of all v4 addresses
LinkIndex: link.Attrs().Index,
Dst: &net.IPNet{IP: net.IPv4(128, 0, 0, 0), Mask: net.CIDRMask(1, net.IPv4len*8)},
}, {
// v6 global-adressable range
LinkIndex: link.Attrs().Index,
Dst: &net.IPNet{IP: net.ParseIP("2000::"), Mask: net.CIDRMask(3, net.IPv6len*8)},
}}, routes...) {
log.Printf("adding route: %+v", r)
err = netlink.RouteReplace(&r)
if err != nil {
log.Fatalf("could not add route to %s: %s", r.Dst, err)
}
}
pidfile := path.Join(sh, "wireleap_tun.pid")
finalize := func() {
// don't need to delete catch-all routes via tun dev as they will be
// removed when the device is down
for _, r := range routes {
netlink.RouteDel(&r)
}
os.Remove(pidfile)
}
defer finalize()
sig := make(chan os.Signal, 1) // buffered: signal.Notify must not block
signal.Notify(sig, syscall.SIGINT, syscall.SIGTERM, syscall.SIGQUIT)
os.Remove(pidfile)
pidtext := []byte(strconv.Itoa(os.Getpid()))
err = ioutil.WriteFile(pidfile, pidtext, 0644)
if err != nil {
finalize()
log.Fatalf("could not write pidfile %s: %s", pidfile, err)
}
defer os.Remove(pidfile)
// setup debugging & profiling
if os.Getenv("WIRELEAP_TUN_DEBUG") != "" {
DEBUG = true
}
if os.Getenv("WIRELEAP_TUN_PPROF") != "" {
go func() { log.Println(http.ListenAndServe("localhost:6060", nil)) }()
}
if bpr := os.Getenv("WIRELEAP_TUN_BLOCK_PROFILE_RATE"); bpr != "" {
n, err := strconv.Atoi(bpr)
if err != nil {
log.Fatalf("invalid WIRELEAP_TUN_BLOCK_PROFILE_RATE value: %s", bpr)
}
runtime.SetBlockProfileRate(n)
}
if mpf := os.Getenv("WIRELEAP_TUN_MUTEX_PROFILE_FRACTION"); mpf != "" {
n, err := strconv.Atoi(mpf)
if err != nil {
log.Fatalf("invalid WIRELEAP_TUN_MUTEX_PROFILE_FRACTION value: %s", mpf)
}
runtime.SetMutexProfileFraction(n)
}
if err = tunsplice(t, h2caddr, tunaddr); err != nil {
log.Fatal("tunsplice returned error:", err)
}
for {
select {
case s := <-sig:
finalize()
log.Fatalf("terminating on signal %s", s)
case _, ok := <-watcher.Events:
if !ok {
return
}
routes2, err := getroutes(sh)
if err != nil {
log.Fatal(err)
}
for _, r := range routes {
netlink.RouteDel(&r)
}
for _, r := range routes2 {
err = netlink.RouteReplace(&r)
if err != nil {
log.Fatalf("could not remove route %s: %s", r, err)
}
}
routes = routes2
case err, ok := <-watcher.Errors:
if !ok {
return
}
log.Println("error while watching files:", err)
}
}
}
| [
"\"WIRELEAP_HOME\"",
"\"WIRELEAP_ADDR_H2C\"",
"\"WIRELEAP_ADDR_TUN\"",
"\"WIRELEAP_TUN_DEBUG\"",
"\"WIRELEAP_TUN_PPROF\"",
"\"WIRELEAP_TUN_BLOCK_PROFILE_RATE\"",
"\"WIRELEAP_TUN_MUTEX_PROFILE_FRACTION\""
]
| []
| [
"WIRELEAP_HOME",
"WIRELEAP_TUN_MUTEX_PROFILE_FRACTION",
"WIRELEAP_ADDR_H2C",
"WIRELEAP_TUN_PPROF",
"WIRELEAP_TUN_BLOCK_PROFILE_RATE",
"WIRELEAP_ADDR_TUN",
"WIRELEAP_TUN_DEBUG"
]
| [] | ["WIRELEAP_HOME", "WIRELEAP_TUN_MUTEX_PROFILE_FRACTION", "WIRELEAP_ADDR_H2C", "WIRELEAP_TUN_PPROF", "WIRELEAP_TUN_BLOCK_PROFILE_RATE", "WIRELEAP_ADDR_TUN", "WIRELEAP_TUN_DEBUG"] | go | 7 | 0 | |
tests/notebook_test.py | import datetime
import os
import re
import subprocess
import sys
import unittest
import argparse
import time
for i in sys.path:
if 'pyross' in i or i == '':
sys.path.remove(i)
def run_notebook_tests(path):
"""
Runs Jupyter notebook tests. Exits if they fail.
"""
basepath = os.path.dirname(__file__)
nbpath = os.path.abspath(os.path.join(basepath, "..", path))
# Ignore books with deliberate errors, but check they still exist
ignore_list = []
for ignored_book in ignore_list:
if not os.path.isfile(ignored_book):
raise Exception('Ignored notebook not found: ' + ignored_book)
# Scan and run
print('Testing notebooks')
ok = True
for notebook, cwd in list_notebooks(nbpath, True, ignore_list):
os.chdir(cwd) # necessary for relative imports in notebooks
ok &= test_notebook(notebook)
# print(notebook, cwd, sys.path)
if not ok:
print('\nErrors encountered in notebooks')
sys.exit(1)
print('\nOK')
def list_notebooks(root, recursive=True, ignore_list=None, notebooks=None):
"""
Returns a list of all notebooks in a directory.
"""
if notebooks is None:
notebooks = []
if ignore_list is None:
ignore_list = []
try:
for filename in os.listdir(root):
path = os.path.join(root, filename)
cwd = os.path.dirname(path)
if path in ignore_list:
print('Skipping ignored notebook: ' + path)
continue
# Add notebooks
if os.path.splitext(path)[1] == '.ipynb':
notebooks.append((path,cwd))
# Recurse into subdirectories
elif recursive and os.path.isdir(path):
# Ignore hidden directories
if filename[:1] == '.':
continue
list_notebooks(path, recursive, ignore_list, notebooks)
except NotADirectoryError:
path = root
cwd = os.path.dirname(path)
return [(path,cwd)]
return notebooks
def test_notebook(path):
"""
Tests a notebook in a subprocess, exits if it doesn't finish.
"""
import nbconvert
print('Running ' + path + ' ... ', end='')
sys.stdout.flush()
# Load notebook, convert to python
e = nbconvert.exporters.PythonExporter()
code, __ = e.from_filename(path)
# Strip ipython magics and matplotlib show() calls, if present
ipylines = ['ipython', 'show(']
code = '\n'.join(x for x in code.splitlines()
if not any(s in x for s in ipylines))
# print(code)
# Tell matplotlib not to produce any figures
env = os.environ.copy()
env['MPLBACKEND'] = 'Template'
# Run in subprocess
start = time.time()
cmd = [sys.executable, '-c', code]
try:
p = subprocess.Popen(
cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=env
)
stdout, stderr = p.communicate()
# TODO: Use p.communicate(timeout=3600) if Python3 only
if p.returncode != 0:
# Show failing code, output and errors before returning
print('ERROR')
# print('-- script ' + '-' * (79 - 10))
# for i, line in enumerate(code.splitlines()):
# j = str(1 + i)
# print(j + ' ' * (5 - len(j)) + line)
print('-- stdout ' + '-' * (79 - 10))
print(stdout)
print('-- stderr ' + '-' * (79 - 10))
print(stderr)
print('-' * 79)
return False
except KeyboardInterrupt:
p.terminate()
stop = time.time()
print('ABORTED after', round(stop-start,4), "s")
sys.exit(1)
# Successfully run
stop = time.time()
print('ok. Run took ', round(stop-start,4), "s")
return True
def export_notebook(ipath, opath):
"""
Exports the notebook at `ipath` to a python file at `opath`.
"""
import nbconvert
from traitlets.config import Config
# Create nbconvert configuration to ignore text cells
c = Config()
c.TemplateExporter.exclude_markdown = True
# Load notebook, convert to python
e = nbconvert.exporters.PythonExporter(config=c)
code, __ = e.from_filename(ipath)
# Remove "In [1]:" comments
r = re.compile(r'(\s*)# In\[([^]]*)\]:(\s)*')
code = r.sub('\n\n', code)
# Store as executable script file
with open(opath, 'w') as f:
f.write('#!/usr/bin/env python')
f.write(code)
os.chmod(opath, 0o775)
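# Example (hypothetical paths):
#   export_notebook('examples/sir_model.ipynb', 'examples/sir_model.py')
# writes an executable script with markdown cells and "In [n]:" markers stripped.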
if __name__ == '__main__':
# Set up argument parsing
parser = argparse.ArgumentParser(
description='Run notebook unit tests for PyRoss.',
)
# Unit tests
parser.add_argument(
'--path', default = '.',
help='Run specific notebook or folder containing notebooks',)
# Parse!
args = parser.parse_args()
print(args.path)
run_notebook_tests(args.path)
| []
| []
| []
| [] | [] | python | 0 | 0 |