blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
258340149c613722be8d1cfed6c2f43bf4840d6b | 69eb034a1d086d1b2ce1f1083df2b2fd74b9f5cc | /train_model_rmre.py | 7396f5c35cbd7cd3cf08f2c365cdd892b4978ffc | [] | no_license | webdxq/genarate_blessing | 2c0f6afc55f4c507750911802a80fe299a4690d6 | a08a09071edf687dcb512713daea1daf00450383 | refs/heads/master | 2020-03-29T01:38:33.419803 | 2018-09-19T06:10:26 | 2018-09-19T06:10:26 | 145,077,459 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,938 | py | #!/usr/bin/python3
#-*- coding: UTF-8 -*-
import collections
import numpy as np
import tensorflow as tf
import os
import sys
import chardet
import re
import json
import time
from datetime import datetime
reload(sys)
sys.setdefaultencoding('utf8')
# os.environ['CUDA_VISIBLE_DEVICES']='1'
#-------------------------------数据预处理---------------------------#
# poetry_file ='../data/poetry.txt'
minlen = 4
maxlen = 15
blessing_file ='/home/pingan_ai/dxq/project/gen_blessing/dataset/data/line_lyrics.txt'
blessings = []
all_words = []
cantoneses = open('/home/pingan_ai/dxq/project/cantonese.txt','r').readline().split(' ')
# print(cantoneses)
cantonese = [re.compile(i.decode('utf-8')) for i in cantoneses]
LEARNING_RATE_BASE = 0.02
MODEL_SAVE_PATH = '/media/pingan_ai/dxq/gen_blessing/new_model/'
N_GPU = 2
MODEL_NAME = "blessing.ckpt"
EPOCHS = 100
LEARNING_RATE_DECAY = 0.99
filename = blessing_file.split('/')[-1].split('.')[0]
# print(blessing_file)
can_count = 0
MOVING_AVERAGE_DECAY = 0.99
def HasReapeatWord(string):
flag = False
for i,char in enumerate(string):
# print i
s = i
m = i+1
e = i+2
# print string[s],string[m],string[e]
if flag:
return True
elif e >= (len(string)-1):
return False
else:
if string[s] == string[m] and string[m] == string[e]:
# print string[s],string[m],string[e]
flag = True
else:
continue
def IsCantonese(line):
for i, patten in enumerate(cantonese):
if patten.search(line)!= None:
# print(line)
# can_count = can_count+1
return True
return False
with open(blessing_file, "r") as f:
for i,line in enumerate(f):
if i == 0:
continue
# try:
# print(line)
line = line.decode('UTF-8')
line = line.strip(u'\n')
line = line.replace(u' ',u'')
if u'_' in line or u'(' in line or u'(' in line or u'《' in line or u'[' in line:
continue
if len(line) < minlen or len(line) > maxlen:
continue
if IsCantonese(line):
can_count = can_count+1
continue
if HasReapeatWord(line):
continue
all_words += [word for word in line]
line = u'[' + unicode(chr(len(line)+61)) +line + u']'
blessings.append(line)
# except Exception as e:
# print('no')
if i%100000== 0:
print(u'处理到%d'%i)
blessings = sorted(blessings,key=lambda line: len(line))
print(u'歌词总行数: %s'% len(blessings))
print(can_count)
counter = collections.Counter(all_words)
count_pairs = sorted(counter.items(), key=lambda x: -x[1])
print('*******************')
words, _ = zip(*count_pairs)
print(len(words))
for i in range(65,66+maxlen-minlen):
words = words[:len(words)] + (unicode(chr(i)),)
words = words[:len(words)] + (u'[',)
words = words[:len(words)] + (u']',)
words = words[:len(words)] + (u' ',)
print(u'词表总数: %s'% len(words))
word_num_map = dict(zip(words, range(len(words))))
print(word_num_map[u'['])
print(word_num_map[u']'])
print(word_num_map[u' '])
print(word_num_map[u'A'])
print(word_num_map[u'L'])
to_num = lambda word: word_num_map.get(word, len(words)-1)
blessings_vector = [ list(map(to_num,blessing)) for blessing in blessings]
to_words = lambda num: words[num]
print(blessings_vector[-4:-1])
print(blessings_vector[1])
for i in blessings[-4:-1]:
print(i)
print(blessings[1])
with open(filename+'2id_re.json','w') as outfile:
json.dump(word_num_map,outfile,ensure_ascii=False)
# outfile.write('\n')
with open(filename+'2word_re.json','w') as outfile2:
# word2id = dict((value, key) for key,value in word_num_map.iteritems())
json.dump(words,outfile2,ensure_ascii=False)
# outfile2.write('\n')
batch_size = 256
n_chunk = len(blessings_vector) // batch_size
# sys.exit()
class DataSet(object):
def __init__(self,data_size):
self._data_size = data_size
self._epochs_completed = 0
self._index_in_epoch = 0
self._data_index = np.arange(data_size)
def next_batch(self,batch_size):
start = self._index_in_epoch
if start + batch_size > self._data_size:
np.random.shuffle(self._data_index)
self._epochs_completed = self._epochs_completed + 1
self._index_in_epoch = batch_size
full_batch_features ,full_batch_labels = self.data_batch(0,batch_size)
return full_batch_features ,full_batch_labels
else:
self._index_in_epoch += batch_size
end = self._index_in_epoch
full_batch_features ,full_batch_labels = self.data_batch(start,end)
if self._index_in_epoch == self._data_size:
self._index_in_epoch = 0
self._epochs_completed = self._epochs_completed + 1
np.random.shuffle(self._data_index)
return full_batch_features,full_batch_labels
def data_batch(self,start,end):
batches = []
for i in range(start,end):
batches.append(blessings_vector[self._data_index[i]])
length = max(map(len,batches))
# print(word_num_map[' '])
xdata = np.full((end - start,length), word_num_map[']'], np.int32)
for row in range(end - start):
xdata[row,:len(batches[row])] = batches[row]
ydata = np.copy(xdata)
ydata[:,:-1] = xdata[:,1:]
return xdata,ydata
#---------------------------------------RNN--------------------------------------#
# 定义RNN
def neural_network(input_data,model='lstm', rnn_size=128, num_layers=2):
if model == 'rnn':
cell_fun = tf.nn.rnn_cell.BasicRNNCell
elif model == 'gru':
cell_fun = tf.nn.rnn_cell.GRUCell
elif model == 'lstm':
cell_fun = tf.nn.rnn_cell.BasicLSTMCell
cell = cell_fun(rnn_size, state_is_tuple=True)
cell = tf.nn.rnn_cell.MultiRNNCell([cell] * num_layers, state_is_tuple=True)
initial_state = cell.zero_state(batch_size, tf.float32)
with tf.variable_scope('rnnlm'):
softmax_w = tf.get_variable("softmax_w", [rnn_size, len(words)])
softmax_b = tf.get_variable("softmax_b", [len(words)])
with tf.device("/cpu:0"):
embedding = tf.get_variable("embedding", [len(words), rnn_size])
inputs = tf.nn.embedding_lookup(embedding, input_data)
outputs, last_state = tf.nn.dynamic_rnn(cell, inputs, initial_state=initial_state, scope='rnnlm')
output = tf.reshape(outputs,[-1, rnn_size])
logits = tf.matmul(output, softmax_w) + softmax_b
probs = tf.nn.softmax(logits)
return logits, last_state, probs, cell, initial_state
def load_model(sess, saver,ckpt_path):
latest_ckpt = tf.train.latest_checkpoint(ckpt_path)
if latest_ckpt:
print ('resume from', latest_ckpt)
saver.restore(sess, latest_ckpt)
return int(latest_ckpt[latest_ckpt.rindex('-') + 1:])
else:
print ('building model from scratch')
sess.run(tf.global_variables_initializer())
return -1
def to_word(weights):
sample = np.argmax(weights)
return words[sample]
def train_to_word(x):
# print(u'x的长度',len(x))
x_words = map(to_words, x)
# print(str(x_words).decode("unicode-escape"))
outstr = ''.join(x_words)
token = outstr[1]
outstr = outstr[2:-1]
print(u'[ '+ token +u' '+ outstr+u' ]')
def AlignSentence(sentence):
sentence = sentence[:-2]
sentence_re = ''
for i in range(len(sentence)):
if not (sentence[i] >= u'\u4e00' and sentence[i]<=u'\u9fa5'):
sentence_re += sentence[i]+u' '
else:
sentence_re += sentence[i]
# return u'[ '+sentence[i] + u' ]'
print sentence_re + u' ]'
def get_loss(input_data, targets, reuse_variables=None):
# 沿用5.5节中定义的函数来计算神经网络的前向传播结果。
with tf.variable_scope(tf.get_variable_scope(), reuse=reuse_variables):
logits, last_state, probs, _, _ = neural_network(input_data)
loss = tf.contrib.legacy_seq2seq.sequence_loss_by_example(
[logits],
[targets],
[tf.ones_like(targets, dtype=tf.float32)],
len(words)
)
cost = tf.reduce_mean(loss)
return cost
# 计算每一个变量梯度的平均值。
def average_gradients(tower_grads):
average_grads = []
# 枚举所有的变量和变量在不同GPU上计算得出的梯度。
for grad_and_vars in zip(*tower_grads):
# 计算所有GPU上的梯度平均值。
grads = []
for g, _ in grad_and_vars:
expanded_g = tf.expand_dims(g, 0)
grads.append(expanded_g)
grad = tf.concat(grads, 0)
grad = tf.reduce_mean(grad, 0)
v = grad_and_vars[0][1]
grad_and_var = (grad, v)
# 将变量和它的平均梯度对应起来。
average_grads.append(grad_and_var)
# 返回所有变量的平均梯度,这个将被用于变量的更新。
return average_grads
# def main(argv=None):
def main(argv=None):
# 将简单的运算放在CPU上,只有神经网络的训练过程放在GPU上。
TRAINING_STEPS = EPOCHS*n_chunk/N_GPU
with tf.Graph().as_default(), tf.device('/cpu:0'):
input_data = tf.placeholder(tf.int32, [batch_size, None])
output_targets = tf.placeholder(tf.int32, [batch_size, None])
trainds = DataSet(len(blessings_vector))
targets = tf.reshape(output_targets, [-1])
global_step = tf.get_variable('global_step', [], initializer=tf.constant_initializer(0), trainable=False)
learning_rate = tf.train.exponential_decay(
LEARNING_RATE_BASE, global_step, 60000 / batch_size, LEARNING_RATE_DECAY)
optimizer = tf.train.AdamOptimizer(learning_rate)
tower_grads = []
reuse_variables = False
# 将神经网络的优化过程跑在不同的GPU上。
for i in range(N_GPU):
# 将优化过程指定在一个GPU上。
with tf.device('/gpu:%d' % i):
with tf.name_scope('GPU_%d' % i) as scope:
cur_loss = get_loss(input_data,targets,reuse_variables)
# 在第一次声明变量之后,将控制变量重用的参数设置为True。这样可以
# 让不同的GPU更新同一组参数。
reuse_variables = True
grads = optimizer.compute_gradients(cur_loss)
tower_grads.append(grads)
# 计算变量的平均梯度。
grads = average_gradients(tower_grads)
for grad, var in grads:
if grad is not None:
tf.summary.histogram('gradients_on_average/%s' % var.op.name, grad)
# 使用平均梯度更新参数。
apply_gradient_op = optimizer.apply_gradients(grads, global_step=global_step)
for var in tf.trainable_variables():
tf.summary.histogram(var.op.name, var)
# 计算变量的滑动平均值。
variable_averages = tf.train.ExponentialMovingAverage(MOVING_AVERAGE_DECAY, global_step)
variables_to_average = (tf.trainable_variables() +tf.moving_average_variables())
variables_averages_op = variable_averages.apply(variables_to_average)
# 每一轮迭代需要更新变量的取值并更新变量的滑动平均值。
train_op = tf.group(apply_gradient_op, variables_averages_op)
saver = tf.train.Saver()
summary_op = tf.summary.merge_all()
init = tf.global_variables_initializer()
with tf.Session(config=tf.ConfigProto(
allow_soft_placement=True, log_device_placement=True)) as sess:
# 初始化所有变量并启动队列。
init.run()
summary_writer = tf.summary.FileWriter(MODEL_SAVE_PATH, sess.graph)
for step in range(TRAINING_STEPS):
# 执行神经网络训练操作,并记录训练操作的运行时间。
start_time = time.time()
x,y = trainds.next_batch(batch_size)
_, loss_value = sess.run([train_op, cur_loss],feed_dict={input_data: x, output_targets: y})
duration = time.time() - start_time
# 每隔一段时间数据当前的训练进度,并统计训练速度。
if step != 0 and step % 10 == 0:
# 计算使用过的训练数据个数。因为在每一次运行训练操作时,每一个GPU
# 都会使用一个batch的训练数据,所以总共用到的训练数据个数为
# batch大小 × GPU个数。
num_examples_per_step = batch_size * N_GPU
# num_examples_per_step为本次迭代使用到的训练数据个数,
# duration为运行当前训练过程使用的时间,于是平均每秒可以处理的训
# 练数据个数为num_examples_per_step / duration。
examples_per_sec = num_examples_per_step / duration
# duration为运行当前训练过程使用的时间,因为在每一个训练过程中,
# 每一个GPU都会使用一个batch的训练数据,所以在单个batch上的训
# 练所需要时间为duration / GPU个数。
sec_per_batch = duration / N_GPU
# 输出训练信息。
format_str = ('%s: step %d, loss = %.2f (%.1f examples/sec; %.3f sec/batch)')
print (format_str % (datetime.now(), step, loss_value, examples_per_sec, sec_per_batch))
# 通过TensorBoard可视化训练过程。
summary = sess.run(summary_op)
summary_writer.add_summary(summary, step)
# 每隔一段时间保存当前的模型。
if step == n_chunk:
checkpoint_path = os.path.join(MODEL_SAVE_PATH, MODEL_NAME)
saver.save(sess, checkpoint_path, global_step=step)
main()
# if __name__ == '__main__':
# tf.app.run()
| [
"[email protected]"
] | |
8562913d19df6e29366246a74cfb3818c2b42ba8 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/pygame/pygameweb/pygameweb/config.py | 93d8fc8e758f4623cd6c55d2070b53f047f96a2d | [
"BSD-2-Clause"
] | permissive | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:3ead591f9a215ef79ce67657f0809e549584a72ef37757eb3272ca4fbba1ab78
size 2948
| [
"[email protected]"
] | |
fefc253d22ba5bb0ef9b94bef1230f18761a0a2b | afa456bb3792e433d84684260cdce1dbc6302cde | /authors/apps/tests/test_validation.py | d124f479a99ca4cf8c7e3e77f3b359a31f4e9213 | [
"BSD-3-Clause"
] | permissive | andela/ah-backend-poseidon | 23ac16e9fcdce49f78df04126f9f486b8c39ebd4 | d2b561e83ed1e9a585853f4a4e2e37805e86c35c | refs/heads/develop | 2022-12-09T07:38:04.843476 | 2019-07-19T13:44:13 | 2019-07-19T13:44:13 | 158,799,017 | 1 | 4 | BSD-3-Clause | 2022-12-08T01:19:16 | 2018-11-23T07:55:00 | Python | UTF-8 | Python | false | false | 3,979 | py | from .base import BaseTestCase
from rest_framework import status
from authors.apps.authentication.models import User
from . import (new_user, data2, invalid_email, invalid_password,
short_password, dup_username, user_login)
class AccountTests(BaseTestCase):
"""handles user registration tests"""
def test_new_user_registration(self):
"""check if new user can be registered"""
response = self.register_user(new_user)
self.assertEqual(response.status_code, status.HTTP_201_CREATED)
self.assertIn("token", response.data)
def test_user_login(self):
"""new user can be logged in\
and token returned on successful login"""
self.verify_user(new_user)
response = self.login_user(user_login)
#raise Exception(response.data)
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIn("token", response.data)
def test_wrong_token_header_prefix(self):
"""invalid prefix header provided"""
self.client.credentials(HTTP_AUTHORIZATION='hgfds ' + 'poiuytfd')
response = self.client.get("/api/user/", format="json")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_for_invalid_token(self):
"""validates token"""
self.client.credentials(HTTP_AUTHORIZATION='Token ' + 'yyuug')
response = self.client.get("/api/user/", format="json")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_no_token_in_header(self):
"""no token in header"""
self.add_credentials(response='')
response = self.client.get("/api/user/", format="json")
self.assertEqual(response.status_code, status.HTTP_403_FORBIDDEN)
def test_create_super_user(self):
"""checks for registration of a super user in the User model"""
user = User.objects.create_superuser(
username='ayebare',
password='sampletestcase')
self.assertIn(str(user), str(user.username))
def test_create_non_user(self):
"""check for registration of a client user in the User model"""
user = User.objects.create_user(
email='[email protected]',
username='ayebare',
password='sampletestcase')
self.assertIn(str(user), str(user.email))
def test_get_user_details(self):
"""get user details"""
self.user_access()
response = self.client.get('/api/user/', format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_update_user_details(self):
"""assert update route for user details is accessed"""
self.user_access()
response = self.client.put('/api/user/', format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_invalid_email_message(self):
"""test invalid email provided."""
response = self.register_user(invalid_email)
self.assertIn(response.data["errors"]["email"][0],
'Please enter a valid email in the format [email protected]')
def test_invalid_password(self):
"""asserts invalid password provided."""
response = self.register_user(invalid_password)
self.assertIn(response.data["errors"]["password"][0],
'Password should be alphanuemric (a-z,A_Z,0-9).')
def test_short_password(self):
"""test short password provided."""
response = self.register_user(short_password)
self.assertIn(response.data["errors"]["password"][0],
'Password should not be less than 8 characters.')
def test_duplicate_username(self):
"user with same username provided exists"""
self.register_user(new_user)
response = self.register_user(dup_username)
self.assertIn(response.data["errors"]["username"][0],
'user with this username already exists.')
| [
"[email protected]"
] | |
d31cbc5e81c667f85f43dbf60c55f2703673fc8c | 5e66a11717a4760646c0e02bf9ffff2f82f66d18 | /chemistry/qchem_make_opt_input_from_opt.py | ca09c35ad793d6cf3c29ac90a3ae9a121f288104 | [] | no_license | berquist/personal_scripts | 4517678fa57e524e9765dc71f05594e34bdd9c72 | d6c40ba6e5a607d26ffabf809cfdfdf3ce29bfb3 | refs/heads/master | 2023-07-21T08:44:36.401893 | 2023-07-07T19:55:55 | 2023-07-07T19:55:55 | 37,238,106 | 7 | 4 | null | null | null | null | UTF-8 | Python | false | false | 7,017 | py | #!/usr/bin/env python
"""qchem_make_opt_input_from_opt.py: Make an input file for a Q-Chem
geometry optimization based on the last possible geometry from a
Q-Chem geometry optimization; this effectively 'restarts' the geometry
with a new filename.
The script assumes the output file being read from is called
'*opt(\d*).out', where 'opt' might be followed by a number. The script
will write an input file called '*opt(\d*)+1.in', with the previous
number incremented by one.
"""
import os.path
import re
from collections import OrderedDict
import cclib
from cclib.parser.utils import PeriodicTable
def make_file_iterator(filename):
"""Return an iterator over the contents of the given file name."""
# pylint: disable=C0103
with open(filename) as f:
contents = f.read()
return iter(contents.splitlines())
def getargs():
"""Get command-line arguments."""
import argparse
# pylint: disable=C0103
parser = argparse.ArgumentParser()
parser.add_argument("outputfilename", nargs="+")
parser.add_argument("--fragment", action="store_true")
args = parser.parse_args()
return args
def parse_user_input(outputfilename):
"""Parse the $rem section in the repeated 'User input:' section of the
output.
The reason we do it this way rather than with shell tools is to
handle any $section more easily and in a case-insensitive manner.
"""
user_input = dict()
outputfile = make_file_iterator(outputfilename)
line = ""
while "User input:" not in line:
line = next(outputfile)
line = next(outputfile)
assert "----" in line
line = next(outputfile)
while "--------------------------------------------------------------" not in line:
if line.strip() == "":
pass
elif line[0] == "$" and line.strip().lower() != "$end":
section_header = line[1:].lower()
user_input[section_header] = []
elif line.strip().lower() == "$end":
user_input[section_header] = "\n".join(user_input[section_header])
else:
user_input[section_header].append(line)
line = next(outputfile)
return user_input
def parse_fragments_from_molecule(molecule):
"""Given a $molecule section (without the $ lines), identify the
charges and multiplicities of each fragment and the zero-based indices
for the starting atom of each fragment.
"""
charges = []
multiplicities = []
start_indices = []
it = iter(molecule.splitlines())
line = next(it)
# sys_charge, sys_multiplicity = line.split()
counter = 0
# Gather the charges, spin multiplicities, and starting positions
# of each fragment.
for line in it:
if "--" in line:
line = next(it)
charge, multiplicity = line.split()
charges.append(charge)
multiplicities.append(multiplicity)
start_indices.append(counter)
else:
counter += 1
assert len(charges) == len(multiplicities) == len(start_indices)
return charges, multiplicities, start_indices
def form_molecule_section_from_fragments(
elements, geometry, charges, multiplicities, start_indices
):
"""Form the Q-Chem $molecule section containing the charge,
multiplicity, and atomic symbols and coordinates for multiple
fragments.
Returns a list that will need to be joined with newlines.
"""
assert len(charges) == len(multiplicities) == (len(start_indices) + 1)
s = "{:3s} {:15.10f} {:15.10f} {:15.10f}"
# The first elements of the charge and multiplicity lists are for
# the supersystem (whole molecule).
molecule_section = ["{} {}".format(charges[0], multiplicities[0])]
from itertools import count
for (charge, multiplicity, idx_iter) in zip(charges[1:], multiplicities[1:], count(0)):
molecule_section.append("--")
molecule_section.append("{} {}".format(charge, multiplicity))
idx_start = start_indices[idx_iter]
try:
idx_end = start_indices[idx_iter + 1]
except IndexError:
idx_end = len(elements)
for element, coords in zip(elements[idx_start:idx_end], geometry[idx_start:idx_end]):
molecule_section.append(s.format(element, *coords))
return molecule_section
def form_molecule_section(elements, geometry, charge, multiplicity):
"""Form the Q-Chem $molecule section containing the charge,
multiplicity, and atomic symbols and coordinates.
Returns a list that will need to be joined with newlines.
"""
s = "{:3s} {:15.10f} {:15.10f} {:15.10f}"
molecule_section = ["{} {}".format(charge, multiplicity)]
for (
element,
coords,
) in zip(elements, geometry):
molecule_section.append(s.format(element, *coords))
return molecule_section
if __name__ == "__main__":
args = getargs()
pt = PeriodicTable()
for outputfilename in args.outputfilename:
job = cclib.io.ccopen(outputfilename)
assert isinstance(job, cclib.parser.qchemparser.QChem)
try:
data = job.parse()
# this is to deal with the Q-Chem parser not handling
# incomplete SCF cycles properly
except StopIteration:
print("no output made: StopIteration in {}".format(outputfilename))
continue
# Determine the name of the file we're writing.
assert outputfilename.endswith(".out")
numstr = re.search(r"opt(\d*)", outputfilename).groups()[0]
if numstr == "":
optnum = 2
else:
optnum = int(numstr) + 1
inputfilename = re.sub(r"opt\d*", "opt{}".format(optnum), outputfilename)
inputfilename = inputfilename.replace(".out", ".in")
inputfilename = os.path.basename(inputfilename)
user_input = parse_user_input(outputfilename)
# Form the atomic symbols and coordinates for each atom in
# $molecule.
element_list = [pt.element[Z] for Z in data.atomnos]
last_geometry = data.atomcoords[-1]
if args.fragment:
charges, multiplicities, start_indices = parse_fragments_from_molecule(
user_input["molecule"]
)
charges.insert(0, data.charge)
multiplicities.insert(0, data.mult)
molecule_section = form_molecule_section_from_fragments(
element_list, last_geometry, charges, multiplicities, start_indices
)
else:
molecule_section = form_molecule_section(
element_list, last_geometry, data.charge, data.mult
)
user_input["molecule"] = "\n".join(molecule_section)
with open(inputfilename, "w") as fh:
for section_header in user_input:
fh.write("${}\n".format(section_header))
fh.write(user_input[section_header])
fh.write("\n$end\n\n")
print(inputfilename)
| [
"[email protected]"
] | |
73a435e8064d91919dec34b8cd6bebc8580cccd6 | 47b4d76e9c87e6c45bab38e348ae12a60a60f94c | /Mutation_Modules/ASP_ABU.py | daf6d4fc6047cc403fb95ef273d03a28cd399101 | [] | no_license | PietroAronica/Parasol.py | 9bc17fd8e177e432bbc5ce4e7ee2d721341b2707 | 238abcdc2caee7bbfea6cfcdda1ca705766db204 | refs/heads/master | 2021-01-10T23:57:40.225140 | 2020-10-14T02:21:15 | 2020-10-14T02:21:15 | 70,791,648 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 18,941 | py | # ASP to ABU Mutation
import Frcmod_creator
import PDBHandler
import Leapy
from parmed.tools.actions import *
from parmed.amber.readparm import *
def parmed_command(vxi='VXI', lipid='No'):
bc = {}
with open('Param_files/AminoAcid/ASP.param', 'r') as b:
data = b.readlines()[1:]
for line in data:
key, value = line.split()
bc[key] = float(value)
b.close()
fc = {}
with open('Param_files/AminoAcid/ABU.param', 'r') as b:
data = b.readlines()[1:]
for line in data:
key, value = line.split()
fc[key] = float(value)
b.close()
for i in range(11):
a = i*10
i = float(i)
parm = AmberParm('Solv_{}_{}.prmtop'.format(a, 100-a))
changeLJPair(parm, ':{}@HB2'.format(vxi), ':{}@OD1'.format(vxi), '0', '0').execute()
changeLJPair(parm, ':{}@HB'.format(vxi), ':{}@HG1'.format(vxi), '0', '0').execute()
change(parm, 'charge', ':{}@N'.format(vxi), bc['N']+((fc['N']-bc['N'])/10)*i).execute()
change(parm, 'charge', ':{}@H'.format(vxi), bc['H']+((fc['H']-bc['H'])/10)*i).execute()
change(parm, 'charge', ':{}@CA'.format(vxi), bc['CA']+((fc['CA']-bc['CA'])/10)*i).execute()
change(parm, 'charge', ':{}@HA'.format(vxi), bc['HA']+((fc['HA']-bc['HA'])/10)*i).execute()
change(parm, 'charge', ':{}@CB'.format(vxi), bc['CB']+((fc['CB']-bc['CB'])/10)*i).execute()
change(parm, 'charge', ':{}@HB'.format(vxi), bc['HB2']-(bc['HB2']/10)*i).execute()
change(parm, 'charge', ':{}@HB2'.format(vxi), fc['HB2']/10*i).execute()
change(parm, 'charge', ':{}@HB3'.format(vxi), bc['HB3']+((fc['HB3']-bc['HB3'])/10)*i).execute()
change(parm, 'charge', ':{}@CG'.format(vxi), fc['CG']/10*i).execute()
change(parm, 'charge', ':{}@HG1'.format(vxi), fc['HG1']/10*i).execute()
change(parm, 'charge', ':{}@HG2'.format(vxi), fc['HG2']/10*i).execute()
change(parm, 'charge', ':{}@HG3'.format(vxi), fc['HG3']/10*i).execute()
change(parm, 'charge', ':{}@CG1'.format(vxi), (bc['CG']-(bc['CG']/10)*i)*(10-i)/10).execute()
change(parm, 'charge', ':{}@OD1'.format(vxi), (bc['OD1']-(bc['OD1']/10)*i)*(10-i)/10).execute()
change(parm, 'charge', ':{}@OD2'.format(vxi), (bc['OD2']-(bc['OD2']/10)*i)*(10-i)/10).execute()
change(parm, 'charge', ':{}@C'.format(vxi), bc['C']+((fc['C']-bc['C'])/10)*i).execute()
change(parm, 'charge', ':{}@O'.format(vxi), bc['O']+((fc['O']-bc['O'])/10)*i).execute()
#print printDetails(parm, ':VXI')
d = netCharge(parm).execute()
change(parm, 'charge', ':PC', '{:.3f}'.format(-d)).execute()
setOverwrite(parm).execute()
parmout(parm, 'Solv_{}_{}.prmtop'.format(a, 100-a)).execute()
def makevxi(struct, out, aa, vxi='VXI'):
struct.residue_dict[aa].set_resname(vxi)
CB = struct.residue_dict[aa].atom_dict['CB']
HB2 = struct.residue_dict[aa].atom_dict['HB2']
CG = struct.residue_dict[aa].atom_dict['CG']
pdb = open(out, 'w')
try:
pdb.write(struct.other_dict['Cryst1'].formatted())
except KeyError:
pass
for res in struct.residue_list:
for atom in res.atom_list:
if atom.get_name() == 'HB2' and res.get_resname() == vxi:
pdb.write(atom.change_name('HB'))
pdb.write(atom.superimposed1('HB2', CG))
elif atom.get_name() == 'HB3' and res.get_resname() == vxi:
pdb.write(atom.formatted())
pdb.write(atom.halfway_between('CG', CB, HB2))
pdb.write(atom.superimposed1('HG1', HB2))
pdb.write(atom.superimposed2('HG2', HB2))
pdb.write(atom.superimposed3('HG3', HB2))
elif atom.get_name() == 'CG' and res.get_resname() == vxi:
pdb.write(atom.change_name('CG1'))
else:
pdb.write(atom.formatted())
try:
pdb.write(struct.other_dict[atom.get_number()].ter())
except:
pass
for oth in struct.other_dict:
try:
if oth.startswith('Conect'):
pdb.write(struct.other_dict[oth].formatted())
except:
pass
pdb.write('END\n')
def variablemake(sym='^'):
var1 = sym + '1'
var2 = sym + '2'
var3 = sym + '3'
var4 = sym + '4'
var5 = sym + '5'
var6 = sym + '6'
var7 = sym + '7'
var8 = sym + '8'
var9 = sym + '9'
var10 = sym + '0'
var11 = sym + 'a'
var12 = sym + 'b'
var13 = sym + 'c'
var14 = sym + 'd'
var15 = sym + 'e'
return var1, var2, var3, var4, var5, var6, var7, var8, var9, var10, var11, var12, var13, var14, var15
def lib_make(ff, outputfile, vxi='VXI', var=variablemake()):
metcar = var[0]
methyd = var[1]
hydhyd1 = var[2]
carcar = var[3]
caroxy = var[4]
hydhyd2 = var[5]
ctrl = open('lyp.in', 'w')
ctrl.write("source %s\n"%ff)
ctrl.write("%s=loadpdb Param_files/LibPDB/ASP-ABU.pdb\n"%vxi)
ctrl.write('set %s.1.1 element "N"\n'%vxi)
ctrl.write('set %s.1.2 element "H"\n'%vxi)
ctrl.write('set %s.1.3 element "C"\n'%vxi)
ctrl.write('set %s.1.4 element "H"\n'%vxi)
ctrl.write('set %s.1.5 element "C"\n'%vxi)
ctrl.write('set %s.1.6 element "H"\n'%vxi)
ctrl.write('set %s.1.7 element "H"\n'%vxi)
ctrl.write('set %s.1.8 element "H"\n'%vxi)
ctrl.write('set %s.1.9 element "C"\n'%vxi)
ctrl.write('set %s.1.10 element "H"\n'%vxi)
ctrl.write('set %s.1.11 element "H"\n'%vxi)
ctrl.write('set %s.1.12 element "H"\n'%vxi)
ctrl.write('set %s.1.13 element "C"\n'%vxi)
ctrl.write('set %s.1.14 element "O"\n'%vxi)
ctrl.write('set %s.1.15 element "O"\n'%vxi)
ctrl.write('set %s.1.16 element "C"\n'%vxi)
ctrl.write('set %s.1.17 element "O"\n'%vxi)
ctrl.write('set %s.1.1 name "N"\n'%vxi)
ctrl.write('set %s.1.2 name "H"\n'%vxi)
ctrl.write('set %s.1.3 name "CA"\n'%vxi)
ctrl.write('set %s.1.4 name "HA"\n'%vxi)
ctrl.write('set %s.1.5 name "CB"\n'%vxi)
ctrl.write('set %s.1.6 name "HB"\n'%vxi)
ctrl.write('set %s.1.7 name "HB2"\n'%vxi)
ctrl.write('set %s.1.8 name "HB3"\n'%vxi)
ctrl.write('set %s.1.9 name "CG"\n'%vxi)
ctrl.write('set %s.1.10 name "HG1"\n'%vxi)
ctrl.write('set %s.1.11 name "HG2"\n'%vxi)
ctrl.write('set %s.1.12 name "HG3"\n'%vxi)
ctrl.write('set %s.1.13 name "CG1"\n'%vxi)
ctrl.write('set %s.1.14 name "OD1"\n'%vxi)
ctrl.write('set %s.1.15 name "OD2"\n'%vxi)
ctrl.write('set %s.1.16 name "C"\n'%vxi)
ctrl.write('set %s.1.17 name "O"\n'%vxi)
ctrl.write('set %s.1.1 type "N"\n'%vxi)
ctrl.write('set %s.1.2 type "H"\n'%vxi)
ctrl.write('set %s.1.3 type "CT"\n'%vxi)
ctrl.write('set %s.1.4 type "H1"\n'%vxi)
ctrl.write('set %s.1.5 type "CT"\n'%vxi)
ctrl.write('set %s.1.6 type "%s"\n'%(vxi, hydhyd1))
ctrl.write('set %s.1.7 type "%s"\n'%(vxi, hydhyd2))
ctrl.write('set %s.1.8 type "HC"\n'%vxi)
ctrl.write('set %s.1.9 type "%s"\n'%(vxi, metcar))
ctrl.write('set %s.1.10 type "%s"\n'%(vxi, methyd))
ctrl.write('set %s.1.11 type "%s"\n'%(vxi, methyd))
ctrl.write('set %s.1.12 type "%s"\n'%(vxi, methyd))
ctrl.write('set %s.1.13 type "%s"\n'%(vxi, carcar))
ctrl.write('set %s.1.14 type "%s"\n'%(vxi, caroxy))
ctrl.write('set %s.1.15 type "%s"\n'%(vxi, caroxy))
ctrl.write('set %s.1.16 type "C"\n'%vxi)
ctrl.write('set %s.1.17 type "O"\n'%vxi)
ctrl.write('bond %s.1.1 %s.1.2\n'%(vxi, vxi))
ctrl.write('bond %s.1.1 %s.1.3\n'%(vxi, vxi))
ctrl.write('bond %s.1.3 %s.1.4\n'%(vxi, vxi))
ctrl.write('bond %s.1.3 %s.1.5\n'%(vxi, vxi))
ctrl.write('bond %s.1.3 %s.1.16\n'%(vxi, vxi))
ctrl.write('bond %s.1.5 %s.1.6\n'%(vxi, vxi))
ctrl.write('bond %s.1.5 %s.1.7\n'%(vxi, vxi))
ctrl.write('bond %s.1.5 %s.1.8\n'%(vxi, vxi))
ctrl.write('bond %s.1.5 %s.1.9\n'%(vxi, vxi))
ctrl.write('bond %s.1.5 %s.1.13\n'%(vxi, vxi))
ctrl.write('bond %s.1.9 %s.1.10\n'%(vxi, vxi))
ctrl.write('bond %s.1.9 %s.1.11\n'%(vxi, vxi))
ctrl.write('bond %s.1.9 %s.1.12\n'%(vxi, vxi))
ctrl.write('bond %s.1.13 %s.1.14\n'%(vxi, vxi))
ctrl.write('bond %s.1.13 %s.1.15\n'%(vxi, vxi))
ctrl.write('bond %s.1.16 %s.1.17\n'%(vxi, vxi))
ctrl.write('set %s.1 connect0 %s.1.N\n'%(vxi, vxi))
ctrl.write('set %s.1 connect1 %s.1.C\n'%(vxi, vxi))
ctrl.write('set %s name "%s"\n'%(vxi, vxi))
ctrl.write('set %s.1 name "%s"\n'%(vxi, vxi))
ctrl.write('set %s head %s.1.N\n'%(vxi, vxi))
ctrl.write('set %s tail %s.1.C\n'%(vxi, vxi))
ctrl.write('saveoff %s %s.lib\n'%(vxi, vxi))
ctrl.write("quit\n")
ctrl.close()
Leapy.run('lyp.in', outputfile)
def all_make():
for i in range(0,110,10):
Frcmod_creator.make ('{}_{}.frcmod'.format(i, 100-i))
def cal(x, y, i):
num = x+((y-x)/10)*i
return num
def lac(y, x, i):
num = x+((y-x)/10)*i
return num
def stock_add_to_all(var=variablemake()):
metcar = var[0]
methyd = var[1]
hydhyd1 = var[2]
carcar = var[3]
caroxy = var[4]
hydhyd2 = var[5]
Frcmod_creator.make_hyb()
Frcmod_creator.TYPE_insert(carcar, 'C', 'sp2')
Frcmod_creator.TYPE_insert(caroxy, 'O', 'sp2')
Frcmod_creator.TYPE_insert(hydhyd2, 'H', 'sp3')
Frcmod_creator.TYPE_insert(metcar, 'C', 'sp3')
Frcmod_creator.TYPE_insert(methyd, 'H', 'sp3')
Frcmod_creator.TYPE_insert(hydhyd1, 'H', 'sp3')
p = {}
with open('Param_files/Stock/Stock.param', 'r') as b:
data = b.readlines()[1:]
for line in data:
p[line.split()[0]] = []
for point in line.split()[1:]:
p[line.split()[0]].append(float(point))
b.close()
for i in range(11):
a = i*10
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), carcar, cal(p['C'][0], p['0_C'][0], i), cal(p['C'][1], p['0_C'][1], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), caroxy, cal(p['O2'][0], p['0_O'][0], i), cal(p['O2'][1], p['0_O'][1], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), hydhyd2, cal(p['0_H'][0], p['HC'][0], i), cal(p['0_H'][1], p['HC'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', carcar), cal(p['CT_C'][0], p['CT_mH'][0], i), cal(p['CT_C'][1], p['CT_mH'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', hydhyd2), cal(p['HC_sC2'][0], p['CT_HC'][0], i), cal(p['HC_sC2'][1], p['CT_HC'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format(carcar, caroxy), cal(p['C_O2'][0], p['O2_mH'][0], i), cal(p['C_O2'][1], p['O2_mH'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', carcar, caroxy), cal(p['C_C_O2'][0], p['Dritt'][0], i), cal(p['C_C_O2'][1], p['Dritt'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(caroxy, carcar, caroxy), cal(p['O2_C_O2'][0], p['Close'][0], i), cal(p['O2_C_O2'][1], p['Close'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', carcar), cal(p['CT_CT_C'][0], p['C_C_H'][0], i), cal(p['CT_CT_C'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(hydhyd2, 'CT', carcar), cal(p['Close'][0], p['Close'][0], i), cal(p['Close'][1], p['Close'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', hydhyd2), cal(p['C_C_H'][0], p['C_C_H'][0], i), cal(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('HC', 'CT', carcar), lac(p['C_C_H'][0], p['C_C_H'][0], i), lac(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('HC', 'CT', hydhyd2), lac(p['H_C_H'][0], p['H_C_H'][0], i), lac(p['H_C_H'][1], p['H_C_H'][1], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', 'CT', carcar, caroxy), cal(p['Ring_Dihe_2'][0], p['Ring_Dihe_2'][0], i), cal(p['Ring_Dihe_2'][1], p['Ring_Dihe_2'][1], i), cal(p['Ring_Dihe_2'][2], p['Ring_Dihe_2'][2], i), cal(p['Ring_Dihe_2'][3], p['Ring_Dihe_2'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('HC', 'CT', carcar, caroxy), lac(p['Ring_Dihe_2'][0], p['Ring_Dihe_2'][0], i), lac(p['Ring_Dihe_2'][1], p['Ring_Dihe_2'][1], i), lac(p['Ring_Dihe_2'][2], p['Ring_Dihe_2'][2], i), lac(p['Ring_Dihe_2'][3], p['Ring_Dihe_2'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(hydhyd2, 'CT', carcar, caroxy), cal(p['0_Dihe'][0], p['0_Dihe'][0], i), cal(p['0_Dihe'][1], p['0_Dihe'][1], i), cal(p['0_Dihe'][2], p['0_Dihe'][2], i), cal(p['0_Dihe'][3], p['0_Dihe'][3], i))
Frcmod_creator.IMPROPER_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('X ', caroxy, carcar, caroxy), cal(p['Car_imp'][0], p['Imp_0'][0], i), cal(p['Car_imp'][1], p['Imp_0'][1], i), cal(p['Car_imp'][2], p['Imp_0'][2], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), carcar, cal(p['C'][2], p['0_C'][2], i), cal(p['C'][3], p['0_C'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), caroxy, cal(p['O2'][2], p['0_O'][2], i), cal(p['O2'][3], p['0_O'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), hydhyd2, cal(p['0_H'][2], p['HC'][2], i), cal(p['0_H'][3], p['HC'][3], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(hydhyd1, 'CT', carcar), lac(p['C_C_H'][0], p['C_C_H'][0], i), lac(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(metcar, 'CT', carcar), lac(p['C_C_H'][0], p['C_C_H'][0], i), lac(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(hydhyd1, 'CT', hydhyd2), lac(p['H_C_H'][0], p['H_C_H'][0], i), lac(p['H_C_H'][1], p['H_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(hydhyd2, 'CT', metcar), lac(p['C_C_H'][0], p['C_C_H'][0], i), lac(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(hydhyd1, 'CT', carcar, caroxy), lac(p['Ring_Dihe_2'][0], p['Ring_Dihe_2'][0], i), lac(p['Ring_Dihe_2'][1], p['Ring_Dihe_2'][1], i), lac(p['Ring_Dihe_2'][2], p['Ring_Dihe_2'][2], i), lac(p['Ring_Dihe_2'][3], p['Ring_Dihe_2'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(metcar, 'CT', carcar, caroxy), lac(p['0_Dihe'][0], p['0_Dihe'][0], i), lac(p['0_Dihe'][1], p['0_Dihe'][1], i), lac(p['0_Dihe'][2], p['0_Dihe'][2], i), lac(p['0_Dihe'][3], p['0_Dihe'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(methyd, metcar, 'CT', carcar), lac(p['0_Dihe'][0], p['0_Dihe'][0], i), lac(p['0_Dihe'][1], p['0_Dihe'][1], i), lac(p['0_Dihe'][2], p['0_Dihe'][2], i), lac(p['0_Dihe'][3], p['0_Dihe'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(hydhyd2, 'CT', metcar, methyd), lac(p['H_C_C_H'][0], p['0_1'][0], i), lac(p['H_C_C_H'][1], p['0_1'][1], i), lac(p['H_C_C_H'][2], p['0_1'][2], i), lac(p['H_C_C_H'][3], p['0_1'][3], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), metcar, lac(p['CT'][0], p['0_C'][0], i), lac(p['CT'][1], p['0_C'][1], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), methyd, lac(p['HC'][0], p['0_H'][0], i), lac(p['HC'][1], p['0_H'][1], i))
Frcmod_creator.MASS_insert('{}_{}.frcmod'.format(a, 100-a), hydhyd1, lac(p['0_H'][0], p['HC'][0], i), lac(p['0_H'][1], p['HC'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', metcar), lac(p['CT_CT'][0], p['CT_mH'][0], i), lac(p['CT_CT'][1], p['CT_mH'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format('CT', hydhyd1), lac(p['HC_sC'][0], p['CT_HC'][0], i), lac(p['HC_sC'][1], p['CT_HC'][1], i))
Frcmod_creator.BOND_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}'.format(metcar, methyd), lac(p['CT_HC'][0], p['HC_mH'][0], i), lac(p['CT_HC'][1], p['HC_mH'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(hydhyd1, 'CT', metcar), lac(p['Close'][0], p['Close'][0], i), lac(p['Close'][1], p['Close'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', metcar, methyd), lac(p['C_C_H'][0], p['Dritt'][0], i), lac(p['C_C_H'][1], p['Dritt'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format(methyd, metcar, methyd), lac(p['H_C_H'][0], p['Close'][0], i), lac(p['H_C_H'][1], p['Close'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', metcar), lac(p['C_C_C'][0], p['C_C_C'][0], i), lac(p['C_C_C'][1], p['C_C_C'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('HC', 'CT', metcar), lac(p['C_C_H'][0], p['C_C_H'][0], i), lac(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('CT', 'CT', hydhyd1), lac(p['C_C_H'][0], p['C_C_H'][0], i), lac(p['C_C_H'][1], p['C_C_H'][1], i))
Frcmod_creator.ANGLE_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}'.format('HC', 'CT', hydhyd1), lac(p['H_C_H'][0], p['H_C_H'][0], i), lac(p['H_C_H'][1], p['H_C_H'][1], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('CT', 'CT', metcar, methyd), lac(p['C_C_C_H'][0], p['0_1'][0], i), lac(p['C_C_C_H'][1], p['0_1'][1], i), lac(p['C_C_C_H'][2], p['0_1'][2], i), lac(p['C_C_C_H'][3], p['0_1'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format('HC', 'CT', metcar, methyd), lac(p['H_C_C_H'][0], p['0_1'][0], i), lac(p['H_C_C_H'][1], p['0_1'][1], i), lac(p['H_C_C_H'][2], p['0_1'][2], i), lac(p['H_C_C_H'][3], p['0_1'][3], i))
Frcmod_creator.DIHEDRAL_insert('{}_{}.frcmod'.format(a, 100-a), '{}-{}-{}-{}'.format(hydhyd1, 'CT', metcar, methyd), lac(p['0_Dihe'][0], p['0_Dihe'][0], i), lac(p['0_Dihe'][1], p['0_Dihe'][1], i), lac(p['0_Dihe'][2], p['0_Dihe'][2], i), lac(p['0_Dihe'][3], p['0_Dihe'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), metcar, lac(p['CT'][2], p['0_C'][2], i), lac(p['CT'][3], p['0_C'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), methyd, lac(p['HC'][2], p['0_H'][2], i), lac(p['HC'][3], p['0_H'][3], i))
Frcmod_creator.NONBON_insert('{}_{}.frcmod'.format(a, 100-a), hydhyd1, lac(p['0_H'][2], p['HC'][2], i), lac(p['0_H'][3], p['HC'][3], i))
| [
"[email protected]"
] | |
baf6d43bb76cf966f9aafce6ee12d8dd8e818f72 | a74cabbe1b11fc8ef575ea86f2543cd95db78ec9 | /python_program/q783_Minimum_Distance_Between_BST_Nodes.py | 4e4fe499e62126c9b084fde6bd89e951b18accbf | [] | no_license | tszandy/leetcode | 87e3ccf291b2879637d2d8238935a455b401a78a | f1f4361541dcffbb291285663c8820d7ffb37d2f | refs/heads/master | 2023-04-06T15:34:04.847875 | 2023-03-26T12:22:42 | 2023-03-26T12:22:42 | 204,069,234 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,628 | py | from typing import List
from collections import Counter,defaultdict
from math import *
from functools import reduce,lru_cache,total_ordering
import numpy as np
from heapq import *
from bisect import bisect_left,bisect_right
from itertools import count
import queue
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, val=0, left=None, right=None):
# self.val = val
# self.left = left
# self.right = right
class Solution:
def minDiffInBST(self, root: Optional[TreeNode]) -> int:
return self.min_difference(root)
def min_difference(self,node):
if node == None:
return float("inf")
node_left_min = float("inf")
if node.left!=None:
node_left_min = node.val-self.max_left(node.left)
node_right_min = float("inf")
if node.right!=None:
node_right_min = self.max_right(node.right)-node.val
left_min = self.min_difference(node.left)
right_min = self.min_difference(node.right)
return min(node_left_min,node_right_min,left_min,right_min)
def max_left(self,node):
if node.right == None:
return node.val
else:
return self.max_left(node.right)
def max_right(self,node):
if node.left == None:
return node.val
else:
return self.max_right(node.left)
sol = Solution()
# input
[4,2,6,1,3]
[1,0,48,null,null,12,49]
[1,0]
[2,0,5]
[2,0,6]
[5,0,13]
# output
output = sol.minDiffInBST(root)
# answer
answer = ""
print(output, answer, answer == output)
| [
"[email protected]"
] | |
2c4815d72b5155adfdf7058fe4a14ff7f245285f | 6497bc5638453877744c900f7accef0203f36e89 | /leedcode1_twosum.py | e4bfcfdfe9201a15782286e8a9d575f229c34ec0 | [] | no_license | budaLi/leetcode-python- | 82e9affb3317f63a82d89d7e82650de3c804a5ac | 4221172b46d286ab6bf4c74f4d015ee9ef3bda8d | refs/heads/master | 2022-01-30T00:55:26.209864 | 2022-01-05T01:01:47 | 2022-01-05T01:01:47 | 148,323,318 | 46 | 23 | null | null | null | null | UTF-8 | Python | false | false | 868 | py | #-*-coding:utf8-*-
#author : Lenovo
#date: 2018/7/23
class Solution(object):
def twoSum(self, nums, target):
"""
:type nums: List[int]
:type target: int
:rtype: List[int]
"""
d = {}# d is a dictionary to map the value of nums and the index in nums
size = 0
for size in range(len(nums)):
if not nums[size] in d:
d[nums[size]] = size #if nums[size] doesn't exist in d ,create it
if target - nums[size] in d: #if nums[size] and target - nums[size] are both in d
# if d[target-nums[size]] < size + 1: # one situation should be minded nums[size] == target - nums[size]
ans = [d[target - nums[size]] , size ]# for example [0,1,2] 0 and [0,1,2,0],0
return ans
ex=Solution()
e=ex.twoSum([1,2,5,7,8],16)
print(e) | [
"[email protected]"
] | |
a736d5a5660159fb0615d48680b0d70ffdac597c | a2080cbcf9694ad03690769cfc64d85a57f1d9d5 | /src/graphql/language/printer.py | 842f251878846b17bd2c7f9e94bba434648fd747 | [
"MIT"
] | permissive | wuyuanyi135/graphql-core | 84196a47aec0f9508db3f8aadb8951b9fc9b9fe0 | 169ae7bced0f515603e97f1def925f3d062e5009 | refs/heads/main | 2023-04-13T11:38:10.815573 | 2021-05-02T05:17:29 | 2021-05-02T05:21:58 | 363,327,364 | 1 | 0 | MIT | 2021-05-01T05:05:29 | 2021-05-01T05:05:28 | null | UTF-8 | Python | false | false | 13,157 | py | from functools import wraps
from json import dumps
from typing import Any, Callable, Collection, Optional
from ..language.ast import Node, OperationType
from .visitor import visit, Visitor
from .block_string import print_block_string
__all__ = ["print_ast"]
Strings = Collection[str]
class PrintedNode:
"""A union type for all nodes that have been processed by the printer."""
alias: str
arguments: Strings
block: bool
default_value: str
definitions: Strings
description: str
directives: str
fields: Strings
interfaces: Strings
locations: Strings
name: str
operation: OperationType
operation_types: Strings
repeatable: bool
selection_set: str
selections: Strings
type: str
type_condition: str
types: Strings
value: str
values: Strings
variable: str
variable_definitions: Strings
def print_ast(ast: Node) -> str:
"""Convert an AST into a string.
The conversion is done using a set of reasonable formatting rules.
"""
return visit(ast, PrintAstVisitor())
def add_description(method: Callable[..., str]) -> Callable:
"""Decorator adding the description to the output of a static visitor method."""
@wraps(method)
def wrapped(node: PrintedNode, *args: Any) -> str:
return join((node.description, method(node, *args)), "\n")
return wrapped
class PrintAstVisitor(Visitor):
@staticmethod
def leave_name(node: PrintedNode, *_args: Any) -> str:
return node.value
@staticmethod
def leave_variable(node: PrintedNode, *_args: Any) -> str:
return f"${node.name}"
# Document
@staticmethod
def leave_document(node: PrintedNode, *_args: Any) -> str:
return join(node.definitions, "\n\n") + "\n"
@staticmethod
def leave_operation_definition(node: PrintedNode, *_args: Any) -> str:
name, op, selection_set = node.name, node.operation, node.selection_set
var_defs = wrap("(", join(node.variable_definitions, ", "), ")")
directives = join(node.directives, " ")
# Anonymous queries with no directives or variable definitions can use the
# query short form.
return (
join((op.value, join((name, var_defs)), directives, selection_set), " ")
if (name or directives or var_defs or op != OperationType.QUERY)
else selection_set
)
@staticmethod
def leave_variable_definition(node: PrintedNode, *_args: Any) -> str:
return (
f"{node.variable}: {node.type}"
f"{wrap(' = ', node.default_value)}"
f"{wrap(' ', join(node.directives, ' '))}"
)
@staticmethod
def leave_selection_set(node: PrintedNode, *_args: Any) -> str:
return block(node.selections)
@staticmethod
def leave_field(node: PrintedNode, *_args: Any) -> str:
return join(
(
wrap("", node.alias, ": ")
+ node.name
+ wrap("(", join(node.arguments, ", "), ")"),
join(node.directives, " "),
node.selection_set,
),
" ",
)
@staticmethod
def leave_argument(node: PrintedNode, *_args: Any) -> str:
return f"{node.name}: {node.value}"
# Fragments
@staticmethod
def leave_fragment_spread(node: PrintedNode, *_args: Any) -> str:
return f"...{node.name}{wrap(' ', join(node.directives, ' '))}"
@staticmethod
def leave_inline_fragment(node: PrintedNode, *_args: Any) -> str:
return join(
(
"...",
wrap("on ", node.type_condition),
join(node.directives, " "),
node.selection_set,
),
" ",
)
@staticmethod
def leave_fragment_definition(node: PrintedNode, *_args: Any) -> str:
# Note: fragment variable definitions are experimental and may be changed or
# removed in the future.
return (
f"fragment {node.name}"
f"{wrap('(', join(node.variable_definitions, ', '), ')')}"
f" on {node.type_condition}"
f" {wrap('', join(node.directives, ' '), ' ')}"
f"{node.selection_set}"
)
# Value
@staticmethod
def leave_int_value(node: PrintedNode, *_args: Any) -> str:
return node.value
@staticmethod
def leave_float_value(node: PrintedNode, *_args: Any) -> str:
return node.value
@staticmethod
def leave_string_value(node: PrintedNode, key: str, *_args: Any) -> str:
if node.block:
return print_block_string(node.value, "" if key == "description" else " ")
return dumps(node.value)
@staticmethod
def leave_boolean_value(node: PrintedNode, *_args: Any) -> str:
return "true" if node.value else "false"
@staticmethod
def leave_null_value(_node: PrintedNode, *_args: Any) -> str:
return "null"
@staticmethod
def leave_enum_value(node: PrintedNode, *_args: Any) -> str:
return node.value
@staticmethod
def leave_list_value(node: PrintedNode, *_args: Any) -> str:
return f"[{join(node.values, ', ')}]"
@staticmethod
def leave_object_value(node: PrintedNode, *_args: Any) -> str:
return f"{{{join(node.fields, ', ')}}}"
@staticmethod
def leave_object_field(node: PrintedNode, *_args: Any) -> str:
return f"{node.name}: {node.value}"
# Directive
@staticmethod
def leave_directive(node: PrintedNode, *_args: Any) -> str:
return f"@{node.name}{wrap('(', join(node.arguments, ', '), ')')}"
# Type
@staticmethod
def leave_named_type(node: PrintedNode, *_args: Any) -> str:
return node.name
@staticmethod
def leave_list_type(node: PrintedNode, *_args: Any) -> str:
return f"[{node.type}]"
@staticmethod
def leave_non_null_type(node: PrintedNode, *_args: Any) -> str:
return f"{node.type}!"
# Type System Definitions
@staticmethod
@add_description
def leave_schema_definition(node: PrintedNode, *_args: Any) -> str:
return join(
("schema", join(node.directives, " "), block(node.operation_types)), " "
)
@staticmethod
def leave_operation_type_definition(node: PrintedNode, *_args: Any) -> str:
return f"{node.operation.value}: {node.type}"
@staticmethod
@add_description
def leave_scalar_type_definition(node: PrintedNode, *_args: Any) -> str:
return join(("scalar", node.name, join(node.directives, " ")), " ")
@staticmethod
@add_description
def leave_object_type_definition(node: PrintedNode, *_args: Any) -> str:
return join(
(
"type",
node.name,
wrap("implements ", join(node.interfaces, " & ")),
join(node.directives, " "),
block(node.fields),
),
" ",
)
@staticmethod
@add_description
def leave_field_definition(node: PrintedNode, *_args: Any) -> str:
args = node.arguments
args = (
wrap("(\n", indent(join(args, "\n")), "\n)")
if has_multiline_items(args)
else wrap("(", join(args, ", "), ")")
)
directives = wrap(" ", join(node.directives, " "))
return f"{node.name}{args}: {node.type}{directives}"
@staticmethod
@add_description
def leave_input_value_definition(node: PrintedNode, *_args: Any) -> str:
return join(
(
f"{node.name}: {node.type}",
wrap("= ", node.default_value),
join(node.directives, " "),
),
" ",
)
@staticmethod
@add_description
def leave_interface_type_definition(node: PrintedNode, *_args: Any) -> str:
return join(
(
"interface",
node.name,
wrap("implements ", join(node.interfaces, " & ")),
join(node.directives, " "),
block(node.fields),
),
" ",
)
@staticmethod
@add_description
def leave_union_type_definition(node: PrintedNode, *_args: Any) -> str:
return join(
(
"union",
node.name,
join(node.directives, " "),
"= " + join(node.types, " | ") if node.types else "",
),
" ",
)
@staticmethod
@add_description
def leave_enum_type_definition(node: PrintedNode, *_args: Any) -> str:
return join(
("enum", node.name, join(node.directives, " "), block(node.values)), " "
)
@staticmethod
@add_description
def leave_enum_value_definition(node: PrintedNode, *_args: Any) -> str:
return join((node.name, join(node.directives, " ")), " ")
@staticmethod
@add_description
def leave_input_object_type_definition(node: PrintedNode, *_args: Any) -> str:
return join(
("input", node.name, join(node.directives, " "), block(node.fields)), " "
)
@staticmethod
@add_description
def leave_directive_definition(node: PrintedNode, *_args: Any) -> str:
args = node.arguments
args = (
wrap("(\n", indent(join(args, "\n")), "\n)")
if has_multiline_items(args)
else wrap("(", join(args, ", "), ")")
)
repeatable = " repeatable" if node.repeatable else ""
locations = join(node.locations, " | ")
return f"directive @{node.name}{args}{repeatable} on {locations}"
@staticmethod
def leave_schema_extension(node: PrintedNode, *_args: Any) -> str:
return join(
("extend schema", join(node.directives, " "), block(node.operation_types)),
" ",
)
@staticmethod
def leave_scalar_type_extension(node: PrintedNode, *_args: Any) -> str:
return join(("extend scalar", node.name, join(node.directives, " ")), " ")
@staticmethod
def leave_object_type_extension(node: PrintedNode, *_args: Any) -> str:
return join(
(
"extend type",
node.name,
wrap("implements ", join(node.interfaces, " & ")),
join(node.directives, " "),
block(node.fields),
),
" ",
)
@staticmethod
def leave_interface_type_extension(node: PrintedNode, *_args: Any) -> str:
return join(
(
"extend interface",
node.name,
wrap("implements ", join(node.interfaces, " & ")),
join(node.directives, " "),
block(node.fields),
),
" ",
)
@staticmethod
def leave_union_type_extension(node: PrintedNode, *_args: Any) -> str:
return join(
(
"extend union",
node.name,
join(node.directives, " "),
"= " + join(node.types, " | ") if node.types else "",
),
" ",
)
@staticmethod
def leave_enum_type_extension(node: PrintedNode, *_args: Any) -> str:
return join(
("extend enum", node.name, join(node.directives, " "), block(node.values)),
" ",
)
@staticmethod
def leave_input_object_type_extension(node: PrintedNode, *_args: Any) -> str:
return join(
("extend input", node.name, join(node.directives, " "), block(node.fields)),
" ",
)
def join(strings: Optional[Strings], separator: str = "") -> str:
"""Join strings in a given collection.
Return an empty string if it is None or empty, otherwise join all items together
separated by separator if provided.
"""
return separator.join(s for s in strings if s) if strings else ""
def block(strings: Optional[Strings]) -> str:
"""Return strings inside a block.
Given a collection of strings, return a string with each item on its own line,
wrapped in an indented "{ }" block.
"""
return wrap("{\n", indent(join(strings, "\n")), "\n}")
def wrap(start: str, string: Optional[str], end: str = "") -> str:
"""Wrap string inside other strings at start and end.
If the string is not None or empty, then wrap with start and end, otherwise return
an empty string.
"""
return f"{start}{string}{end}" if string else ""
def indent(string: str) -> str:
"""Indent string with two spaces.
If the string is not None or empty, add two spaces at the beginning of every line
inside the string.
"""
return wrap(" ", string.replace("\n", "\n "))
def is_multiline(string: str) -> bool:
"""Check whether a string consists of multiple lines."""
return "\n" in string
def has_multiline_items(strings: Optional[Strings]) -> bool:
"""Check whether one of the items in the list has multiple lines."""
return any(is_multiline(item) for item in strings) if strings else False
| [
"[email protected]"
] | |
d76e46afa9347a3212afc1f391dab391766e7696 | a36501f44a09ca03dd1167e1d7965f782e159097 | /app/extensions/mongobeat/models.py | 27451e0eefe1a01350156a088481e408b9a33cd9 | [
"Apache-2.0"
] | permissive | ssfdust/full-stack-flask-smorest | 9429a2cdcaa3ff3538875cc74cff802765678d4b | 4f866b2264e224389c99bbbdb4521f4b0799b2a3 | refs/heads/master | 2023-08-05T08:48:03.474042 | 2023-05-07T01:08:20 | 2023-05-07T01:08:20 | 205,528,296 | 39 | 10 | Apache-2.0 | 2023-08-31T00:18:42 | 2019-08-31T10:12:25 | Python | UTF-8 | Python | false | false | 7,149 | py | # Copyright 2019 RedLotus <[email protected]>
# Author: RedLotus <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Copyright 2018 Regents of the University of Michigan
# Licensed under the Apache License, Version 2.0 (the "License"); you may not
# use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
"""
app.extensions.mongobeat
~~~~~~~~~~~~~~~~~~~~~~~~~
MongoBeat的ORM模块
"""
import datetime
from ast import literal_eval
import celery.schedules
from celery import current_app
from mongoengine import (
BooleanField,
DateTimeField,
DictField,
DynamicDocument,
DynamicField,
EmbeddedDocument,
EmbeddedDocumentField,
IntField,
ListField,
StringField,
)
def get_periodic_task_collection():
"""获取表名"""
if (
hasattr(current_app.conf, "CELERY_MONGODB_SCHEDULER_COLLECTION")
and current_app.conf.CELERY_MONGODB_SCHEDULER_COLLECTION
):
return current_app.conf.CELERY_MONGODB_SCHEDULER_COLLECTION # pragma: no cover
return "schedules"
#: Authorized values for PeriodicTask.Interval.period
PERIODS = ("days", "hours", "minutes", "seconds", "microseconds")
class PeriodicTask(DynamicDocument):
"""
周期任务的ORM
:attr name: 定时名称
:attr task: 任务名称
:attr interval: 定时
:attr crontab: crontab
:attr args: 参数
:attr kwargs: 键值参数
:attr queue: 队列
:attr no_changes: nochanges
:attr exchange: AMPQ的交换器
:attr routing_key: AMPQ路由
:attr soft_time_limit: 软时间限制
:attr expires: 过期时间
:attr start_after: 在某时间后运行
:attr enabled: 启用
:attr last_run_at: 最后运行时间
:attr total_run_count: 总计运行次数
:attr max_run_count: 最大运行次数
:attr date_changed: 改变日期
:attr description: 描述
:attr run_immediately: 立刻运行
"""
meta = {"collection": get_periodic_task_collection(), "allow_inheritance": True}
class Interval(EmbeddedDocument):
"""
:attr every 每(周期)
:attr period 周期区间
"""
meta = {"allow_inheritance": True}
every = IntField(min_value=0, default=0, required=True, verbose_name="周期")
period = StringField(choices=PERIODS, verbose_name="每")
@property
def schedule(self):
return celery.schedules.schedule(
datetime.timedelta(**{self.period: self.every})
)
@property
def period_singular(self):
return self.period[:-1]
def __str__(self):
if self.every == 1:
return "every {0.period_singular}".format(self)
return "every {0.every} {0.period}".format(self)
class Crontab(EmbeddedDocument):
"""
:attr minute 分钟
:attr hour 小时
:attr day_of_week 周
:attr day_of_month 日
:attr mouth_of_year 月
"""
meta = {"allow_inheritance": True}
minute = StringField(default="*", required=True, verbose_name="分钟")
hour = StringField(default="*", required=True, verbose_name="小时")
day_of_week = StringField(default="*", required=True, verbose_name="周")
day_of_month = StringField(default="*", required=True, verbose_name="日")
month_of_year = StringField(default="*", required=True, verbose_name="月")
@property
def schedule(self):
return celery.schedules.crontab(
minute=self.minute,
hour=self.hour,
day_of_week=self.day_of_week,
day_of_month=self.day_of_month,
month_of_year=self.month_of_year,
)
def __str__(self):
def rfield(f):
return f and str(f).replace(" ", "") or "*"
return "{0} {1} {2} {3} {4} (分/时/周/日/月)".format(
rfield(self.minute),
rfield(self.hour),
rfield(self.day_of_week),
rfield(self.day_of_month),
rfield(self.month_of_year),
)
name = StringField(unique=True, verbose_name="定时名称")
task = StringField(required=True, verbose_name="任务名称")
args = ListField(DynamicField(), verbose_name="参数")
kwargs = DictField(verbose_name="键值参数")
queue = StringField(verbose_name="队列")
exchange = StringField(verbose_name="AMPQ的交换器")
routing_key = StringField(verbose_name="AMPQ路由")
soft_time_limit = IntField(verbose_name="软时间限制")
expires = DateTimeField(verbose_name="过期时间")
start_after = DateTimeField(verbose_name="在某时间后运行")
enabled = BooleanField(default=False, verbose_name="启用")
last_run_at = DateTimeField(verbose_name="最后运行时间")
total_run_count = IntField(min_value=0, default=0, verbose_name="总计运行次数")
max_run_count = IntField(min_value=0, default=0, verbose_name="最大运行次数")
date_changed = DateTimeField(verbose_name="改变日期")
description = StringField(verbose_name="描述")
run_immediately = BooleanField(verbose_name="立刻运行")
type = StringField(
required=True, verbose_name="类型", choices=["crontab", "interval"]
)
interval = EmbeddedDocumentField(Interval, verbose_name="定时")
crontab = EmbeddedDocumentField(Crontab, verbose_name="周期")
# objects = managers.PeriodicTaskManager()
no_changes = False
def clean(self):
"""透过MongoEngine验证interval和crontab不是同时存在"""
if self.type == "crontab":
self.interval = None
else:
self.crontab = None
if isinstance(self.args, str):
self.args = literal_eval(self.args)
if isinstance(self.kwargs, str):
self.kwargs = literal_eval(self.kwargs)
@property
def schedule(self):
if self.interval:
return self.interval.schedule
elif self.crontab:
return self.crontab.schedule
else:
raise Exception("must define interval or crontab schedule")
def __str__(self):
fmt = "{0.name}: {{no schedule}}"
if self.interval:
fmt = "{0.name}: {0.interval}"
elif self.crontab:
fmt = "{0.name}: {0.crontab}"
else:
raise Exception("must define interval or crontab schedule")
return fmt.format(self)
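
# Hedged usage sketch (not part of the original module): building an
# interval-based task in memory. Saving it would additionally require an
# active mongoengine connection; "app.tasks.cleanup" is a hypothetical name.
if __name__ == "__main__":  # pragma: no cover
    example = PeriodicTask(
        name="cleanup-every-5-minutes",
        task="app.tasks.cleanup",
        type="interval",
        interval=PeriodicTask.Interval(every=5, period="minutes"),
        enabled=True,
    )
    print(example.schedule)  # celery schedule wrapping a 5-minute timedelta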
| [
"[email protected]"
] | |
f3287e42a48321132242a2d84b76e9deee52f5db | 7834e7a48399b156401ea62c0c6d2de80ad421f5 | /pysparkling/fileio/codec/codec.py | c057cfaa4b9cab5df56f5d5f9ac4badb66914438 | [
"MIT"
] | permissive | vojnovski/pysparkling | b9758942aba0d068f6c51797c8fb491cf59c3401 | 21b36464371f121dc7963dac09d300e7235f587e | refs/heads/master | 2020-04-08T18:33:55.707209 | 2016-07-27T15:12:59 | 2016-07-27T15:12:59 | 62,555,929 | 0 | 0 | null | 2016-07-04T11:06:18 | 2016-07-04T11:06:18 | null | UTF-8 | Python | false | false | 222 | py | import logging
log = logging.getLogger(__name__)
class Codec(object):
def __init__(self):
pass
def compress(self, stream):
return stream
def decompress(self, stream):
return stream
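
# Hedged sketch (not in the original module): a concrete codec only needs to
# override the two hooks. This gzip variant uses just the standard library and
# is illustrative -- the real pysparkling codecs live in sibling modules.
import gzip
from io import BytesIO

class GzipSketchCodec(Codec):
    def compress(self, stream):
        return BytesIO(gzip.compress(stream.read()))

    def decompress(self, stream):
        return BytesIO(gzip.decompress(stream.read()))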
| [
"[email protected]"
] | |
354cd069b9195ce2cabedf5b537fbef6f1713e6b | 8c7b03f24517e86f6159e4d74c8528bfbcbf31af | /test/python_api/lldbutil/frame/TestFrameUtils.py | 04d398bc5fa1b95d457aa1aaae5bd15ded01ab94 | [
"NCSA"
] | permissive | markpeek/lldb | f849567fbd7791be10aacd41be44ee15f1a4fdc4 | 58c8d5af715a3da6cbb7e0efc6905e9d07410038 | refs/heads/master | 2021-01-15T17:01:57.014568 | 2011-12-24T01:08:58 | 2011-12-24T01:08:58 | 3,042,888 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 2,103 | py | """
Test utility functions for the frame object.
"""
import os
import unittest2
import lldb
from lldbtest import *
class FrameUtilsTestCase(TestBase):
mydir = os.path.join("python_api", "lldbutil", "frame")
def setUp(self):
# Call super's setUp().
TestBase.setUp(self)
# Find the line number to break inside main().
self.line = line_number('main.c',
"// Find the line number here.")
@python_api_test
def test_frame_utils(self):
"""Test utility functions for the frame object."""
self.buildDefault()
self.frame_utils()
def frame_utils(self):
exe = os.path.join(os.getcwd(), "a.out")
target = self.dbg.CreateTarget(exe)
self.assertTrue(target, VALID_TARGET)
breakpoint = target.BreakpointCreateByLocation("main.c", self.line)
self.assertTrue(breakpoint, VALID_BREAKPOINT)
# Now launch the process, and do not stop at entry point.
process = target.LaunchSimple(None, None, os.getcwd())
if not process:
self.fail("SBTarget.LaunchProcess() failed")
self.assertTrue(process.GetState() == lldb.eStateStopped,
PROCESS_STOPPED)
import lldbutil
thread = lldbutil.get_stopped_thread(process, lldb.eStopReasonBreakpoint)
frame0 = thread.GetFrameAtIndex(0)
frame1 = thread.GetFrameAtIndex(1)
parent = lldbutil.get_parent_frame(frame0)
self.assertTrue(parent and parent.GetFrameID() == frame1.GetFrameID())
frame0_args = lldbutil.get_args_as_string(frame0)
parent_args = lldbutil.get_args_as_string(parent)
self.assertTrue(frame0_args and parent_args and "(int)val=1" in frame0_args)
if self.TraceOn():
lldbutil.print_stacktrace(thread)
print "Current frame: %s" % frame0_args
print "Parent frame: %s" % parent_args
if __name__ == '__main__':
import atexit
lldb.SBDebugger.Initialize()
atexit.register(lambda: lldb.SBDebugger.Terminate())
unittest2.main()
| [
"[email protected]"
] | |
44ca2e8649630c0f338c6636d11ae3d772d89710 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03418/s842655187.py | e812523bc9e5891268bd0c4350311e175da8ddc3 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 161 | py | N,K=map(int,input().split())
a=0
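# Count pairs (a, b) with 1 <= a, b <= N and a mod b >= K.
# For each modulus i = b > K: every full block of i consecutive values of a
# contributes (i - K) valid remainders (K .. i-1); the partial tail of
# n = N - (N // i) * i values contributes max(0, n - K + 1) more.
# When K == 0, remainder 0 also counts, so the tail simply adds n
# (t * i + n = N values of a per modulus).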
for i in range(K+1,N+1):
t=N//i
n=N-t*i
a+=t*(i-K)
if K:
a+=max(0,n-K+1)
else:
a+=n
print(a) | [
"[email protected]"
] | |
1c3d4834bbc46156f17c4081635c11a08d327dd1 | 710e96b1435bc43cc260512df75af5dd3b2afd13 | /code/1044.py | 6b217c61734025d6ab42ff1303588769ee0ced7d | [
"MIT"
] | permissive | minssoj/Learning_Algorithm_Up | 94ca8166c9a5d87917cf033ad8415871684241c4 | 45ec4e2eb4c07c9ec907a74dbd31370e1645c50b | main | 2023-01-08T20:52:32.983756 | 2020-11-05T17:49:45 | 2020-11-05T17:49:45 | 301,926,859 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | # [Basics - Arithmetic] Read one integer, add 1, and print the result (explanation)
# [email protected]
'''
Problem link : https://www.codeup.kr/problem.php?id=1044
'''
n = int(input())
print(n + 1) | [
"[email protected]"
] | |
0afbaee6a3d11e935314a77c986fca4852eeb54e | d326cd8d4ca98e89b32e6a6bf6ecb26310cebdc1 | /rosalind/bioinformatics/stronghold/tran/main.py | 131ebf08d6ecc2fe6278e7b4127c11468845a825 | [] | no_license | dswisher/rosalind | d6af5195cdbe03adb5a19ed60fcbf8c05beac784 | 4519740350e47202f7a45ce70e434f7ee15c6afc | refs/heads/master | 2021-08-09T02:58:17.131164 | 2017-11-12T01:26:26 | 2017-11-12T01:26:26 | 100,122,283 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 923 | py |
import sys
from rosalind.common import util
from rosalind.bioinformatics.common import fasta
def compute_ratio(seq1, seq2):
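    # Transition/transversion ratio of two aligned DNA strings: transitions
    # swap within purines (A<->G) or pyrimidines (C<->T); every other
    # mismatch counts as a transversion.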
transitions = set(['AG', 'GA', 'CT', 'TC'])
transversions = set(['AC', 'CA', 'GT', 'TG', 'AT', 'TA', 'CG', 'GC'])
numTransitions = 0
numTransversions = 0
for i in xrange(len(seq1)):
x = seq1[i] + seq2[i]
if x in transitions:
numTransitions += 1
elif x in transversions:
numTransversions += 1
return float(numTransitions) / numTransversions
def main(fname):
seqs, _ = fasta.read(util.find_file(fname))
if len(seqs[0]) != len(seqs[1]):
print "Sequences have different lengths!"
sys.exit(1)
print compute_ratio(seqs[0], seqs[1])
if __name__ == '__main__':
if len(sys.argv) != 2:
print ("You must specify the name of the data file to load!")
sys.exit(1)
main(sys.argv[1])
| [
"[email protected]"
] | |
2150af8db3f4f64b86685075d6ed96e3845861c3 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_chauffeur.py | 97f43e805d5aba06eb05a5fd2bd9c150fd38b7be | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 354 | py |
#calss header
class _CHAUFFEUR():
def __init__(self,):
self.name = "CHAUFFEUR"
self.definitions = [u'someone whose job is to drive a car for a rich or important person: ']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
] | |
7b1dfacee784f7b05375188302ab051e0b005603 | ad28a59209239be285d1127a87bc08893fb62cb9 | /python/aad/test_concept_drift_classifier.py | 76ebbe63d04d5f1a6934a18ec97cdc667445b71c | [
"MIT"
] | permissive | Karl-Wu/ad_examples | 9e6f894c2414640b23ccdeb39db9e9b8352ef077 | 6fb0a2a72db51d82645e377945327eb9e1ecf8b8 | refs/heads/master | 2020-03-26T19:33:45.128414 | 2018-08-17T21:42:15 | 2018-08-17T21:42:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,961 | py | from aad.data_stream import *
from common.gen_samples import read_anomaly_dataset
from aad.anomaly_dataset_support import *
from aad.classifier_trees import RandomForestAadWrapper
"""
Check data drift with a Random Forest classifier.
NOTE: The classifier is trained only once in this example with the
first window of data. The drift is tested for the rest of the
windows *without* updating the model.
To run:
pythonw -m aad.test_concept_drift_classifier --debug --plot --log_file=temp/test_concept_drift_classifier.log --dataset=weather
"""
def test_kl_data_drift_classifier():
logger = logging.getLogger(__name__)
args = get_command_args(debug=False)
configure_logger(args)
dataset_config = dataset_configs[args.dataset]
stream_window = dataset_config[2]
alpha = 0.05
n_trees = 100
X_full, y_full = read_anomaly_dataset(args.dataset)
logger.debug("dataset: %s (%d, %d), stream_window: %d, alpha: %0.3f" %
(args.dataset, X_full.shape[0], X_full.shape[1], stream_window, alpha))
stream = DataStream(X_full, y_full, IdServer(initial=0))
# get first window of data
training_set = stream.read_next_from_stream(stream_window)
x, y, ids = training_set.x, training_set.y, training_set.ids
logger.debug("First window loaded (%s): %d" % (args.dataset, x.shape[0]))
# train classifier with the window of data
rf = RFClassifier.fit(x, y, n_estimators=n_trees)
logger.debug("Random Forest classifier created with %d trees" % rf.clf.n_estimators)
# prepare wrapper over the classifier which will compute KL-divergences
# NOTE: rf.clf is the scikit-learn Random Forest classifier instance
model = RandomForestAadWrapper(x=x, y=y, clf=rf.clf)
logger.debug("Wrapper model created with %d nodes" % len(model.w))
# compute KL replacement threshold *without* p
ref_kls, kl_q_alpha = model.get_KL_divergence_distribution(x, p=None, alpha=alpha)
# now initialize reference p
p = model.get_node_sample_distributions(x)
window = 0
while not stream.empty():
window += 1
# get next window of data and check KL-divergence
training_set = stream.read_next_from_stream(n=stream_window)
x, y = training_set.x, training_set.y
logger.debug("window %d loaded: %d" % (window, x.shape[0]))
# compare KL-divergence of current data dist against reference dist p
comp_kls, _ = model.get_KL_divergence_distribution(x, p=p)
# find which trees exceed alpha-level threshold
trees_exceeding_kl_q_alpha = model.get_trees_to_replace(comp_kls, kl_q_alpha)
n_threshold = int(2 * alpha * n_trees)
logger.debug("[%d] #trees_exceeding_kl_q_alpha: %d, threshold number of trees: %d\n%s" %
(window, len(trees_exceeding_kl_q_alpha), n_threshold, str(list(trees_exceeding_kl_q_alpha))))
if __name__ == "__main__":
test_kl_data_drift_classifier()
| [
"[email protected]"
] | |
ff22a34e9a956fa4c76ccb221f9d964d39375203 | 6cd3de9d6aa0c52602010aa857966d5dc4d57442 | /mlprodict/onnxrt/ops_cpu/op_lp_normalization.py | b8f5b8b7274ac89c8fb4bd972bdfda60a72b2e4c | [
"MIT"
] | permissive | xadupre/mlprodict | 2307ca96eafeeafff08d5322184399bb5dc1c37e | f82c8a26a60104948c67849b1c4af95ca812c153 | refs/heads/master | 2022-12-10T18:50:36.953032 | 2020-09-03T08:53:58 | 2020-09-03T08:53:58 | 292,824,744 | 1 | 0 | NOASSERTION | 2020-09-04T10:56:45 | 2020-09-04T10:56:44 | null | UTF-8 | Python | false | false | 837 | py | # -*- encoding: utf-8 -*-
# pylint: disable=E0203,E1101,C0111
"""
@file
@brief Runtime operator.
"""
import numpy
from ._op import OpRunUnaryNum
class LpNormalization(OpRunUnaryNum):
atts = {'axis': -1, 'p': 2}
def __init__(self, onnx_node, desc=None, **options):
OpRunUnaryNum.__init__(self, onnx_node, desc=desc,
expected_attributes=LpNormalization.atts,
**options)
def _run(self, x): # pylint: disable=W0221
norm = numpy.power(numpy.power(x, self.p).sum(
axis=self.axis), 1. / self.p)
norm = numpy.expand_dims(norm, self.axis)
if self.inplaces.get(0, False):
return self._run_inplace(x, norm)
return (x / norm, )
def _run_inplace(self, x, norm):
x /= norm
return (x, )
| [
"[email protected]"
] | |
64ced12d14e6ef07689ff4230e0e91e5529ae4b7 | 44849991f507933ebc7ed4e8e37819a529be539e | /steps/step09.py | f4696c0a213f1cce610937e96f56827da22d84d5 | [
"MIT"
] | permissive | NukeA/deep-learning-from-scratch-3 | 4ff60e8ac5b157a05079fc3b8a2ea69acec9ece5 | e48a7b8788827a16cc9f81adc135a3a14989bea5 | refs/heads/master | 2022-11-02T00:58:56.621011 | 2020-06-16T04:36:23 | 2020-06-16T04:36:23 | 273,873,741 | 1 | 0 | MIT | 2020-06-21T09:34:25 | 2020-06-21T09:34:24 | null | UTF-8 | Python | false | false | 1,780 | py | import numpy as np
class Variable:
def __init__(self, data):
if data is not None:
if not isinstance(data, np.ndarray):
raise TypeError('{} is not supported'.format(type(data)))
self.data = data
self.grad = None
self.creator = None
def set_creator(self, func):
self.creator = func
def backward(self):
if self.grad is None:
self.grad = np.ones_like(self.data)
funcs = [self.creator]
while funcs:
f = funcs.pop()
x, y = f.input, f.output
x.grad = f.backward(y.grad)
if x.creator is not None:
funcs.append(x.creator)
def as_array(x):
if np.isscalar(x):
return np.array(x)
return x
class Function:
def __call__(self, input):
x = input.data
y = self.forward(x)
output = Variable(as_array(y))
output.set_creator(self)
self.input = input
self.output = output
return output
def forward(self, x):
raise NotImplementedError()
def backward(self, gy):
raise NotImplementedError()
class Square(Function):
def forward(self, x):
y = x ** 2
return y
def backward(self, gy):
x = self.input.data
gx = 2 * x * gy
return gx
class Exp(Function):
def forward(self, x):
y = np.exp(x)
return y
def backward(self, gy):
x = self.input.data
gx = np.exp(x) * gy
return gx
def square(x):
return Square()(x)
def exp(x):
return Exp()(x)
x = Variable(np.array(0.5))
y = square(exp(square(x)))
y.backward()
print(x.grad)
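# analytically, y = (e^(x^2))^2 = e^(2x^2), so dy/dx = 4x * e^(2x^2),
# which at x = 0.5 is 2 * e^0.5 ≈ 3.2974425414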
x = Variable(np.array(1.0)) # OK
x = Variable(None) # OK
x = Variable(1.0) # NG | [
"[email protected]"
] | |
b62ee27024d05328ebb4cf87044e452d5be84b1a | 34088b8e82bc64a10678a08c03db2732d52f0c1a | /Pinbot/app/dash/migrations/0018_auto__add_field_resumedailyreportdata_resume_down_proportion.py | ba80829f546552507a844e0a3e81dcce76bb676a | [] | no_license | winghou/myFirstProfile | 757d82f5391f3672e48db4aa5774e26a48a5ecc7 | 8fc5d16de7b6449cba058f4d2459bbb0c8438f77 | refs/heads/master | 2020-05-31T13:42:28.554703 | 2016-03-23T11:30:13 | 2016-03-23T11:30:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,426 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'ResumeDailyReportData.resume_down_proportion'
db.add_column(u'dash_resumedailyreportdata', 'resume_down_proportion',
self.gf('django.db.models.fields.IntegerField')(default=0),
keep_default=False)
def backwards(self, orm):
# Deleting field 'ResumeDailyReportData.resume_down_proportion'
db.delete_column(u'dash_resumedailyreportdata', 'resume_down_proportion')
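
    # Applied with South's `python manage.py migrate dash`; forwards adds the
    # integer column (default 0) and backwards drops it again.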
models = {
u'dash.coredailyreportdata': {
'Meta': {'ordering': "['-report_date']", 'object_name': 'CoreDailyReportData'},
'active_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lively_member_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'lively_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'member_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'register_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'repeat_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'report_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
u'dash.feeddailyreportdata': {
'Meta': {'ordering': "['-report_date']", 'object_name': 'FeedDailyReportData'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lively_feed_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'lively_feed_member_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'lively_feed_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'new_feed_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'report_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'})
},
u'dash.partnerdailyreportdata': {
'Meta': {'ordering': "['-report_date']", 'object_name': 'PartnerDailyReportData'},
'accept_task_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'accept_task_user_total_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'accusation_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'accusation_total_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'all_extra_reward_coin_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'all_reward_coin_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'do_task_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'do_task_total_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'entered_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'entered_total_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interviewed_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'interviewed_total_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'report_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'resume_download_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'resume_download_total_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'resume_viewed_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'resume_viewed_total_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task_accedpted_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task_accedpted_count_contrast': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'task_accedpted_total_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task_total_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'task_viewed_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'today_commend_and_check_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'today_commend_and_download_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'today_extra_reward_coin_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'today_reward_coin_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'upload_resume_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'upload_resume_total_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'dash.pinbotdailyreport': {
'Meta': {'object_name': 'PinbotDailyReport'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'login_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'pay_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'pv': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'register_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'report_date': ('django.db.models.fields.DateField', [], {}),
'total_pay_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'total_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'uv': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'dash.resumedailyreportdata': {
'Meta': {'ordering': "['-report_date']", 'object_name': 'ResumeDailyReportData'},
'company_card_send_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'entered_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interviewed_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'report_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'resume_commends_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'resume_down_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'resume_down_proportion': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'resume_fav_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'resume_view_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'dash.userdailyreportdata': {
'Meta': {'ordering': "['-report_date']", 'object_name': 'UserDailyReportData'},
'all_total_active_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lively_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'new_experience_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'new_manual_member_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'new_member_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'new_register_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'new_self_member_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'repeat_visit_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'report_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'total_experience_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'total_manual_member_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'total_member_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'total_register_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'total_self_member_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
},
u'dash.weixindailyreportdata': {
'Meta': {'ordering': "['-report_date']", 'object_name': 'WeixinDailyReportData'},
'feed_notify_send_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'feed_notify_view_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lively_member_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'lively_user_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'new_bind_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'new_feed_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'new_feed_favours_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'new_reg_count': ('django.db.models.fields.IntegerField', [], {'default': '0'}),
'report_date': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'total_bind_count': ('django.db.models.fields.IntegerField', [], {'default': '0'})
}
}
complete_apps = ['dash'] | [
"[email protected]"
] | |
10eaeac02a5dcc162ac6889c1a4182414870249d | 4f49c1de4683bd00f5b831a0c7fd2b431b627be5 | /object_properties_panel.py | 36028619ed216e14b535d3acd7be96cd2d144287 | [] | no_license | PyrokinesisStudio/BlenderArchitectureAppTemplate | 6ce1c4896b7eee423c24558f10bc32bf3a2bdaac | 6b18bdca380d658288cd605c2e794473f57a04b0 | refs/heads/master | 2020-03-18T17:13:01.424253 | 2017-12-22T07:42:59 | 2017-12-22T07:42:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47,162 | py | import bpy
import math
from bpy.app.translations import pgettext_iface as iface_ #for decimate modifier
from . import unit, utils
enum_object_tabs = [('INFO'," ","Show the Main Information"),
('DISPLAY',"","Show Options for how the Object is Displayed"),
('MATERIAL',"","Show the materials assign to the object"),
('CONSTRAINTS',"","Show the constraints assigned to the object"),
('MODIFIERS',"","Show the modifiers assigned to the object"),
('MESHDATA',"","Show the Mesh Data Information"),
('CURVEDATA',"","Show the Curve Data Information"),
('TEXTDATA',"","Show the Text Data Information"),
('EMPTYDATA',"","Show the Empty Data Information"),
('LIGHTDATA',"","Show the Light Data Information"),
('CAMERADATA',"","Show the Camera Data Information"),
('DRIVERS',"","Show the Drivers assigned to the Object")]
def draw_modifier(mod,layout,obj):
def draw_show_expanded(mod,layout):
        # Both branches were identical in the original; the intent was
        # presumably a different icon per state (icons below are an assumption).
        if mod.show_expanded:
            layout.prop(mod,'show_expanded',text="",icon='TRIA_DOWN',emboss=False)
        else:
            layout.prop(mod,'show_expanded',text="",icon='TRIA_RIGHT',emboss=False)
def draw_apply_close(layout,mod_name):
layout.operator('object.modifier_apply',text="",icon='EDIT',emboss=False).modifier = mod.name
layout.operator('object.modifier_remove',text="",icon='PANEL_CLOSE',emboss=False).modifier = mod.name
def draw_array_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_ARRAY')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
box.prop(mod, "fit_type")
if mod.fit_type == 'FIXED_COUNT':
box.prop(mod, "count")
elif mod.fit_type == 'FIT_LENGTH':
box.prop(mod, "fit_length")
elif mod.fit_type == 'FIT_CURVE':
box.prop(mod, "curve")
box.separator()
split = box.split()
col = split.column()
col.prop(mod, "use_constant_offset")
sub = col.column()
sub.active = mod.use_constant_offset
sub.prop(mod, "constant_offset_displace", text="")
col.separator()
col.prop(mod, "use_merge_vertices", text="Merge")
sub = col.column()
sub.active = mod.use_merge_vertices
sub.prop(mod, "use_merge_vertices_cap", text="First Last")
sub.prop(mod, "merge_threshold", text="Distance")
col = split.column()
col.prop(mod, "use_relative_offset")
sub = col.column()
sub.active = mod.use_relative_offset
sub.prop(mod, "relative_offset_displace", text="")
col.separator()
col.prop(mod, "use_object_offset")
sub = col.column()
sub.active = mod.use_object_offset
sub.prop(mod, "offset_object", text="")
box.separator()
box.prop(mod, "start_cap")
box.prop(mod, "end_cap")
def draw_bevel_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_BEVEL')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
split = box.split()
col = split.column()
col.prop(mod, "width")
col.prop(mod, "segments")
col.prop(mod, "profile")
col = split.column()
col.prop(mod, "use_only_vertices")
col.prop(mod, "use_clamp_overlap")
box.label(text="Limit Method:")
box.row().prop(mod, "limit_method", expand=True)
if mod.limit_method == 'ANGLE':
box.prop(mod, "angle_limit")
elif mod.limit_method == 'VGROUP':
box.label(text="Vertex Group:")
box.prop_search(mod, "vertex_group", obj, "vertex_groups", text="")
box.label(text="Width Method:")
box.row().prop(mod, "offset_type", expand=True)
def draw_boolean_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_BOOLEAN')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
split = box.split()
col = split.column()
col.label(text="Operation:")
col.prop(mod, "operation", text="")
col = split.column()
col.label(text="Object:")
col.prop(mod, "object", text="")
def draw_curve_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_CURVE')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
split = box.split()
col = split.column()
col.label(text="Object:")
col.prop(mod, "object", text="")
col = split.column()
col.label(text="Vertex Group:")
col.prop_search(mod, "vertex_group", obj, "vertex_groups", text="")
box.label(text="Deformation Axis:")
box.row().prop(mod, "deform_axis", expand=True)
def draw_decimate_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_DECIM')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
decimate_type = mod.decimate_type
row = box.row()
row.prop(mod, "decimate_type", expand=True)
if decimate_type == 'COLLAPSE':
box.prop(mod, "ratio")
split = box.split()
row = split.row(align=True)
row.prop_search(mod, "vertex_group", obj, "vertex_groups", text="")
row.prop(mod, "invert_vertex_group", text="", icon='ARROW_LEFTRIGHT')
split.prop(mod, "use_collapse_triangulate")
elif decimate_type == 'UNSUBDIV':
box.prop(mod, "iterations")
else: # decimate_type == 'DISSOLVE':
box.prop(mod, "angle_limit")
box.prop(mod, "use_dissolve_boundaries")
box.label("Delimit:")
row = box.row()
row.prop(mod, "delimit")
box.label(text=iface_("Face Count: %d") % mod.face_count, translate=False)
def draw_edge_split_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_EDGESPLIT')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
split = box.split()
col = split.column()
col.prop(mod, "use_edge_angle", text="Edge Angle")
sub = col.column()
sub.active = mod.use_edge_angle
sub.prop(mod, "split_angle")
split.prop(mod, "use_edge_sharp", text="Sharp Edges")
def draw_hook_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='HOOK')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
split = box.split()
col = split.column()
col.label(text="Object:")
col.prop(mod, "object", text="")
if mod.object and mod.object.type == 'ARMATURE':
col.label(text="Bone:")
col.prop_search(mod, "subtarget", mod.object.data, "bones", text="")
col = split.column()
col.label(text="Vertex Group:")
col.prop_search(mod, "vertex_group", obj, "vertex_groups", text="")
layout.separator()
split = box.split()
# col = split.column()
# col.prop(mod, "falloff")
# col.prop(mod, "force", slider=True)
col = split.column()
col.operator("object.hook_reset", text="Reset")
col.operator("object.hook_recenter", text="Recenter")
if obj.mode == 'EDIT':
layout.separator()
row = layout.row()
row.operator("object.hook_select", text="Select")
row.operator("object.hook_assign", text="Assign")
def draw_mask_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_MASK')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
split = box.split()
col = split.column()
col.label(text="Mode:")
col.prop(mod, "mode", text="")
col = split.column()
if mod.mode == 'ARMATURE':
col.label(text="Armature:")
col.prop(mod, "armature", text="")
elif mod.mode == 'VERTEX_GROUP':
col.label(text="Vertex Group:")
row = col.row(align=True)
row.prop_search(mod, "vertex_group", obj, "vertex_groups", text="")
sub = row.row(align=True)
sub.active = bool(mod.vertex_group)
sub.prop(mod, "invert_vertex_group", text="", icon='ARROW_LEFTRIGHT')
def draw_mirror_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_MIRROR')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
split = box.split(percentage=0.25)
col = split.column()
col.label(text="Axis:")
col.prop(mod, "use_x")
col.prop(mod, "use_y")
col.prop(mod, "use_z")
col = split.column()
col.label(text="Options:")
col.prop(mod, "use_mirror_merge", text="Merge")
col.prop(mod, "use_clip", text="Clipping")
col.prop(mod, "use_mirror_vertex_groups", text="Vertex Groups")
col = split.column()
col.label(text="Textures:")
col.prop(mod, "use_mirror_u", text="U")
col.prop(mod, "use_mirror_v", text="V")
col = box.column()
if mod.use_mirror_merge is True:
col.prop(mod, "merge_threshold")
col.label(text="Mirror Object:")
col.prop(mod, "mirror_object", text="")
def draw_solidify_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_SOLIDIFY')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
split = box.split()
col = split.column()
col.prop(mod, "thickness")
col.prop(mod, "thickness_clamp")
col.separator()
row = col.row(align=True)
row.prop_search(mod, "vertex_group", obj, "vertex_groups", text="")
sub = row.row(align=True)
sub.active = bool(mod.vertex_group)
sub.prop(mod, "invert_vertex_group", text="", icon='ARROW_LEFTRIGHT')
sub = col.row()
sub.active = bool(mod.vertex_group)
sub.prop(mod, "thickness_vertex_group", text="Factor")
col.label(text="Crease:")
col.prop(mod, "edge_crease_inner", text="Inner")
col.prop(mod, "edge_crease_outer", text="Outer")
col.prop(mod, "edge_crease_rim", text="Rim")
col = split.column()
col.prop(mod, "offset")
col.prop(mod, "use_flip_normals")
col.prop(mod, "use_even_offset")
col.prop(mod, "use_quality_normals")
col.prop(mod, "use_rim")
col.separator()
col.label(text="Material Index Offset:")
sub = col.column()
row = sub.split(align=True, percentage=0.4)
row.prop(mod, "material_offset", text="")
row = row.row(align=True)
row.active = mod.use_rim
row.prop(mod, "material_offset_rim", text="Rim")
def draw_subsurf_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_SUBSURF')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
box.row().prop(mod, "subdivision_type", expand=True)
split = box.split()
col = split.column()
col.label(text="Subdivisions:")
col.prop(mod, "levels", text="View")
col.prop(mod, "render_levels", text="Render")
col = split.column()
col.label(text="Options:")
col.prop(mod, "use_subsurf_uv")
col.prop(mod, "show_only_control_edges")
def draw_skin_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_SKIN')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
box.operator("object.skin_armature_create", text="Create Armature")
box.separator()
col = box.column(align=True)
col.prop(mod, "branch_smoothing")
col.prop(mod, "use_smooth_shade")
split = box.split()
col = split.column()
col.label(text="Selected Vertices:")
sub = col.column(align=True)
sub.operator("object.skin_loose_mark_clear", text="Mark Loose").action = 'MARK'
sub.operator("object.skin_loose_mark_clear", text="Clear Loose").action = 'CLEAR'
sub = col.column()
sub.operator("object.skin_root_mark", text="Mark Root")
sub.operator("object.skin_radii_equalize", text="Equalize Radii")
col = split.column()
col.label(text="Symmetry Axes:")
col.prop(mod, "use_x_symmetry")
col.prop(mod, "use_y_symmetry")
col.prop(mod, "use_z_symmetry")
def draw_triangulate_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_TRIANGULATE')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
row = box.row()
col = row.column()
col.label(text="Quad Method:")
col.prop(mod, "quad_method", text="")
col = row.column()
col.label(text="Ngon Method:")
col.prop(mod, "ngon_method", text="")
def draw_simple_deform_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_SIMPLEDEFORM')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
box.row().prop(mod, "deform_method", expand=True)
split = box.split()
col = split.column()
col.label(text="Vertex Group:")
col.prop_search(mod, "vertex_group", obj, "vertex_groups", text="")
split = box.split()
col = split.column()
col.label(text="Origin:")
col.prop(mod, "origin", text="")
if mod.deform_method in {'TAPER', 'STRETCH', 'TWIST'}:
col.label(text="Lock:")
col.prop(mod, "lock_x")
col.prop(mod, "lock_y")
col = split.column()
col.label(text="Deform:")
if mod.deform_method in {'TAPER', 'STRETCH'}:
col.prop(mod, "factor")
else:
col.prop(mod, "angle")
col.prop(mod, "limits", slider=True)
def draw_wireframe_modifier(layout):
col = layout.column(align=True)
box = col.box()
row = box.row()
draw_show_expanded(mod,row)
row.prop(mod,'name',text="",icon='MOD_WIREFRAME')
draw_apply_close(row,mod.name)
if mod.show_expanded:
box = col.box()
has_vgroup = bool(mod.vertex_group)
split = box.split()
col = split.column()
col.prop(mod, "thickness", text="Thickness")
row = col.row(align=True)
row.prop_search(mod, "vertex_group", obj, "vertex_groups", text="")
sub = row.row(align=True)
sub.active = has_vgroup
sub.prop(mod, "invert_vertex_group", text="", icon='ARROW_LEFTRIGHT')
row = col.row(align=True)
row.active = has_vgroup
row.prop(mod, "thickness_vertex_group", text="Factor")
col.prop(mod, "use_crease", text="Crease Edges")
col.prop(mod, "crease_weight", text="Crease Weight")
col = split.column()
col.prop(mod, "offset")
col.prop(mod, "use_even_offset", text="Even Thickness")
col.prop(mod, "use_relative_offset", text="Relative Thickness")
col.prop(mod, "use_boundary", text="Boundary")
col.prop(mod, "use_replace", text="Replace Original")
col.prop(mod, "material_offset", text="Material Offset")
if mod.type == 'ARRAY':
draw_array_modifier(layout)
elif mod.type == 'BEVEL':
draw_bevel_modifier(layout)
elif mod.type == 'BOOLEAN':
draw_boolean_modifier(layout)
elif mod.type == 'CURVE':
draw_curve_modifier(layout)
elif mod.type == 'DECIMATE':
draw_decimate_modifier(layout)
elif mod.type == 'EDGE_SPLIT':
draw_edge_split_modifier(layout)
elif mod.type == 'HOOK':
draw_hook_modifier(layout)
elif mod.type == 'MASK':
draw_mask_modifier(layout)
elif mod.type == 'MIRROR':
draw_mirror_modifier(layout)
elif mod.type == 'SOLIDIFY':
draw_solidify_modifier(layout)
elif mod.type == 'SUBSURF':
draw_subsurf_modifier(layout)
elif mod.type == 'SKIN':
draw_skin_modifier(layout)
elif mod.type == 'SIMPLE_DEFORM':
draw_simple_deform_modifier(layout)
elif mod.type == 'TRIANGULATE':
draw_triangulate_modifier(layout)
elif mod.type == 'WIREFRAME':
draw_wireframe_modifier(layout)
else:
row = layout.row()
row.label(mod.name + " view ")
def draw_constraint(con,layout,obj):
def draw_show_expanded(con,layout):
        # Both branches were identical in the original; the intent was
        # presumably a different icon per state (icons below are an assumption).
        if con.show_expanded:
            layout.prop(con,'show_expanded',text="",icon='TRIA_DOWN',emboss=False)
        else:
            layout.prop(con,'show_expanded',text="",icon='TRIA_RIGHT',emboss=False)
def space_template(layout, con, target=True, owner=True):
if target or owner:
split = layout.split(percentage=0.2)
split.label(text="Space:")
row = split.row()
if target:
row.prop(con, "target_space", text="")
if target and owner:
row.label(icon='ARROW_LEFTRIGHT')
if owner:
row.prop(con, "owner_space", text="")
def target_template(layout, con, subtargets=True):
layout.prop(con, "target") # XXX limiting settings for only 'curves' or some type of object
if con.target and subtargets:
if con.target.type == 'ARMATURE':
layout.prop_search(con, "subtarget", con.target.data, "bones", text="Bone")
if hasattr(con, "head_tail"):
row = layout.row()
row.label(text="Head/Tail:")
row.prop(con, "head_tail", text="")
elif con.target.type in {'MESH', 'LATTICE'}:
layout.prop_search(con, "subtarget", con.target, "vertex_groups", text="Vertex Group")
def draw_copy_location_constraint(layout):
col = layout.column(align=True)
box = col.template_constraint(con)
if con.show_expanded:
target_template(box, con)
split = box.split()
col = split.column()
col.prop(con, "use_x", text="X")
sub = col.column()
sub.active = con.use_x
sub.prop(con, "invert_x", text="Invert")
col = split.column()
col.prop(con, "use_y", text="Y")
sub = col.column()
sub.active = con.use_y
sub.prop(con, "invert_y", text="Invert")
col = split.column()
col.prop(con, "use_z", text="Z")
sub = col.column()
sub.active = con.use_z
sub.prop(con, "invert_z", text="Invert")
box.prop(con, "use_offset")
space_template(box, con)
if con.type not in {'RIGID_BODY_JOINT', 'NULL'}:
box.prop(con, "influence")
def draw_copy_rotation_constraint(layout):
col = layout.column(align=True)
box = col.template_constraint(con)
if con.show_expanded:
target_template(box, con)
split = box.split()
col = split.column()
col.prop(con, "use_x", text="X")
sub = col.column()
sub.active = con.use_x
sub.prop(con, "invert_x", text="Invert")
col = split.column()
col.prop(con, "use_y", text="Y")
sub = col.column()
sub.active = con.use_y
sub.prop(con, "invert_y", text="Invert")
col = split.column()
col.prop(con, "use_z", text="Z")
sub = col.column()
sub.active = con.use_z
sub.prop(con, "invert_z", text="Invert")
box.prop(con, "use_offset")
space_template(box, con)
if con.type not in {'RIGID_BODY_JOINT', 'NULL'}:
box.prop(con, "influence")
def draw_copy_scale_constraint(layout):
col = layout.column(align=True)
box = col.template_constraint(con)
if con.show_expanded:
target_template(box, con)
row = box.row(align=True)
row.prop(con, "use_x", text="X")
row.prop(con, "use_y", text="Y")
row.prop(con, "use_z", text="Z")
box.prop(con, "use_offset")
space_template(box, con)
if con.type not in {'RIGID_BODY_JOINT', 'NULL'}:
box.prop(con, "influence")
def draw_copy_transforms_constraint(layout):
col = layout.column(align=True)
box = col.template_constraint(con)
if con.show_expanded:
target_template(box, con)
space_template(box, con)
if con.type not in {'RIGID_BODY_JOINT', 'NULL'}:
box.prop(con, "influence")
def draw_limit_distance_constraint(layout):
col = layout.column(align=True)
box = col.template_constraint(con)
if con.show_expanded:
target_template(box, con)
col = box.column(align=True)
col.prop(con, "distance")
col.operator("constraint.limitdistance_reset")
row = box.row()
row.label(text="Clamp Region:")
row.prop(con, "limit_mode", text="")
row = box.row()
row.prop(con, "use_transform_limit")
row.label()
space_template(box, con)
if con.type not in {'RIGID_BODY_JOINT', 'NULL'}:
box.prop(con, "influence")
def draw_limit_location_constraint(layout):
col = layout.column(align=True)
box = col.template_constraint(con)
if con.show_expanded:
split = box.split()
col = split.column()
col.prop(con, "use_min_x")
sub = col.column()
sub.active = con.use_min_x
sub.prop(con, "min_x", text="")
col.prop(con, "use_max_x")
sub = col.column()
sub.active = con.use_max_x
sub.prop(con, "max_x", text="")
col = split.column()
col.prop(con, "use_min_y")
sub = col.column()
sub.active = con.use_min_y
sub.prop(con, "min_y", text="")
col.prop(con, "use_max_y")
sub = col.column()
sub.active = con.use_max_y
sub.prop(con, "max_y", text="")
col = split.column()
col.prop(con, "use_min_z")
sub = col.column()
sub.active = con.use_min_z
sub.prop(con, "min_z", text="")
col.prop(con, "use_max_z")
sub = col.column()
sub.active = con.use_max_z
sub.prop(con, "max_z", text="")
row = box.row()
row.prop(con, "use_transform_limit")
row.label()
row = box.row()
row.label(text="Convert:")
row.prop(con, "owner_space", text="")
if con.type not in {'RIGID_BODY_JOINT', 'NULL'}:
box.prop(con, "influence")
def draw_limit_rotation_constraint(layout):
col = layout.column(align=True)
box = col.template_constraint(con)
if con.show_expanded:
split = box.split()
col = split.column(align=True)
col.prop(con, "use_limit_x")
sub = col.column(align=True)
sub.active = con.use_limit_x
sub.prop(con, "min_x", text="Min")
sub.prop(con, "max_x", text="Max")
col = split.column(align=True)
col.prop(con, "use_limit_y")
sub = col.column(align=True)
sub.active = con.use_limit_y
sub.prop(con, "min_y", text="Min")
sub.prop(con, "max_y", text="Max")
col = split.column(align=True)
col.prop(con, "use_limit_z")
sub = col.column(align=True)
sub.active = con.use_limit_z
sub.prop(con, "min_z", text="Min")
sub.prop(con, "max_z", text="Max")
box.prop(con, "use_transform_limit")
row = box.row()
row.label(text="Convert:")
row.prop(con, "owner_space", text="")
if con.type not in {'RIGID_BODY_JOINT', 'NULL'}:
box.prop(con, "influence")
def draw_limit_scale_constraint(layout):
col = layout.column(align=True)
box = col.template_constraint(con)
if con.show_expanded:
split = box.split()
col = split.column()
col.prop(con, "use_min_x")
sub = col.column()
sub.active = con.use_min_x
sub.prop(con, "min_x", text="")
col.prop(con, "use_max_x")
sub = col.column()
sub.active = con.use_max_x
sub.prop(con, "max_x", text="")
col = split.column()
col.prop(con, "use_min_y")
sub = col.column()
sub.active = con.use_min_y
sub.prop(con, "min_y", text="")
col.prop(con, "use_max_y")
sub = col.column()
sub.active = con.use_max_y
sub.prop(con, "max_y", text="")
col = split.column()
col.prop(con, "use_min_z")
sub = col.column()
sub.active = con.use_min_z
sub.prop(con, "min_z", text="")
col.prop(con, "use_max_z")
sub = col.column()
sub.active = con.use_max_z
sub.prop(con, "max_z", text="")
row = box.row()
row.prop(con, "use_transform_limit")
row.label()
row = box.row()
row.label(text="Convert:")
row.prop(con, "owner_space", text="")
if con.type not in {'RIGID_BODY_JOINT', 'NULL'}:
box.prop(con, "influence")
if con.type == 'COPY_LOCATION':
draw_copy_location_constraint(layout)
elif con.type == 'COPY_ROTATION':
draw_copy_rotation_constraint(layout)
elif con.type == 'COPY_SCALE':
draw_copy_scale_constraint(layout)
elif con.type == 'COPY_TRANSFORMS':
draw_copy_transforms_constraint(layout)
elif con.type == 'LIMIT_DISTANCE':
draw_limit_distance_constraint(layout)
elif con.type == 'LIMIT_LOCATION':
draw_limit_location_constraint(layout)
elif con.type == 'LIMIT_ROTATION':
draw_limit_rotation_constraint(layout)
elif con.type == 'LIMIT_SCALE':
draw_limit_scale_constraint(layout)
else:
row = layout.row()
row.label(con.name + " view ")
def draw_object_properties(layout,obj,context):
props = get_scene_props(bpy.context.scene)
col = layout.column(align=True)
box = col.box()
col = box.column(align=True)
row = col.row(align=True)
draw_object_tabs(row,obj)
box = col.box()
col = box.column()
if props.tabs == 'INFO':
draw_object_info(col,obj)
if props.tabs == 'DISPLAY':
# box = col.box()
row = col.row()
row.prop(obj,'draw_type',expand=True)
box.prop(obj,'hide_select')
box.prop(obj,'hide')
box.prop(obj,'hide_render')
box.prop(obj,'show_x_ray',icon='GHOST_ENABLED',text='Show X-Ray')
box.prop(obj.cycles_visibility,'camera',icon='CAMERA_DATA',text='Show in Viewport Render')
if props.tabs == 'MATERIAL':
draw_object_materials(col,obj,context)
if props.tabs == 'CONSTRAINTS':
# row = col.row()
col.operator_menu_enum("object.constraint_add", "type", text="Add Constraint",icon='CONSTRAINT_DATA')
# row.operator_menu_enum("fd_object.add_constraint", "type", icon='CONSTRAINT_DATA')
# row.operator("fd_object.collapse_all_constraints",text="",icon='FULLSCREEN_EXIT')
for con in obj.constraints:
draw_constraint(con,col,obj)
if props.tabs == 'MODIFIERS':
# row = col.row()
col.operator_menu_enum("object.modifier_add", "type",icon='MODIFIER')
# row.operator("fd_object.collapse_all_modifiers",text="",icon='FULLSCREEN_EXIT')
for mod in obj.modifiers:
draw_modifier(mod,col,obj)
if props.tabs == 'MESHDATA':
pass
if props.tabs == 'CURVEDATA':
pass
if props.tabs == 'TEXTDATA':
pass
if props.tabs == 'EMPTYDATA':
pass
if props.tabs == 'LIGHTDATA':
pass
if props.tabs == 'CAMERADATA':
pass
if props.tabs == 'DRIVERS':
draw_object_drivers(col,obj)
def draw_object_tabs(layout,obj):
props = get_scene_props(bpy.context.scene)
layout.prop_enum(props, "tabs", 'INFO', icon="BLANK1" if props.tabs == 'INFO' else "INFO", text="Info" if props.tabs == 'INFO' else "")
if obj.type == 'MESH':
layout.prop_enum(props, "tabs", 'DISPLAY', icon="BLANK1" if props.tabs == 'DISPLAY' else "RESTRICT_VIEW_OFF", text="Display" if props.tabs == 'DISPLAY' else "")
layout.prop_enum(props, "tabs", 'MATERIAL', icon="BLANK1" if props.tabs == 'MATERIAL' else "MATERIAL", text="Material" if props.tabs == 'MATERIAL' else "")
layout.prop_enum(props, "tabs", 'CONSTRAINTS', icon="BLANK1" if props.tabs == 'CONSTRAINTS' else "CONSTRAINT", text="Constraints" if props.tabs == 'CONSTRAINTS' else "")
layout.prop_enum(props, "tabs", 'MODIFIERS', icon="BLANK1" if props.tabs == 'MODIFIERS' else "MODIFIER", text="Modifiers" if props.tabs == 'MODIFIERS' else "")
layout.prop_enum(props, "tabs", 'MESHDATA', icon="BLANK1" if props.tabs == 'MESHDATA' else "MESH_DATA", text="Data" if props.tabs == 'MESHDATA' else "")
if obj.type == 'CURVE':
layout.prop_enum(props, "tabs", 'DISPLAY', icon='RESTRICT_VIEW_OFF', text="")
layout.prop_enum(props, "tabs", 'MATERIAL', icon='MATERIAL', text="")
layout.prop_enum(props, "tabs", 'CONSTRAINTS', icon='CONSTRAINT', text="")
layout.prop_enum(props, "tabs", 'MODIFIERS', icon='MODIFIER', text="")
layout.prop_enum(props, "tabs", 'CURVEDATA', icon='CURVE_DATA', text="")
if obj.type == 'FONT':
layout.prop_enum(props, "tabs", 'DISPLAY', icon='RESTRICT_VIEW_OFF', text="")
layout.prop_enum(props, "tabs", 'MATERIAL', icon='MATERIAL', text="")
layout.prop_enum(props, "tabs", 'CONSTRAINTS', icon='CONSTRAINT', text="")
layout.prop_enum(props, "tabs", 'MODIFIERS', icon='MODIFIER', text="")
layout.prop_enum(props, "tabs", 'TEXTDATA', icon='FONT_DATA', text="")
if obj.type == 'EMPTY':
layout.prop_enum(props, "tabs", 'DISPLAY', icon='RESTRICT_VIEW_OFF', text="")
layout.prop_enum(props, "tabs", 'CONSTRAINTS', icon='CONSTRAINT', text="")
layout.prop_enum(props, "tabs", 'EMPTYDATA', icon='EMPTY_DATA', text="")
if obj.type == 'LAMP':
layout.prop_enum(props, "tabs", 'DISPLAY', icon='RESTRICT_VIEW_OFF', text="")
layout.prop_enum(props, "tabs", 'CONSTRAINTS', icon='CONSTRAINT', text="")
layout.prop_enum(props, "tabs", 'LIGHTDATA', icon='LAMP_SPOT', text="")
if obj.type == 'CAMERA':
layout.prop_enum(props, "tabs", 'CONSTRAINTS', icon='CONSTRAINT', text="")
layout.prop_enum(props, "tabs", 'CAMERADATA', icon='OUTLINER_DATA_CAMERA', text="")
if obj.type == 'ARMATURE':
layout.prop_enum(props, "tabs", 'DISPLAY', icon='RESTRICT_VIEW_OFF', text="")
layout.prop_enum(props, "tabs", 'CONSTRAINTS', icon='CONSTRAINT', text="")
layout.prop_enum(props, "tabs", 'DRIVERS', icon="BLANK1" if props.tabs == 'DRIVERS' else "AUTO", text="Drivers" if props.tabs == 'DRIVERS' else "")
def draw_object_info(layout,obj):
# box = layout.box()
row = layout.row()
row.prop(obj,'name')
if obj.type in {'MESH','CURVE','LATTICE','TEXT'}:
pass
# row.operator('fd_object.toggle_edit_mode',text="",icon='EDITMODE_HLT').object_name = obj.name
has_hook_modifier = False
for mod in obj.modifiers:
if mod.type == 'HOOK':
has_hook_modifier = True
has_shape_keys = False
if obj.type == 'MESH':
if obj.data.shape_keys:
if len(obj.data.shape_keys.key_blocks) > 0:
has_shape_keys = True
if has_hook_modifier or has_shape_keys:
row = layout.row()
col = row.column(align=True)
col.label("Dimension")
col.label("X: " + str(obj.dimensions.x))
col.label("Y: " + str(obj.dimensions.y))
col.label("Z: " + str(obj.dimensions.z))
col = row.column(align=True)
col.label("Location")
col.label("X: " + str(obj.location.x))
col.label("Y: " + str(obj.location.y))
col.label("Z: " + str(obj.location.z))
col = row.column(align=True)
col.label("Rotation")
col.label("X: " + str(round(math.degrees(obj.rotation_euler.x),4)))
col.label("Y: " + str(round(math.degrees(obj.rotation_euler.y),4)))
col.label("Z: " + str(round(math.degrees(obj.rotation_euler.z),4)))
if has_hook_modifier:
layout.operator("fd_object.apply_hook_modifiers",icon='HOOK').object_name = obj.name
if has_shape_keys:
layout.operator("fd_object.apply_shape_keys",icon='SHAPEKEY_DATA').object_name = obj.name
else:
if obj.type not in {'EMPTY','CAMERA','LAMP'}:
layout.label('Dimensions:')
col = layout.column(align=True)
#X
row = col.row(align=True)
row.prop(obj,"lock_scale",index=0,text="")
if obj.lock_scale[0]:
row.label("X: " + str(obj.dimensions.x))
else:
row.prop(obj,"dimensions",index=0,text="X")
#Y
row = col.row(align=True)
row.prop(obj,"lock_scale",index=1,text="")
if obj.lock_scale[1]:
row.label("Y: " + str(obj.dimensions.y))
else:
row.prop(obj,"dimensions",index=1,text="Y")
#Z
row = col.row(align=True)
row.prop(obj,"lock_scale",index=2,text="")
if obj.lock_scale[2]:
row.label("Z: " + str(obj.dimensions.z))
else:
row.prop(obj,"dimensions",index=2,text="Z")
col1 = layout.row()
if obj:
col2 = col1.split()
col = col2.column(align=True)
col.label('Location:')
#X
row = col.row(align=True)
row.prop(obj,"lock_location",index=0,text="")
if obj.lock_location[0]:
row.label("X: " + str(obj.location.x))
else:
row.prop(obj,"location",index=0,text="X")
#Y
row = col.row(align=True)
row.prop(obj,"lock_location",index=1,text="")
if obj.lock_location[1]:
row.label("Y: " + str(obj.location.y))
else:
row.prop(obj,"location",index=1,text="Y")
#Z
row = col.row(align=True)
row.prop(obj,"lock_location",index=2,text="")
if obj.lock_location[2]:
row.label("Z: " + str(obj.location.z))
else:
row.prop(obj,"location",index=2,text="Z")
col2 = col1.split()
col = col2.column(align=True)
col.label('Rotation:')
#X
row = col.row(align=True)
row.prop(obj,"lock_rotation",index=0,text="")
if obj.lock_rotation[0]:
row.label("X: " + str(round(math.degrees(obj.rotation_euler.x),4)))
else:
row.prop(obj,"rotation_euler",index=0,text="X")
#Y
row = col.row(align=True)
row.prop(obj,"lock_rotation",index=1,text="")
if obj.lock_rotation[1]:
row.label("Y: " + str(round(math.degrees(obj.rotation_euler.y),4)))
else:
row.prop(obj,"rotation_euler",index=1,text="Y")
#Z
row = col.row(align=True)
row.prop(obj,"lock_rotation",index=2,text="")
if obj.lock_rotation[2]:
row.label("Y: " + str(round(math.degrees(obj.rotation_euler.z),4)))
else:
row.prop(obj,"rotation_euler",index=2,text="Z")
# row = box.row()
# row.prop(obj.mv,'comment')
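# Standard material-slot UI: a sortable slot list with add/remove/specials,
# assign/select/deselect buttons in Edit Mode, and a preview of the active
# material plus a shortcut to open the node editor.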
def draw_object_materials(layout,obj,context):
mat = None
ob = context.object
slot = None
space = context.space_data
if ob:
mat = ob.active_material
if ob:
is_sortable = len(ob.material_slots) > 1
rows = 1
if (is_sortable):
rows = 4
row = layout.row()
row.template_list("MATERIAL_UL_matslots", "", ob, "material_slots", ob, "active_material_index", rows=rows)
col = row.column(align=True)
col.operator("object.material_slot_add", icon='ZOOMIN', text="")
col.operator("object.material_slot_remove", icon='ZOOMOUT', text="")
col.menu("MATERIAL_MT_specials", icon='DOWNARROW_HLT', text="")
if is_sortable:
col.separator()
col.operator("object.material_slot_move", icon='TRIA_UP', text="").direction = 'UP'
col.operator("object.material_slot_move", icon='TRIA_DOWN', text="").direction = 'DOWN'
if ob.mode == 'EDIT':
row = layout.row(align=True)
row.operator("object.material_slot_assign", text="Assign")
row.operator("object.material_slot_select", text="Select")
row.operator("object.material_slot_deselect", text="Deselect")
# split = layout.split(percentage=0.65)
if ob:
layout.template_ID(ob, "active_material", new="material.new")
row = layout.row()
if slot:
row.prop(slot, "link", text="")
else:
row.label()
elif mat:
layout.template_preview(mat)
# split.template_ID(space, "pin_id")
# split.separator()
if mat:
layout.template_preview(mat)
if obj.type in {'MESH','CURVE'}:
pass
if obj.mode == 'EDIT':
row = layout.row(align=True)
row.operator("object.material_slot_assign", text="Assign")
row.operator("object.material_slot_select", text="Select")
row.operator("object.material_slot_deselect", text="Deselect")
layout.operator('fd_general.open_new_window',text="Open Material Editor",icon='NODETREE').space_type = 'NODE_EDITOR'
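# Lists every driver on the object with its current value; note the value is
# resolved via eval() on the driver's data_path, so it reflects live scene state.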
def draw_object_drivers(layout,obj):
if obj:
if not obj.animation_data:
layout.label("There are no drivers assigned to the object",icon='ERROR')
else:
if len(obj.animation_data.drivers) == 0:
layout.label("There are no drivers assigned to the object",icon='ERROR')
for DR in obj.animation_data.drivers:
box = layout.box()
row = box.row()
DriverName = DR.data_path
if DriverName in {"location","rotation_euler","dimensions" ,"lock_scale",'lock_location','lock_rotation'}:
if DR.array_index == 0:
DriverName = DriverName + " X"
if DR.array_index == 1:
DriverName = DriverName + " Y"
if DR.array_index == 2:
DriverName = DriverName + " Z"
value = eval('bpy.data.objects["' + obj.name + '"].' + DR.data_path)
if type(value).__name__ == 'str':
row.label(DriverName + " = " + str(value),icon='AUTO')
elif type(value).__name__ == 'float':
row.label(DriverName + " = " + str(unit.meter_to_active_unit(value)),icon='AUTO')
elif type(value).__name__ == 'int':
row.label(DriverName + " = " + str(value),icon='AUTO')
elif type(value).__name__ == 'bool':
row.label(DriverName + " = " + str(value),icon='AUTO')
elif type(value).__name__ == 'bpy_prop_array':
row.label(DriverName + " = " + str(value[DR.array_index]),icon='AUTO')
elif type(value).__name__ == 'Vector':
row.label(DriverName + " = " + str(unit.meter_to_active_unit(value[DR.array_index])),icon='AUTO')
elif type(value).__name__ == 'Euler':
row.label(DriverName + " = " + str(unit.meter_to_active_unit(value[DR.array_index])),icon='AUTO')
else:
row.label(DriverName + " = " + str(type(value)),icon='AUTO')
# props = row.operator("fd_driver.add_variable_to_object",text="",icon='ZOOMIN')
# props.object_name = obj.name
# props.data_path = DR.data_path
# props.array_index = DR.array_index
# obj_bp = utils.get_assembly_bp(obj)
# if obj_bp:
# props = row.operator('fd_driver.get_vars_from_object',text="",icon='DRIVER')
# props.object_name = obj.name
# props.var_object_name = obj_bp.name
# props.data_path = DR.data_path
# props.array_index = DR.array_index
utils.draw_driver_expression(box,DR)
# draw_add_variable_operators(box,obj.name,DR.data_path,DR.array_index)
utils.draw_driver_variables(box,DR,obj.name)
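# 3D View sidebar panel that hosts the draw_object_* layouts; poll() hides the
# panel whenever no object is active.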
class PANEL_object_properties(bpy.types.Panel):
bl_space_type = "VIEW_3D"
bl_region_type = "UI"
bl_label = " "
bl_options = {'DEFAULT_CLOSED'}
@classmethod
def poll(cls, context):
if context.object:
return True
else:
return False
def draw_header(self, context):
layout = self.layout
obj = context.object
layout.label(text="Object: " + obj.name,icon='OBJECT_DATA')
def draw(self, context):
layout = self.layout
obj = context.object
if obj:
draw_object_properties(layout,obj,context)
class OPS_open_new_window(bpy.types.Operator):
bl_idname = "fd_general.open_new_window"
bl_label = "Open New Window"
space_type = bpy.props.StringProperty(name="Space Type")
@classmethod
def poll(cls, context):
return True
def execute(self, context):
bpy.ops.screen.userpref_show('INVOKE_DEFAULT')
for window in context.window_manager.windows:
if len(window.screen.areas) == 1 and window.screen.areas[0].type == 'USER_PREFERENCES':
window.screen.areas[0].type = self.space_type
return {'FINISHED'}
def get_scene_props(scene):
return scene.obj_panel
class scene_props(bpy.types.PropertyGroup):
tabs = bpy.props.EnumProperty(name="type",
items=enum_object_tabs,
description="Select the Object Type.",
default='INFO')
def register():
bpy.utils.register_class(PANEL_object_properties)
bpy.utils.register_class(scene_props)
bpy.utils.register_class(OPS_open_new_window)
bpy.types.Scene.obj_panel = bpy.props.PointerProperty(type = scene_props)
def unregister():
pass | [
"[email protected]"
] | |
5dd63b2b9df8c5af5403c212e5f8fa25f11a8055 | 49536aafb22a77a6caf249c7fadef46d63d24dfe | /tensorflow/tensorflow/contrib/learn/python/learn/learn_io/generator_io.py | 784781d7368490a10d5dbc9cd5842f4bed98eda3 | [
"Apache-2.0"
] | permissive | wangzhi01/deeplearning-1 | 4e5ad93f0d9ecd302b74352f80fe1fa6ae70bf0d | 46ab82253d956953b8aa98e97ceb6cd290e82288 | refs/heads/master | 2020-05-28T03:14:55.687567 | 2018-09-12T16:52:09 | 2018-09-12T16:52:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,499 | py | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Methods to allow generator of dict with numpy arrays."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from collections import Container
from types import FunctionType
from types import GeneratorType
from tensorflow.python.estimator.inputs.queues.feeding_functions import _enqueue_data as enqueue_data
def generator_input_fn(x,
target_key=None,
batch_size=128,
num_epochs=1,
shuffle=True,
queue_capacity=1000,
num_threads=1,
pad_value=None):
"""Returns input function that returns dicts of numpy arrays
yielded from a generator.
  It is assumed that every dict of numpy arrays yielded from the generator
  represents a single sample. The generator should consume a single epoch of
  the data.
This returns a function outputting `features` and `target` based on the dict
of numpy arrays. The dict `features` has the same keys as an element yielded
from x.
Example:
```python
def generator():
for index in range(10):
yield {'height': np.random.randint(32,36),
'age': np.random.randint(18, 80),
'label': np.ones(1)}
with tf.Session() as session:
input_fn = generator_io.generator_input_fn(
generator, target_key="label", batch_size=2, shuffle=False,
num_epochs=1)
```
Args:
x: Generator Function, returns a `Generator` that will yield the data
in `dict` of numpy arrays
target_key: String or Container of Strings, the key or Container of keys of
the numpy arrays in x dictionaries to use as target.
batch_size: Integer, size of batches to return.
num_epochs: Integer, number of epochs to iterate over data. If `None` will
run forever.
shuffle: Boolean, if True shuffles the queue. Avoid shuffle at prediction
time.
queue_capacity: Integer, size of queue to accumulate.
num_threads: Integer, number of threads used for reading and enqueueing.
pad_value: default value for dynamic padding of data samples, if provided.
Returns:
Function, that returns a feature `dict` with `Tensors` and an optional
label `dict` with `Tensors`, or if target_key is `str` label is a `Tensor`
Raises:
TypeError: `x` is not `FunctionType`.
TypeError: `x()` is not `GeneratorType`.
TypeError: `next(x())` is not `dict`.
TypeError: `target_key` is not `str` or `target_key` is not `Container`
of `str`.
KeyError: `target_key` not a key or `target_key[index]` not in next(`x()`).
KeyError: `key` mismatch between dicts emitted from `x()`
"""
if not isinstance(x, FunctionType):
raise TypeError(
'x must be generator function; got {}'.format(type(x).__name__))
generator = x()
if not isinstance(generator, GeneratorType):
raise TypeError(
'x() must be generator; got {}'.format(type(generator).__name__))
data = next(generator)
if not isinstance(data, dict):
raise TypeError('x() must yield dict; got {}'.format(type(data).__name__))
input_keys = sorted(next(x()).keys())
if target_key is not None:
if isinstance(target_key, str):
target_key = [target_key]
elif isinstance(target_key, Container):
for item in target_key:
if not isinstance(item, str):
raise TypeError('target_key must be str or Container of str; got {}'.
format(type(item).__name__))
if item not in input_keys:
raise KeyError(
'target_key not in yielded dict. Expected {} keys; got {}'.format(
input_keys, item))
else:
raise TypeError('target_key must be str or Container of str; got {}'.
format(type(target_key).__name__))
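  # The returned input_fn feeds the generator into a queue on background
  # threads, then dequeues batch_size-sized feature dicts (splitting off the
  # target keys when requested).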
def _generator_input_fn():
"""generator input function."""
queue = enqueue_data(
x,
queue_capacity,
shuffle=shuffle,
num_threads=num_threads,
enqueue_size=batch_size,
num_epochs=num_epochs,
pad_value=pad_value)
features = (queue.dequeue_many(batch_size)
if num_epochs is None else queue.dequeue_up_to(batch_size))
if not isinstance(features, list):
features = [features]
features = dict(zip(input_keys, features))
if target_key is not None:
if len(target_key) > 1:
target = {key: features.pop(key) for key in target_key}
else:
target = features.pop(target_key[0])
return features, target
return features
return _generator_input_fn
| [
"[email protected]"
] | |
bbbb760b22d3a07d2b3d10445c267f72ed9fcfbd | e0b6f5bd451aa8af3273fbc948799637681342e1 | /scripts/wm_representation/functions/IEM_conditions/IEM_condition.py | 244e5b35232d3da6732fe524c6e5c3d6790c863a | [] | no_license | davidbestue/encoding | 6b304f6e7429f94f97bd562c7544d1fdccf7bdc1 | c27319aa3bb652b3bfc6b7340044c0fda057bc62 | refs/heads/master | 2022-05-05T23:41:42.419252 | 2022-04-27T08:34:52 | 2022-04-27T08:34:52 | 144,248,690 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,024 | py | # -*- coding: utf-8 -*-
"""
Created on Mon Jul 1 18:24:32 2019
@author: David Bestue
"""
## Import functions prom the previous path
import sys
import os
previous_path = os.path.abspath(os.path.join(os.getcwd(), os.pardir))
sys.path.insert(1, previous_path)
from model_functions import *
from fake_data_generator import *
from Weights_matrixs import *
from Representation import *
from process_encoding import *
from process_wm import *
from data_to_use import *
from bootstrap_functions import *
from joblib import Parallel, delayed
import multiprocessing
import time
import random
from sklearn.model_selection import KFold
import multiprocessing
multiprocessing.cpu_count()
### use the cores so we do not run out of memory
numcores = multiprocessing.cpu_count()
if numcores>20:
numcores=numcores-10
if numcores<10:
numcores=numcores-3
##paths to save the files
path_save_signal ='/home/david/Desktop/Reconstructions/IEM/IEM_target_far_delay.xlsx' #cross_b001_target_mix_octave_1_7_far.xlsx'
path_save_shuffle = '/home/david/Desktop/Reconstructions/IEM/shuff_IEM_target_far_delay.xlsx'
## options (check the filename too!)
decoding_thing = 'Target' #'Distractor' #'Target'
Distance_to_use = 'far' #'close' 'far'
training_time= 'delay' #'stim_p' 'delay' 'respo'
## depending on the options, I will use one condition or the other
if decoding_thing=='Distractor':
cond_t = '2_7'
elif decoding_thing=='Target': ##at some point we can go for the response, though it should be similar
cond_t = '1_7'
# depending on the options, the TRs used for the training will be different
if training_time=='stim_p':
tr_st=3
tr_end=4
elif training_time=='delay':
tr_st=4
tr_end=6
elif training_time=='respo':
if decoding_thing=='Target':
tr_st=8
tr_end=9
elif decoding_thing=='Distractor':
tr_st=11
tr_end=12
## dictionary and list to save the files
Reconstructions={}
Reconstructions_shuff=[]
## elements for the loop
Conditions=['1_0.2', '1_7', '2_0.2', '2_7'] # '1_0.2', '1_7', '2_0.2', '2_7'
Subjects=['d001', 'n001', 'b001', 'r001', 's001', 'l001'] #'d001', 'n001', 'b001', 'r001', 's001', 'l001'
brain_regions = ['visual', 'ips', 'pfc'] # 'visual', 'ips', 'pfc'
ref_angle=180
num_shuffles = 10 #00
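## The triple loop below fits IEM weights per subject/region on the training
## condition, then reconstructs each condition (plus shuffled baselines) with
## those weights.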
for Subject in Subjects:
for Brain_region in brain_regions:
#plt.figure()
### Data to use
enc_fmri_paths, enc_beh_paths, wm_fmri_paths, wm_beh_paths, masks = data_to_use( Subject, 'together', Brain_region)
##### Process training data
training_activity, training_behaviour = preprocess_wm_files(wm_fmri_paths, masks, wm_beh_paths, condition=cond_t,
distance=Distance_to_use, sys_use='unix', nscans_wm=nscans_wm, TR=2.335)
#
#training activity
if training_time=='stim_p':
delay_TR_cond = training_activity[:, tr_st, :]
if training_time=='delay':
delay_TR_cond = np.mean(training_activity[:, tr_st:tr_end, :], axis=1) ## training_activity[:, 8, :]
if training_time=='respo':
delay_TR_cond = training_activity[:, tr_st, :]
#
if decoding_thing=='Distractor':
training_thing = training_behaviour['Dist']
elif decoding_thing=='Target':
training_thing = training_behaviour['T']
        ##### Train your weights
WM, Inter = Weights_matrix_LM( delay_TR_cond, training_thing )
WM_t = WM.transpose()
for idx_c, Condition in enumerate(Conditions):
if Condition == cond_t:
training_activity, training_behaviour = delay_TR_cond, training_thing
enc_fmri_paths, enc_beh_paths, wm_fmri_paths, wm_beh_paths, masks = data_to_use( Subject, 'together', Brain_region)
testing_activity, testing_behaviour = preprocess_wm_files(wm_fmri_paths, masks, wm_beh_paths,
condition=Condition, distance=Distance_to_use, sys_use='unix', nscans_wm=nscans_wm, TR=2.335)
#
Reconstruction = IEM_cross_condition_kfold(testing_activity= testing_activity, testing_behaviour=testing_behaviour,
decode_item= decoding_thing, WM=WM, WM_t=WM_t, Inter=Inter, tr_st=tr_st, tr_end=tr_end, n_slpits=10)
Reconstructions[Subject + '_' + Brain_region + '_' + Condition]=Reconstruction
shuff = IEM_cross_condition_kfold_shuff(testing_activity=testing_activity, testing_behaviour=testing_behaviour,
decode_item=decoding_thing, WM=WM, WM_t=WM_t, Inter=Inter, condition=Condition, subject=Subject, region=Brain_region,
iterations=num_shuffles, tr_st=tr_st, tr_end=tr_end, ref_angle=180, n_slpits=10)
Reconstructions_shuff.append(shuff)
###Reconstructions_shuff.append(shuff)
else:
Reconstruction, shuff = all_process_condition_shuff( Subject=Subject, Brain_Region=Brain_region, WM=WM, WM_t=WM_t,
distance=Distance_to_use, decode_item= decoding_thing, iterations=num_shuffles, Inter=Inter, Condition=Condition,
method='together', heatmap=False) #100
Reconstructions[Subject + '_' + Brain_region + '_' + Condition]=Reconstruction
Reconstructions_shuff.append(shuff)
### Save signal
### Extract the decoded signal from the reconstructions (done here rather than inside the function, in case you want to save the whole reconstruction)
### If you want to save the whole reconstruction, uncomment the following lines
### Save Reconstructions
# path_save_reconstructions = #
# writer = pd.ExcelWriter(path_save_reconstructions)
# for i in range(len(Reconstructions.keys())):
# Reconstructions[Reconstructions.keys()[i]].to_excel(writer, sheet_name=Reconstructions.keys()[i]) #each dataframe in a excel sheet
# writer.save() #save reconstructions (heatmaps)
#Save just the signal (around the decoding thing)
Decoding_df =[]
for dataframes in Reconstructions.keys():
df = Reconstructions[dataframes]
a = pd.DataFrame(df.iloc[ref_angle*2,:]) ##*2 because there are 720
a = a.reset_index()
a.columns = ['times', 'decoding'] # column names
a['decoding'] = [sum(df.iloc[:,i] * f2(ref_angle)) for i in range(len(a))] #"population vector method" scalar product
a['times']=a['times'].astype(float)
a['region'] = dataframes.split('_')[1]
a['subject'] = dataframes.split('_')[0]
a['condition'] = dataframes.split('_')[-2] + '_' + dataframes.split('_')[-1]
Decoding_df.append(a)
Df = pd.concat(Decoding_df)
Df['label'] = 'signal' #ad the label of signal (you will concatenate this df with the one of the shuffleing)
Df.to_excel( path_save_signal ) #save signal
### Save Shuffle (in shuffles you do not need to get the *2 thing because it is done inside the function)
Df_shuffs = pd.concat(Reconstructions_shuff)
Df_shuffs['label'] = 'shuffle' ## add the label of shuffle
Df_shuffs.to_excel(path_save_shuffle) #save shuffle
| [
"[email protected]"
] | |
ba41f7aef79a7bcb7d8f12a8308d4d45eacd1ce8 | 6f4e925bf4538d104f1e3e9754d4297c5504ab80 | /python/recall/app/domain/__init__.py | 55879d938b278a1a92324ce2cb752388ae90ff9d | [
"MIT"
] | permissive | kingreatwill/openself | 7f02282da3e0b1f328c3627d83ba2b5ed4563dc8 | 8517d24e665b39371835ecd2ed0cd3509a5d9d62 | refs/heads/master | 2023-01-23T13:15:49.491402 | 2020-11-19T02:39:52 | 2020-11-19T02:39:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 596 | py | import models
class Domain:
def __init__(self, model: models.db.Document):
self.model = model
# 列表;
def list(self, size=10, index=1, **kwargs):
size = int(size)
index = int(index)
return self.model.objects(**kwargs).skip((index - 1) * size).limit(size)
# 明细;
def get(self, id):
return self.model.objects(**{self.model.key(): id}).first()
def update(self, id, **kwargs):
model = self.model.objects(**{self.model.key(): id}).first()
if model:
return model.update(**kwargs)
return True
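# Hypothetical usage sketch (assumes a mongoengine-style `models.User` Document):
#   users = Domain(models.User)
#   page_two = users.list(size=20, index=2, active=True)
#   one = users.get(some_id)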
| [
"[email protected]"
] | |
0894ab4443b0c20e40c07b66f35c003920e6f84e | 39735aac6631d2992b47ad7645930266322a4774 | /tensorflow_probability/python/distributions/distribution_test.py | d25be8d069320024879b7b4b5336383f16bf12b7 | [
"Apache-2.0"
] | permissive | Abdelpakey/probability | b96dff53fab9d9405f39d224fa77ff13f871c5db | b43d491fade784ae216a026823d2d27d7317264f | refs/heads/master | 2020-04-01T05:26:28.718050 | 2019-02-15T15:47:16 | 2019-02-15T15:47:16 | 152,903,143 | 0 | 0 | null | 2018-10-13T18:39:10 | 2018-10-13T18:39:31 | null | UTF-8 | Python | false | false | 13,317 | py | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from tensorflow.python.framework import tensor_util
from tensorflow.python.framework import test_util
tfd = tfp.distributions
@test_util.run_all_in_graph_and_eager_modes
class DistributionTest(tf.test.TestCase):
def testParamShapesAndFromParams(self):
classes = [
tfd.Normal,
tfd.Bernoulli,
tfd.Beta,
tfd.Chi2,
tfd.Exponential,
tfd.Gamma,
tfd.InverseGamma,
tfd.Laplace,
tfd.StudentT,
tfd.Uniform,
]
sample_shapes = [(), (10,), (10, 20, 30)]
for cls in classes:
for sample_shape in sample_shapes:
param_shapes = cls.param_shapes(sample_shape)
params = dict([(name, tf.random_normal(shape))
for name, shape in param_shapes.items()])
dist = cls(**params)
self.assertAllEqual(sample_shape, self.evaluate(
tf.shape(dist.sample())))
dist_copy = dist.copy()
self.assertAllEqual(sample_shape,
self.evaluate(tf.shape(dist_copy.sample())))
self.assertEqual(dist.parameters, dist_copy.parameters)
def testCopyExtraArgs(self):
# Note: we cannot easily test all distributions since each requires
# different initialization arguments. We therefore spot test a few.
normal = tfd.Normal(loc=1., scale=2., validate_args=True)
self.assertEqual(normal.parameters, normal.copy().parameters)
wishart = tfd.Wishart(df=2, scale=[[1., 2], [2, 5]], validate_args=True)
self.assertEqual(wishart.parameters, wishart.copy().parameters)
def testCopyOverride(self):
normal = tfd.Normal(loc=1., scale=2., validate_args=True)
unused_normal_copy = normal.copy(validate_args=False)
base_params = normal.parameters.copy()
copy_params = normal.copy(validate_args=False).parameters.copy()
self.assertNotEqual(
base_params.pop("validate_args"), copy_params.pop("validate_args"))
self.assertEqual(base_params, copy_params)
def testIsScalar(self):
mu = 1.
sigma = 2.
normal = tfd.Normal(mu, sigma, validate_args=True)
self.assertTrue(tensor_util.constant_value(normal.is_scalar_event()))
self.assertTrue(tensor_util.constant_value(normal.is_scalar_batch()))
normal = tfd.Normal([mu], [sigma], validate_args=True)
self.assertTrue(tensor_util.constant_value(normal.is_scalar_event()))
self.assertFalse(tensor_util.constant_value(normal.is_scalar_batch()))
mvn = tfd.MultivariateNormalDiag([mu], [sigma], validate_args=True)
self.assertFalse(tensor_util.constant_value(mvn.is_scalar_event()))
self.assertTrue(tensor_util.constant_value(mvn.is_scalar_batch()))
mvn = tfd.MultivariateNormalDiag([[mu]], [[sigma]], validate_args=True)
self.assertFalse(tensor_util.constant_value(mvn.is_scalar_event()))
self.assertFalse(tensor_util.constant_value(mvn.is_scalar_batch()))
# We now test every codepath within the underlying is_scalar_helper
# function.
# Test case 1, 2.
x = tf.placeholder_with_default(input=1, shape=[])
# None would fire an exception were it actually executed.
self.assertTrue(normal._is_scalar_helper(x.get_shape(), lambda: None))
self.assertTrue(
normal._is_scalar_helper(tf.TensorShape(None), lambda: tf.shape(x)))
x = tf.placeholder_with_default(input=[1], shape=[1])
# None would fire an exception were it actually executed.
self.assertFalse(normal._is_scalar_helper(x.get_shape(), lambda: None))
self.assertFalse(
normal._is_scalar_helper(tf.TensorShape(None), lambda: tf.shape(x)))
# There's no notion of partially known shapes in eager mode, so exit
# early.
if tf.executing_eagerly():
return
# Test case 3.
x = tf.placeholder_with_default(input=1, shape=None)
is_scalar = normal._is_scalar_helper(x.get_shape(), lambda: tf.shape(x))
self.assertTrue(self.evaluate(is_scalar))
x = tf.placeholder_with_default(input=[1], shape=None)
is_scalar = normal._is_scalar_helper(x.get_shape(), lambda: tf.shape(x))
self.assertFalse(self.evaluate(is_scalar))
def _GetFakeDistribution(self):
class FakeDistribution(tfd.Distribution):
"""Fake Distribution for testing _set_sample_static_shape."""
def __init__(self, batch_shape=None, event_shape=None):
self._static_batch_shape = tf.TensorShape(batch_shape)
self._static_event_shape = tf.TensorShape(event_shape)
super(FakeDistribution, self).__init__(
dtype=tf.float32,
reparameterization_type=tfd.NOT_REPARAMETERIZED,
validate_args=True,
allow_nan_stats=True,
name="DummyDistribution")
def _batch_shape(self):
return self._static_batch_shape
def _event_shape(self):
return self._static_event_shape
return FakeDistribution
def testSampleShapeHints(self):
# In eager mode, all shapes are known, so these tests do not need to
# execute.
if tf.executing_eagerly():
return
fake_distribution = self._GetFakeDistribution()
# Make a new session since we're playing with static shapes. [And below.]
x = tf.placeholder_with_default(
input=np.ones((6, 7, 2, 3, 5), dtype=np.float32), shape=None)
dist = fake_distribution(batch_shape=[2, 3], event_shape=[5])
sample_shape = tf.convert_to_tensor([6, 7], dtype=tf.int32)
y = dist._set_sample_static_shape(x, sample_shape)
# We use as_list since TensorShape comparison does not work correctly for
# unknown values, ie, Dimension(None).
self.assertAllEqual([6, 7, 2, 3, 5], y.get_shape().as_list())
x = tf.placeholder_with_default(
input=np.ones((6, 7, 2, 3, 5), dtype=np.float32), shape=None)
dist = fake_distribution(batch_shape=[None, 3], event_shape=[5])
sample_shape = tf.convert_to_tensor([6, 7], dtype=tf.int32)
y = dist._set_sample_static_shape(x, sample_shape)
self.assertAllEqual([6, 7, None, 3, 5], y.get_shape().as_list())
x = tf.placeholder_with_default(
input=np.ones((6, 7, 2, 3, 5), dtype=np.float32), shape=None)
dist = fake_distribution(batch_shape=[None, 3], event_shape=[None])
sample_shape = tf.convert_to_tensor([6, 7], dtype=tf.int32)
y = dist._set_sample_static_shape(x, sample_shape)
self.assertAllEqual([6, 7, None, 3, None], y.get_shape().as_list())
x = tf.placeholder_with_default(
input=np.ones((6, 7, 2, 3, 5), dtype=np.float32), shape=None)
dist = fake_distribution(batch_shape=None, event_shape=None)
sample_shape = tf.convert_to_tensor([6, 7], dtype=tf.int32)
y = dist._set_sample_static_shape(x, sample_shape)
self.assertTrue(y.get_shape().ndims is None)
x = tf.placeholder_with_default(
input=np.ones((6, 7, 2, 3, 5), dtype=np.float32), shape=None)
dist = fake_distribution(batch_shape=[None, 3], event_shape=None)
# There's no notion of partially known shapes in eager mode, so exit
# early.
sample_shape = tf.convert_to_tensor([6, 7], dtype=tf.int32)
y = dist._set_sample_static_shape(x, sample_shape)
self.assertTrue(y.get_shape().ndims is None)
def testNameScopeWorksCorrectly(self):
x = tfd.Normal(loc=0., scale=1., name="x")
x_duplicate = tfd.Normal(loc=0., scale=1., name="x")
with tf.name_scope("y") as name:
y = tfd.Bernoulli(logits=0., name=name)
x_sample = x.sample(name="custom_sample")
x_sample_duplicate = x.sample(name="custom_sample")
x_log_prob = x.log_prob(0., name="custom_log_prob")
x_duplicate_sample = x_duplicate.sample(name="custom_sample")
self.assertEqual(x.name, "x/")
self.assertEqual(y.name, "y/")
# There's no notion of graph, hence the same name will be reused.
# Tensors also do not have names in eager mode, so exit early.
if tf.executing_eagerly():
return
self.assertTrue(x_sample.name.startswith("x/custom_sample"))
self.assertTrue(x_log_prob.name.startswith("x/custom_log_prob"))
self.assertEqual(x_duplicate.name, "x_1/")
self.assertTrue(x_duplicate_sample.name.startswith(
"x_1/custom_sample"))
self.assertTrue(x_sample_duplicate.name.startswith("x/custom_sample_1"))
def testStrWorksCorrectlyScalar(self):
# Usually we'd write np.float(X) here, but a recent Eager bug would
# erroneously coerce the value to float32 anyway. We therefore use constants
# here, until the bug is resolved in TensorFlow 1.12.
normal = tfd.Normal(loc=tf.constant(0, tf.float16),
scale=tf.constant(1, tf.float16))
self.assertEqual(
str(normal),
"tfp.distributions.Normal("
"\"Normal/\", "
"batch_shape=(), "
"event_shape=(), "
"dtype=float16)")
chi2 = tfd.Chi2(df=np.float32([1., 2.]), name="silly")
self.assertEqual(
str(chi2),
"tfp.distributions.Chi2("
"\"silly/\", " # What a silly name that is!
"batch_shape=(2,), "
"event_shape=(), "
"dtype=float32)")
# There's no notion of partially known shapes in eager mode, so exit
# early.
if tf.executing_eagerly():
return
exp = tfd.Exponential(rate=tf.placeholder_with_default(
input=1., shape=None))
self.assertEqual(
str(exp),
"tfp.distributions.Exponential(\"Exponential/\", "
# No batch shape.
"event_shape=(), "
"dtype=float32)")
def testStrWorksCorrectlyMultivariate(self):
mvn_static = tfd.MultivariateNormalDiag(
loc=np.zeros([2, 2]), name="MVN")
self.assertEqual(
str(mvn_static),
"tfp.distributions.MultivariateNormalDiag("
"\"MVN/\", "
"batch_shape=(2,), "
"event_shape=(2,), "
"dtype=float64)")
# There's no notion of partially known shapes in eager mode, so exit
# early.
if tf.executing_eagerly():
return
mvn_dynamic = tfd.MultivariateNormalDiag(
loc=tf.placeholder_with_default(
input=np.ones((3, 3), dtype=np.float32), shape=[None, 3]),
name="MVN2")
self.assertEqual(
str(mvn_dynamic),
"tfp.distributions.MultivariateNormalDiag("
"\"MVN2/\", "
"batch_shape=(?,), " # Partially known.
"event_shape=(3,), "
"dtype=float32)")
def testReprWorksCorrectlyScalar(self):
# Usually we'd write np.float(X) here, but a recent Eager bug would
# erroneously coerce the value to float32 anyway. We therefore use constants
# here, until the bug is resolved in TensorFlow 1.12.
normal = tfd.Normal(loc=tf.constant(0, tf.float16),
scale=tf.constant(1, tf.float16))
self.assertEqual(
repr(normal),
"<tfp.distributions.Normal"
" 'Normal/'"
" batch_shape=()"
" event_shape=()"
" dtype=float16>")
chi2 = tfd.Chi2(df=np.float32([1., 2.]), name="silly")
self.assertEqual(
repr(chi2),
"<tfp.distributions.Chi2"
" 'silly/'" # What a silly name that is!
" batch_shape=(2,)"
" event_shape=()"
" dtype=float32>")
# There's no notion of partially known shapes in eager mode, so exit
# early.
if tf.executing_eagerly():
return
exp = tfd.Exponential(rate=tf.placeholder_with_default(
input=1., shape=None))
self.assertEqual(
repr(exp),
"<tfp.distributions.Exponential"
" 'Exponential/'"
" batch_shape=<unknown>"
" event_shape=()"
" dtype=float32>")
def testReprWorksCorrectlyMultivariate(self):
mvn_static = tfd.MultivariateNormalDiag(
loc=np.zeros([2, 2]), name="MVN")
self.assertEqual(
repr(mvn_static),
"<tfp.distributions.MultivariateNormalDiag"
" 'MVN/'"
" batch_shape=(2,)"
" event_shape=(2,)"
" dtype=float64>")
# There's no notion of partially known shapes in eager mode, so exit
# early.
if tf.executing_eagerly():
return
mvn_dynamic = tfd.MultivariateNormalDiag(
loc=tf.placeholder_with_default(
input=np.ones((3, 3), dtype=np.float32), shape=[None, 3]),
name="MVN2")
self.assertEqual(
repr(mvn_dynamic),
"<tfp.distributions.MultivariateNormalDiag"
" 'MVN2/'"
" batch_shape=(?,)" # Partially known.
" event_shape=(3,)"
" dtype=float32>")
if __name__ == "__main__":
tf.test.main()
| [
"[email protected]"
] | |
f8d8ee4061dbff936f37094f60a8e6e5b2dbd040 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p2DJ/New/program/qiskit/noisy/startQiskit_noisy241.py | d59a29805cadc73e57d6ef26c5940d08ffc753ef | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,185 | py | # qubit number=2
# total number=11
import cirq
import qiskit
from qiskit.providers.aer import QasmSimulator
from qiskit.test.mock import FakeVigo
from qiskit import IBMQ
from qiskit import QuantumCircuit, QuantumRegister, ClassicalRegister
from qiskit import BasicAer, execute, transpile
from pprint import pprint
from qiskit.test.mock import FakeVigo
from math import log2,floor, sqrt, pi
import numpy as np
import networkx as nx
def build_oracle(n: int, f) -> QuantumCircuit:
# implement the oracle O_f^\pm
# NOTE: use U1 gate (P gate) with \lambda = 180 ==> CZ gate
# or multi_control_Z_gate (issue #127)
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
# oracle.draw('mpl', filename='circuit/deutsch-oracle.png')
return oracle
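# Deutsch-Jozsa-style template: superpose the inputs, apply the oracle O_f,
# apply H again, then append this variant's extra hand-inserted gates.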
def make_circuit(n:int,f) -> QuantumCircuit:
# circuit begin
input_qubit = QuantumRegister(n, "qc")
target = QuantumRegister(1, "qt")
prog = QuantumCircuit(input_qubit, target)
# inverse last one (can be omitted if using O_f^\pm)
prog.x(target)
# apply H to get superposition
for i in range(n):
prog.h(input_qubit[i])
prog.h(input_qubit[1]) # number=1
prog.h(target)
prog.barrier()
# apply oracle O_f
oracle = build_oracle(n, f)
prog.append(
oracle.to_gate(),
[input_qubit[i] for i in range(n)] + [target])
# apply H back (QFT on Z_2^n)
for i in range(n):
prog.h(input_qubit[i])
prog.barrier()
# measure
#for i in range(n):
# prog.measure(input_qubit[i], classicals[i])
prog.swap(input_qubit[1],input_qubit[0]) # number=2
prog.swap(input_qubit[1],input_qubit[0]) # number=3
prog.cx(input_qubit[0],input_qubit[1]) # number=8
prog.x(input_qubit[1]) # number=9
prog.cx(input_qubit[0],input_qubit[1]) # number=10
prog.cx(input_qubit[0],input_qubit[1]) # number=7
prog.rx(-2.73004401596953,input_qubit[1]) # number=6
prog.z(input_qubit[1]) # number=4
# circuit end
return prog
if __name__ == '__main__':
n = 2
f = lambda rep: rep[-1]
# f = lambda rep: "1" if rep[0:2] == "01" or rep[0:2] == "10" else "0"
# f = lambda rep: "0"
prog = make_circuit(n, f)
sample_shot =2800
backend = FakeVigo()
circuit1 = transpile(prog,FakeVigo())
circuit1.x(qubit=3)
circuit1.x(qubit=3)
circuit1.measure_all()
prog = circuit1
info = execute(prog, backend=backend, shots=sample_shot).result().get_counts()
writefile = open("../data/startQiskit_noisy241.csv","w")
print(info,file=writefile)
print("results end", file=writefile)
print(circuit1.depth(),file=writefile)
print(circuit1,file=writefile)
writefile.close()
| [
"[email protected]"
] | |
5a0c7f2232c9b5b9b6aebd0299f3b756198fbcab | a1488a281e582373b7270d85059f08330c0b685d | /dueros/directive/Display/tag/NewTag.py | 86f19cc6e51950cde6ea45a11b1821aef4bfab0f | [
"Apache-2.0"
] | permissive | xuchengzhi/bot-sdk-python | 473fb8e7df629a6168983e26de74546bbca32768 | 966d103d55f9f1220c00d806ac13d0754015a31c | refs/heads/master | 2020-12-06T17:00:55.587643 | 2019-10-18T02:54:45 | 2019-10-18T02:54:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | #!/usr/bin/env python3
# -*- encoding=utf-8 -*-
# description:
# author:jack
# create_time: 2018/9/17
"""
desc:pass
"""
from dueros.directive.Display.tag.TagTypeEnum import TagTypeEnum
from dueros.directive.Display.tag.BaseTag import BaseTag
class NewTag(BaseTag):
def __init__(self):
super(NewTag, self).__init__(TagTypeEnum.TAG_TYPE_NEW, '最新')
if __name__ == '__main__':
pass | [
"[email protected]"
] | |
71de969b7a916bf5eab2c78a574d3186cf9d290b | 365186abceefc51e811706ad325a2f53a63a25f8 | /tests/scripts/comparisonPlots.py | e0fbf4cda1982e9d1ea7945d69f32ca47e3b51b7 | [
"MIT"
] | permissive | tboudreaux/pytopsscrape | a857bdca8558bf86f7afd5f8f3e6d2e5ca90fa64 | c9f95e6a6419debb0b6a22f22d1574a8bbf73bd0 | refs/heads/master | 2023-04-06T17:48:08.812425 | 2023-04-04T00:01:13 | 2023-04-04T00:01:13 | 532,559,997 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,499 | py | import matplotlib.pyplot as plt
from pyTOPSScrape.parse import load_opal
import os
import datetime
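# Compares a freshly generated opacity table against the stored target and an
# OPAL reference at fixed log(R) = -1.5, writing the overlay and residual PDFs.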
def make_comparision_plot():
TargetPath = "./GS98Target.opac"
TestPath = "./GS98TestResult.opac"
OPALPath = "./GS98OPAL.opac"
targetTime = datetime.datetime.fromtimestamp(os.path.getmtime(TargetPath))
testTime = datetime.datetime.fromtimestamp(os.path.getmtime(TestPath))
OPALTime = datetime.datetime.fromtimestamp(os.path.getmtime(OPALPath))
print(f"Target File Last Modified at: {targetTime}")
print(f"Test File Last Modified at: {testTime}")
print(f"OPAL Comp File Last Modified at: {OPALTime}")
Target = load_opal(TargetPath)
Test = load_opal(TestPath)
OPAL = load_opal(OPALPath)
fig, ax = plt.subplots(1,1,figsize=(10,7))
ax.plot(Target[0], Target[2][75, :, 13], label="Current Test Target")
ax.plot(Test[0], Test[2][75, :, 13], label="Test Result")
ax.plot(OPAL[0], OPAL[2][75, :, 13], label="OPAL")
ax.legend()
ax.set_xlabel("Log T")
ax.set_ylabel("Opacity")
ax.set_title("Comparision made at log(R)=-1.5")
plt.savefig("comparison.pdf", bbox_inches='tight')
fig, ax = plt.subplots(1,1,figsize=(10,7))
ax.plot(Target[0], Target[2][75, :, 13] - Test[2][75, :, 13])
ax.set_xlabel("Log T")
ax.set_ylabel("Opacity")
ax.set_title("Target - Result Residuals made at log(R)=-1.5")
plt.savefig("TRResid.pdf", bbox_inches='tight')
if __name__ == "__main__":
make_comparision_plot()
| [
"[email protected]"
] | |
6c7175ef0bf5c454553094b3f009ebac86114775 | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/violin/_metasrc.py | 466d497ebca4399340d12c5b16b62c2cd713264a | [
"MIT"
] | permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 392 | py | import _plotly_utils.basevalidators
class MetasrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(self, plotly_name="metasrc", parent_name="violin", **kwargs):
super(MetasrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs
)
| [
"[email protected]"
] | |
c3e40fb6c21a8b78a1912a6dddd65973f62ce9b6 | a82dfb61b17fa66b9c75fe871401cff77aa77f56 | /libmcell/definition/doc.py | 48cdb0ffba9e2e4089d530fbc690a17b0601d563 | [
"MIT"
] | permissive | mcellteam/mcell | 49ca84048a091de8933adccc083d31b7bcb1529e | 3920aec22c55013b78f7d6483b81f70a0d564d22 | refs/heads/master | 2022-12-23T15:01:51.931150 | 2021-09-29T16:49:14 | 2021-09-29T16:49:14 | 10,253,341 | 29 | 12 | NOASSERTION | 2021-07-08T01:56:40 | 2013-05-23T20:59:54 | C++ | UTF-8 | Python | false | false | 7,463 | py | """
Copyright (C) 2021 by
The Salk Institute for Biological Studies
Use of this source code is governed by an MIT-style
license that can be found in the LICENSE file or at
https://opensource.org/licenses/MIT.
"""
import sys
import os
import yaml
from constants import *
from gen import indent_and_fix_rst_chars, yaml_type_to_py_type, get_default_or_unset_value_py
def cat_to_title(cat):
if cat == CATEGORY_CONSTANTS:
return 'Enums and Constants'
else:
return cat.replace('_', ' ').capitalize()
def write_cat_label(f, cat):
f.write('.. _api-' + cat + ':\n\n')
def gen_example_links(base_links):
split_links = base_links.strip().split()
n = len(split_links)
if n == 0:
return ''
res = 'Example' + ('' if n == 1 else 's') + ': '
for l in split_links:
name = os.path.basename(os.path.dirname(l)) + '/' + os.path.basename(l)
res += '`' + name + ' <' + EXAMPLES_BASE_URL + l + '>`_ '
return res
def write_h4(f, text, name, class_name):
f.write('.. _' + class_name + '__' + name + ':\n\n')
f.write(text + '\n')
f.write('-' * len(text) + '\n\n')
def get_method_declaration(method):
res = method[KEY_NAME] + ' ('
if KEY_PARAMS in method:
num_params = len(method[KEY_PARAMS])
for i in range(num_params):
param = method[KEY_PARAMS][i]
t = yaml_type_to_py_type(param[KEY_TYPE])
res += param[KEY_NAME] + ': ' + t
if KEY_DEFAULT in param:
res += '=' + get_default_or_unset_value_py(param)
if i != num_params - 1:
res += ', '
res += ')'
if KEY_RETURN_TYPE in method:
res += ' -> ' + yaml_type_to_py_type(method[KEY_RETURN_TYPE])
return res
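# Emits one RST section per class: title, docstring, example links, then
# attribute and method subsections anchored via write_h4.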
def generate_class_documentation(f, class_name, class_def):
f.write(class_name + '\n' + '='*len(class_name) + '\n\n')
if KEY_DOC in class_def:
f.write(class_def[KEY_DOC].strip() + '\n\n')
if KEY_EXAMPLES in class_def:
f.write(gen_example_links(class_def[KEY_EXAMPLES]) + '\n\n')
if KEY_ITEMS in class_def and class_def[KEY_ITEMS]:
f.write('Attributes:\n' + '*'*len('Attributes:') + '\n')
num_items = len(class_def[KEY_ITEMS])
for item in class_def[KEY_ITEMS]:
t = yaml_type_to_py_type(item[KEY_TYPE])
header = item[KEY_NAME] + ': ' + t
write_h4(f, header, item[KEY_NAME], class_name)
if KEY_DOC in item and item[KEY_DOC]:
f.write(' | ' + indent_and_fix_rst_chars(item[KEY_DOC].strip(), ' | ') + '\n')
if KEY_DEFAULT in item:
f.write(' | - default argument value in constructor: ' + get_default_or_unset_value_py(item))
f.write('\n')
if KEY_EXAMPLES in item:
f.write('\n | ' + gen_example_links(item[KEY_EXAMPLES]) + '\n\n')
f.write('\n')
if KEY_METHODS in class_def and class_def[KEY_METHODS]:
        f.write('\nMethods:\n' + '*'*len('Methods:') + '\n')
for method in class_def[KEY_METHODS]:
method_name = method[KEY_NAME]
header = get_method_declaration(method)
write_h4(f, header, method_name, class_name)
if KEY_DOC in method:
f.write('\n | ' + indent_and_fix_rst_chars(method[KEY_DOC].strip(), ' | ') + '\n\n')
if KEY_PARAMS in method:
num_params = len(method[KEY_PARAMS])
for param in method[KEY_PARAMS]:
t = yaml_type_to_py_type(param[KEY_TYPE])
f.write('* | ' + param[KEY_NAME] + ': ' + t)
if KEY_DEFAULT in param:
f.write(' = ' + get_default_or_unset_value_py(param))
if KEY_DOC in param:
f.write('\n | ' + indent_and_fix_rst_chars(param[KEY_DOC].strip(), ' | ') + '\n\n')
else:
f.write('\n')
if KEY_EXAMPLES in method:
f.write(' | ' + gen_example_links(method[KEY_EXAMPLES]) + '\n\n')
f.write('\n')
f.write('\n')
def generate_documentation(data_classes):
# generate constants
with open(os.path.join(DOC_DIRECTORY, CATEGORY_CONSTANTS + EXT_RST), 'w') as f:
write_cat_label(f, CATEGORY_CONSTANTS)
f.write(
'*******************\n' +
cat_to_title(CATEGORY_CONSTANTS) + '\n' +
'*******************\n\n'
)
# generate enums first, then constants
enums = data_classes[KEY_ENUMS]
for enum in enums:
enum_name = enum[KEY_NAME]
f.write(enum_name + '\n' + '='*len(enum_name) + '\n\n')
if KEY_DOC in enum:
f.write('\n | ' + indent_and_fix_rst_chars(enum[KEY_DOC].strip(), ' | ') + '\n\n')
for value in enum[KEY_VALUES]:
f.write('* | **' + value[KEY_NAME] + '** = ' + str(value[KEY_VALUE]) + '\n')
if KEY_DOC in value:
f.write(' | ' + indent_and_fix_rst_chars(value[KEY_DOC].strip(), ' | ') + '\n\n')
f.write('\n')
f.write('\n\n')
c = 'Constants'
f.write(c + '\n' + '='*len(c) + '\n\n')
constants = data_classes[KEY_CONSTANTS]
for const in constants:
const_name = const[KEY_NAME]
f.write('* | **' + const_name + '**: ' + yaml_type_to_py_type(const[KEY_TYPE]) + \
' = ' + str(const[KEY_VALUE]) +'\n')
if KEY_DOC in const:
f.write(' | ' + indent_and_fix_rst_chars(const[KEY_DOC].strip(), ' | ') + '\n\n')
f.write('\n\n')
# then generate classes into files by category
for cat in CATEGORIES:
if cat == CATEGORY_CONSTANTS:
continue
input_file = cat + EXT_RST
with open(os.path.join(DOC_DIRECTORY, input_file), 'w') as f:
write_cat_label(f, cat)
cat_name = cat_to_title(cat)
f.write('*'*len(cat_name) + '\n' + cat_name + '\n' + '*'*len(cat_name) + '\n')
for key, value in sorted(data_classes.items()):
if key != KEY_CONSTANTS and key != KEY_ENUMS and value[KEY_CATEGORY] == cat:
generate_class_documentation(f, key, value)
# and generate api.rst file
with open(os.path.join(DOC_DIRECTORY, API_RST), 'w') as f:
title = 'Python API Reference'
f.write(
title + '\n' +
'='*len(title) + '\n\n'
)
f.write(
'.. toctree::\n'
' :maxdepth: 2\n'
' :hidden:\n'
' :caption: Contents\n\n'
)
for cat in CATEGORIES:
f.write(' ' + cat + '\n')
f.write('\nThis section contains automatically generated documentation on Python classes, enums, '
'and constants provided by MCell.\n\n')
for cat in CATEGORIES:
f.write('- :ref:`api-' + cat + '`\n')
| [
"[email protected]"
] | |
23458d70bd4f9ae696d8d81fa5c01f56971f7da7 | 3b7474148c07df7f4755106a3d0ada9b2de5efdc | /training/c25_flask/examples/world_api/original/tools.py | 1dc1684dc799c4417844116947e284621a8d0bee | [] | no_license | juancsosap/pythontraining | 7f67466846138f32d55361d64de81e74a946b484 | 1441d6fc9544042bc404d5c7efffd119fce33aa7 | refs/heads/master | 2021-08-26T05:37:15.851025 | 2021-08-11T22:35:23 | 2021-08-11T22:35:23 | 129,974,006 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,890 | py | import pymysql
from flask import request, jsonify, render_template, make_response, abort
def xmlify(template, value):
text = render_template(template, value=value)
response = make_response(text)
response.headers['Content-Type'] = 'application/xml'
return response
def prepare_response(template, info):
if len(info) > 0:
formats = ['application/json', 'application/xml']
accept = request.accept_mimetypes.best_match(formats)
if accept == 'application/json':
return jsonify(info)
elif accept == 'application/xml':
return xmlify(template, info)
else:
abort(406)
return make_response(jsonify({}), 204)
class MySQLDBManager:
def __init__(self, **kwargs):
self.host = kwargs['host'] if 'host' in kwargs else 'localhost'
self.port = kwargs['port'] if 'port' in kwargs else 3306
self.user = kwargs['user'] if 'user' in kwargs else 'root'
self.password = kwargs['password']
self.db = kwargs['db']
def connect(self):
self.conn = pymysql.connect(host=self.host,
port=self.port,
db=self.db,
user=self.user,
password=self.password)
self.cursor = self.conn.cursor()
def disconnect(self):
if self.conn:
self.conn.close()
def execute(self, sql, *args):
if len(args) > 0:
self.cursor.execute(sql, args)
else:
self.cursor.execute(sql)
result = self.cursor.fetchall()
return result
dbman = MySQLDBManager(password='roottoor', db='world')
module_name = 'tools.tools'
if __name__ == '__main__':
print('Loading {} module'.format(module_name))
else:
print('Importing {} module'.format(module_name))
| [
"[email protected]"
] | |
d37148f04674bda7b996896bf1686b7c6b7db8c4 | a8b0599af76b5393039431f876be00d628a1fe43 | /backend/kangas/server/__init__.py | 4f348efc3c7c9d70831cec668eae3966b7210d79 | [
"Apache-2.0"
] | permissive | comet-ml/kangas | c951f648d890dca5a66cbab405d3437be2f3e9e3 | df0c1a495032cc4f1c367c74fcb0ef6e5a2063be | refs/heads/main | 2023-06-12T23:38:43.068259 | 2023-06-05T18:38:34 | 2023-06-05T19:28:33 | 550,324,241 | 944 | 41 | Apache-2.0 | 2023-06-05T19:28:35 | 2022-10-12T15:10:04 | Jupyter Notebook | UTF-8 | Python | false | false | 2,379 | py | # -*- coding: utf-8 -*-
######################################################
# _____ _____ _ _ #
# (____ \ _ | ___) (_) | | #
# _ \ \ ____| |_ ____| | ___ ___ _ _ | | #
# | | | )/ _ | _)/ _ | |(_ / __) |/ || | #
# | |__/ ( ( | | | ( ( | | |__| | | | ( (_| | #
# |_____/ \_||_|___)\_||_|_____/|_| |_|\____| #
# #
# Copyright (c) 2023 Kangas Development Team #
# All rights reserved #
######################################################
import os
from .queries import KANGAS_ROOT # noqa
def start_tornado_server(port, debug_level=None, max_workers=None):
"""
Args:
port: (int) the port to start the frontend server
debug_level: (str) None means suppress output from servers
"""
import asyncio
from concurrent.futures import ThreadPoolExecutor
import tornado
import tornado.log
import tornado.options
import tornado.web
from .tornado_server import datagrid_handlers
async def main():
if debug_level is not None:
tornado.options.options["logging"] = debug_level
tornado.log.enable_pretty_logging()
# set max_workers
executor = ThreadPoolExecutor(max_workers=max_workers)
print(
"Kangas tornado backend server starting with %s max workers"
% executor._max_workers
)
for handler in datagrid_handlers:
handler[1].executor = executor
app = tornado.web.Application(datagrid_handlers)
app.listen(port)
await asyncio.Event().wait()
try:
asyncio.run(main())
except KeyboardInterrupt:
print()
print("Exiting Kangas tornado backend server")
def start_flask_server(host, port, debug_level=None, max_workers=None):
from .flask_server import run
if max_workers is None:
max_workers = min(32, os.cpu_count() + 4)
print("Kangas flask backend server starting with %s max workers" % max_workers)
try:
run(
host=host,
port=port,
debug_level=debug_level,
max_workers=max_workers,
)
except KeyboardInterrupt:
print()
print("Exiting Kangas flask backend server")
| [
"[email protected]"
] | |
1e1ae4854016d822cbf704f310b243729c7e3e4a | 21acc25dd3969318dd8476e364fe2fb4eabfe4f4 | /podcastninja/migrations/0005_auto_20150423_1005.py | 5c94549f4db338ebc255bd4a0a32cb7727b5426a | [] | no_license | monty5811/podcastninja | 72dc98375974b1714a8457b09126981a76166b9a | 94a55536270f3e1c4e4f2160e0a24e79c9f40b7f | refs/heads/master | 2020-05-17T01:24:57.312486 | 2015-05-25T15:18:03 | 2015-05-25T15:18:03 | 35,883,288 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 526 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('podcastninja', '0004_podcastitem_s3_url'),
]
operations = [
migrations.AlterField(
model_name='podcastitem',
name='s3_url',
field=models.TextField(blank=True, null=True, verbose_name=b's3 url', validators=[django.core.validators.URLValidator()]),
),
]
| [
"[email protected]"
] | |
dc9bc77e75ec86cb2ad265207209d03d37bf69a4 | 7950c4faf15ec1dc217391d839ddc21efd174ede | /leetcode-cn/1929.0_Concatenation_of_Array.py | d8ab060fd5948df008b621e9dca0f8d6bf0d9362 | [] | no_license | lixiang2017/leetcode | f462ecd269c7157aa4f5854f8c1da97ca5375e39 | f93380721b8383817fe2b0d728deca1321c9ef45 | refs/heads/master | 2023-08-25T02:56:58.918792 | 2023-08-22T16:43:36 | 2023-08-22T16:43:36 | 153,090,613 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | '''
执行用时:36 ms, 在所有 Python3 提交中击败了38.78% 的用户
内存消耗:15.1 MB, 在所有 Python3 提交中击败了51.26% 的用户
'''
class Solution:
def getConcatenation(self, nums: List[int]) -> List[int]:
return nums + nums
'''
执行用时:36 ms, 在所有 Python3 提交中击败了38.78% 的用户
内存消耗:15.1 MB, 在所有 Python3 提交中击败了47.15% 的用户
'''
class Solution:
def getConcatenation(self, nums: List[int]) -> List[int]:
return nums * 2 | [
"[email protected]"
] | |
3668163b33ba19dd7eff00d702f7712c5fd93349 | 8a41a7f9340cfa784cb36d35dca1ecb1630e4097 | /Programming/Python/Databases/mongodb_practice/mongodb_with_docker_container_class_based.py | 2b5256a980b7d9de036f2423af2cae13cf65bfc6 | [] | no_license | anishst/Learn | 02e6b6cce43cf21621d328ef0fc25168267a9a3d | a1aed8b78b19acdb23e20be57b67fb242e0aefc5 | refs/heads/master | 2022-05-13T10:17:40.293640 | 2022-03-30T12:44:21 | 2022-03-30T12:44:21 | 173,595,812 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,266 | py | # UNDER DEV NOT FULLY WORKING
import uuid
import pymongo
class Database(object):
URI = "mongodb://192.168.1.50:27017"
DATABASE = None
@staticmethod
def initialize():
client = pymongo.MongoClient(Database.URI)
Database.DATABASE = client['items_test']
@staticmethod
def insert(collection, data):
Database.DATABASE[collection].insert(data)
@staticmethod
def find(collection, query):
return Database.DATABASE[collection].find(query)
@staticmethod
def find_one(collection, query):
return Database.DATABASE[collection].find_one(query)
@staticmethod
def update(collection, query, data):
Database.DATABASE[collection].update(query, data, upsert=True)
@staticmethod
def remove(collection, query):
return Database.DATABASE[collection].remove(query)
class Items:
def __init__(self, store, url, desc, target_price, _id=None):
self._id = uuid.uuid4().hex if _id is None else _id
self.store = store
self.url = url
self.desc = desc
self.target_price = target_price
def __repr__(self):
return "<Item {} with URL {}>".format(self.store, self.url)
def save_to_mongo(self):
Database.update("items_test", {'_id': self._id}, self.json())
def json(self):
return {
"_id": self._id,
"name": self.store,
"url": self.url,
"desc": self.desc,
"target_price": self.target_price
}
def delete(self):
Database.remove('items_test', {'_id': self._id})
@staticmethod
def get_all_items():
return [elem for elem in Database.find('items_test', {})]
@staticmethod
def get_by_id(id):
return Database.find_one('items_test', {"_id": id})
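# --- demo usage below (script is marked as under development above) ---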
Database.initialize()
# add new item
# new_item = Items('amazon', 'url', 'desc1', '30')
# new_item.save_to_mongo()
# print(len(new_item.get_all_items()))
all_items = Database.find('items_test',{})
for item in all_items:
print(item["_id"])
print(item["name"])
print(item["url"])
# get by id
print(Items.get_by_id('67913520e1af4ca2b0ed7f9abb5b5019'))
# delete item
Items.delete()
# total count
print(len(Items.get_all_items()))
| [
"[email protected]"
] | |
c389303e3146bc35ff821cb1d46e512bb30de237 | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/AnttechMorseMarketingSrtaConsultModel.py | ed15c1198c9962d4b975315a6f97b110b2a9d905 | [
"Apache-2.0"
] | permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 3,452 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class AnttechMorseMarketingSrtaConsultModel(object):
def __init__(self):
self._anonymous_mobile_sha_256_list = None
self._blind_mobile_sha_256 = None
self._extend_params = None
self._order_amount = None
self._resource_id = None
@property
def anonymous_mobile_sha_256_list(self):
return self._anonymous_mobile_sha_256_list
@anonymous_mobile_sha_256_list.setter
def anonymous_mobile_sha_256_list(self, value):
self._anonymous_mobile_sha_256_list = value
@property
def blind_mobile_sha_256(self):
return self._blind_mobile_sha_256
@blind_mobile_sha_256.setter
def blind_mobile_sha_256(self, value):
self._blind_mobile_sha_256 = value
@property
def extend_params(self):
return self._extend_params
@extend_params.setter
def extend_params(self, value):
self._extend_params = value
@property
def order_amount(self):
return self._order_amount
@order_amount.setter
def order_amount(self, value):
self._order_amount = value
@property
def resource_id(self):
return self._resource_id
@resource_id.setter
def resource_id(self, value):
self._resource_id = value
def to_alipay_dict(self):
params = dict()
if self.anonymous_mobile_sha_256_list:
if hasattr(self.anonymous_mobile_sha_256_list, 'to_alipay_dict'):
params['anonymous_mobile_sha_256_list'] = self.anonymous_mobile_sha_256_list.to_alipay_dict()
else:
params['anonymous_mobile_sha_256_list'] = self.anonymous_mobile_sha_256_list
if self.blind_mobile_sha_256:
if hasattr(self.blind_mobile_sha_256, 'to_alipay_dict'):
params['blind_mobile_sha_256'] = self.blind_mobile_sha_256.to_alipay_dict()
else:
params['blind_mobile_sha_256'] = self.blind_mobile_sha_256
if self.extend_params:
if hasattr(self.extend_params, 'to_alipay_dict'):
params['extend_params'] = self.extend_params.to_alipay_dict()
else:
params['extend_params'] = self.extend_params
if self.order_amount:
if hasattr(self.order_amount, 'to_alipay_dict'):
params['order_amount'] = self.order_amount.to_alipay_dict()
else:
params['order_amount'] = self.order_amount
if self.resource_id:
if hasattr(self.resource_id, 'to_alipay_dict'):
params['resource_id'] = self.resource_id.to_alipay_dict()
else:
params['resource_id'] = self.resource_id
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = AnttechMorseMarketingSrtaConsultModel()
if 'anonymous_mobile_sha_256_list' in d:
o.anonymous_mobile_sha_256_list = d['anonymous_mobile_sha_256_list']
if 'blind_mobile_sha_256' in d:
o.blind_mobile_sha_256 = d['blind_mobile_sha_256']
if 'extend_params' in d:
o.extend_params = d['extend_params']
if 'order_amount' in d:
o.order_amount = d['order_amount']
if 'resource_id' in d:
o.resource_id = d['resource_id']
return o
| [
"[email protected]"
] | |
c89f1e925348210ada55438f3e47f2b3572cbe03 | 0412893529999de784ab9cb914f385ba788a3684 | /test/test_ack_collector_down.py | af6a6ac2ccc5999ccfadc2c84f1e1ec9cacdf9c9 | [
"Apache-2.0"
] | permissive | JeremyTangCD/lm-sdk-python | 0326bf034c16b022b760600dc18fe7aaad42fa26 | 2a15e055e5a3f72d2f2e4fb43bdbed203c5a9983 | refs/heads/master | 2020-04-15T15:39:59.276224 | 2019-01-09T09:55:36 | 2019-01-09T09:55:36 | 164,803,314 | 0 | 0 | Apache-2.0 | 2019-01-09T09:58:55 | 2019-01-09T06:33:40 | Python | UTF-8 | Python | false | false | 1,154 | py | # coding: utf-8
"""
LogicMonitor REST API
LogicMonitor is a SaaS-based performance monitoring platform that provides full visibility into complex, hybrid infrastructures, offering granular performance monitoring and actionable data and insights. logicmonitor_sdk enables you to manage your LogicMonitor account programmatically. # noqa: E501
OpenAPI spec version: 1.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import logicmonitor_sdk
from logicmonitor_sdk.models.ack_collector_down import AckCollectorDown # noqa: E501
from logicmonitor_sdk.rest import ApiException
class TestAckCollectorDown(unittest.TestCase):
"""AckCollectorDown unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testAckCollectorDown(self):
"""Test AckCollectorDown"""
# FIXME: construct object with mandatory attributes with example values
# model = logicmonitor_sdk.models.ack_collector_down.AckCollectorDown() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
3c2ddbefb534733402dab2315f80ebe6a3f1e70b | 4f4ecdacdd57fddfec039439589472382875c539 | /arelle/ModelRenderingObject.py | b01745af7d306800437c03ceca950e84d7240f08 | [
"Apache-2.0"
] | permissive | irjudson/Arelle | 0fadce7cf36d41115b1e833c9e30fb717c120613 | d03be32dce33c34c3388e54afbe837bf83a4ff48 | refs/heads/master | 2020-05-29T11:39:16.391796 | 2013-11-07T23:00:14 | 2013-11-07T23:00:14 | 1,867,690 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69,169 | py | '''
Created on Mar 7, 2011
@author: Mark V Systems Limited
(c) Copyright 2011 Mark V Systems Limited, All rights reserved.
'''
import inspect, os
from arelle import XmlUtil, XbrlConst, XPathParser, Locale, XPathContext
from arelle.ModelDtsObject import ModelResource
from arelle.ModelInstanceObject import ModelDimensionValue
from arelle.ModelValue import qname, QName
from arelle.ModelObject import ModelObject
from arelle.ModelFormulaObject import (Trace, ModelFormulaResource, ModelFormulaRules, ModelConceptName,
ModelParameter, Aspect, aspectStr)
from arelle.ModelInstanceObject import ModelFact
from arelle.FormulaEvaluator import (filterFacts as formulaEvaluatorFilterFacts,
aspectsMatch, factsPartitions, VariableBinding)
from arelle.PrototypeInstanceObject import FactPrototype
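# Roll-up analysis states for a structural subtree (see StructuralNode.subtreeRollUp):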
ROLLUP_NOT_ANALYZED = 0
CHILD_ROLLUP_FIRST = 1
CHILD_ROLLUP_LAST = 2
CHILDREN_BUT_NO_ROLLUP = 3
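# Placeholder header text for open aspect entry nodes; a high code point so these entries sort after ordinary header text (see StructuralNode.header()).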
OPEN_ASPECT_ENTRY_SURROGATE = '\uDBFF'
EMPTY_SET = set()
def definitionNodes(nodes):
    return [(node.definitionNode if isinstance(node, StructuralNode) else node) for node in nodes]
# table linkbase structural nodes for rendering
class StructuralNode:
def __init__(self, parentStructuralNode, definitionNode, zInheritance=None, contextItemFact=None, breakdownTableNode=None):
self.parentStructuralNode = parentStructuralNode
self._definitionNode = definitionNode
self._rendrCntx = getattr(definitionNode.modelXbrl, "rendrCntx", None) # None for EU 2010 table linkbases
self.variables = {}
self.aspects = {}
self.childStructuralNodes = []
self.rollUpStructuralNode = None
self.choiceStructuralNodes = []
self.zInheritance = zInheritance
if contextItemFact is not None:
self.contextItemBinding = VariableBinding(self._rendrCntx,
boundFact=contextItemFact)
if isinstance(self.contextItemBinding.yieldedFact, FactPrototype):
for aspect in definitionNode.aspectsCovered():
if aspect != Aspect.DIMENSIONS:
self.aspectEntryObjectId = self.aspects[aspect] = contextItemFact.aspectEntryObjectId
break
else:
self.contextItemBinding = None
self.subtreeRollUp = ROLLUP_NOT_ANALYZED
self.depth = parentStructuralNode.depth + 1 if parentStructuralNode else 0
if breakdownTableNode is not None:
self.breakdownTableNode = breakdownTableNode
self.tagSelector = definitionNode.tagSelector
self.isLabeled = True
@property
def modelXbrl(self):
return self._definitionNode.modelXbrl
@property
def isAbstract(self):
if self.subtreeRollUp:
return self.subtreeRollUp == CHILDREN_BUT_NO_ROLLUP
try:
try:
return self.abstract # ordinate may have an abstract attribute
except AttributeError: # if none use axis object
return self.definitionNode.isAbstract
except AttributeError: # axis may never be abstract
return False
@property
def isRollUp(self):
return self.definitionNode.isRollUp
@property
def cardinalityAndDepth(self):
return self.definitionNode.cardinalityAndDepth(self)
@property
def structuralDepth(self):
if self.parentStructuralNode is not None:
return self.parentStructuralNode.structuralDepth + 1
return 0
@property
def definitionNode(self):
if self.choiceStructuralNodes:
return self.choiceStructuralNodes[getattr(self,"choiceNodeIndex",0)]._definitionNode
return self._definitionNode
def breakdownNode(self, tableELR):
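# Ascend the breakdown-tree / definition-node-subtree relationships from this node's definition node until the owning ModelBreakdown is found.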
definitionNode = self._definitionNode
if isinstance(definitionNode, ModelBreakdown):
return definitionNode
axisSubtreeRelSet = definitionNode.modelXbrl.relationshipSet((XbrlConst.tableBreakdownTree, XbrlConst.tableBreakdownTreeMMDD, XbrlConst.tableBreakdownTree201305, XbrlConst.tableDefinitionNodeSubtree, XbrlConst.tableDefinitionNodeSubtreeMMDD, XbrlConst.tableDefinitionNodeSubtree201305, XbrlConst.tableDefinitionNodeSubtree201301, XbrlConst.tableAxisSubtree2011), tableELR)
while (True):
for parentRel in axisSubtreeRelSet.toModelObject(definitionNode):
definitionNode = parentRel.fromModelObject
if isinstance(definitionNode, ModelBreakdown):
return definitionNode
break # recurse to move to this node's parent breakdown node
return definitionNode # give up here
def constraintSet(self, tagSelectors=None):
definitionNode = self.definitionNode
if tagSelectors:
for tag in tagSelectors:
if tag in definitionNode.constraintSets:
return definitionNode.constraintSets[tag]
return definitionNode.constraintSets.get(None) # returns None if no default constraint set
def aspectsCovered(self):
return _DICT_SET(self.aspects.keys()) | self.definitionNode.aspectsCovered()
def hasAspect(self, aspect, inherit=True):
return (aspect in self.aspects or
self.definitionNode.hasAspect(self, aspect) or
(inherit and
self.parentStructuralNode is not None and
self.parentStructuralNode.hasAspect(aspect, inherit)))
def aspectValue(self, aspect, inherit=True, dims=None, depth=0, tagSelectors=None):
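# Lookup order: explicit aspect values on this node, then the (tag-selected) constraint set, with special cases for selection, filter and tuple nodes, then, if inherit, ancestor structural nodes; DIMENSIONS accumulates across levels and honors OMIT_DIMENSIONS.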
xc = self._rendrCntx
if self.choiceStructuralNodes: # use aspects from choice structural node
chosenStructuralNode = self.choiceStructuralNodes[getattr(self,"choiceNodeIndex",0)]
aspects = chosenStructuralNode.aspects
definitionNode = chosenStructuralNode._definitionNode
contextItemBinding = chosenStructuralNode.contextItemBinding
else:
aspects = self.aspects
definitionNode = self._definitionNode
contextItemBinding = self.contextItemBinding
constraintSet = self.constraintSet(tagSelectors)
if aspect == Aspect.DIMENSIONS:
if dims is None: dims = set()
if inherit and self.parentStructuralNode is not None:
dims |= self.parentStructuralNode.aspectValue(aspect, dims=dims, depth=depth+1)
if aspect in aspects:
dims |= aspects[aspect]
elif constraintSet is not None and constraintSet.hasAspect(self, aspect):
dims |= set(definitionNode.aspectValue(xc, aspect) or {})
if constraintSet is not None and constraintSet.hasAspect(self, Aspect.OMIT_DIMENSIONS):
dims -= set(constraintSet.aspectValue(xc, Aspect.OMIT_DIMENSIONS))
return dims
if aspect in aspects:
return aspects[aspect]
elif constraintSet is not None and constraintSet.hasAspect(self, aspect):
if isinstance(definitionNode, ModelSelectionDefinitionNode):
# result is in the indicated variable of ordCntx
return self.variables.get(self._definitionNode.variableQname)
elif isinstance(definitionNode, ModelFilterDefinitionNode):
if contextItemBinding:
return contextItemBinding.aspectValue(aspect)
elif isinstance(definitionNode, ModelTupleDefinitionNode):
if aspect == Aspect.LOCATION and contextItemBinding:
return contextItemBinding.yieldedFact
# non-location tuple aspects don't leak into cell bindings
else:
return constraintSet.aspectValue(xc, aspect)
if inherit and self.parentStructuralNode is not None:
return self.parentStructuralNode.aspectValue(aspect, depth=depth+1)
return None
'''
@property
    def primaryItemQname(self): # for compatibility with viewRelationships
if Aspect.CONCEPT in self.aspects:
return self.aspects[Aspect.CONCEPT]
return self.definitionNode.primaryItemQname
@property
def explicitDims(self):
return self.definitionNode.explicitDims
'''
def objectId(self, refId=""):
return self._definitionNode.objectId(refId)
def header(self, role=None, lang=None, evaluate=True, returnGenLabel=True, returnMsgFormatString=False):
    # if this ordinate is a nested selectionAxis selection, use selection-message or text contents instead of axis headers
isZSelection = isinstance(self._definitionNode, ModelSelectionDefinitionNode) and hasattr(self, "zSelection")
if role is None:
# check for message before checking for genLabel
msgsRelationshipSet = self._definitionNode.modelXbrl.relationshipSet(
(XbrlConst.tableDefinitionNodeSelectionMessage201301, XbrlConst.tableAxisSelectionMessage2011)
if isZSelection else
(XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011))
if msgsRelationshipSet:
msg = msgsRelationshipSet.label(self._definitionNode, XbrlConst.standardMessage, lang, returnText=False)
if msg is not None:
if evaluate:
if returnMsgFormatString:
return msg.formatString # not possible to evaluate (during resolution)
else:
return self.evaluate(msg, msg.evaluate)
else:
return XmlUtil.text(msg)
if isZSelection: # no message, return text of selection
return self.variables.get(self._definitionNode.variableQname, "selection")
if returnGenLabel:
label = self._definitionNode.genLabel(role=role, lang=lang)
if label:
return label
if self.isEntryAspect:
    # True if open node bound to a prototype, false if bound to a real fact
return OPEN_ASPECT_ENTRY_SURROGATE # sort pretty high, work ok for python 2.7/3.2 as well as 3.3
# if there's a child roll up, check for it
if self.rollUpStructuralNode is not None: # check the rolling-up child too
return self.rollUpStructuralNode.header(role, lang, evaluate, returnGenLabel, returnMsgFormatString)
# if aspect is a concept of dimension, return its standard label
concept = None
for aspect in self.aspectsCovered():
aspectValue = self.aspectValue(aspect)
if isinstance(aspect, QName) or aspect == Aspect.CONCEPT: # dimension or concept
if isinstance(aspectValue, QName):
concept = self.modelXbrl.qnameConcepts[aspectValue]
break
elif isinstance(aspectValue, ModelDimensionValue):
if aspectValue.isExplicit:
concept = aspectValue.member
elif aspectValue.isTyped:
return XmlUtil.innerTextList(aspectValue.typedMember)
elif isinstance(aspectValue, ModelObject):
text = XmlUtil.innerTextList(aspectValue)
if not text and XmlUtil.hasChild(aspectValue, aspectValue.namespaceURI, "forever"):
text = "forever"
return text
if concept is not None:
label = concept.label(lang=lang)
if label:
return label
# if there is a role, check if it's available on a parent node
if role and self.parentStructuralNode is not None:
return self.parentStructuralNode.header(role, lang, evaluate, returnGenLabel, returnMsgFormatString)
return None
def evaluate(self, evalObject, evalMethod, otherOrdinate=None, evalArgs=()):
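# Run evalMethod with this node's variables and the table parameters pushed into XPath scope; recursion via parent, other-axis ordinate or z-inheritance layers the full chain of bindings, and scope/context item are restored afterwards.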
xc = self._rendrCntx
if self.contextItemBinding and not isinstance(xc.contextItem, ModelFact):
previousContextItem = xc.contextItem # xbrli.xbrl
xc.contextItem = self.contextItemBinding.yieldedFact
else:
previousContextItem = None
if self.choiceStructuralNodes and hasattr(self,"choiceNodeIndex"):
variables = self.choiceStructuralNodes[self.choiceNodeIndex].variables
else:
variables = self.variables
removeVarQnames = []
for variablesItems in (self.tableDefinitionNode.parameters.items(), variables.items()):
for qn, value in variablesItems:
if qn not in xc.inScopeVars:
removeVarQnames.append(qn)
xc.inScopeVars[qn] = value
if self.parentStructuralNode is not None:
result = self.parentStructuralNode.evaluate(evalObject, evalMethod, otherOrdinate, evalArgs)
elif otherOrdinate is not None:
# recurse to other ordinate (which will recurse to z axis)
result = otherOrdinate.evaluate(evalObject, evalMethod, None, evalArgs)
elif self.zInheritance is not None:
result = self.zInheritance.evaluate(evalObject, evalMethod, None, evalArgs)
else:
try:
result = evalMethod(xc, *evalArgs)
except XPathContext.XPathException as err:
xc.modelXbrl.error(err.code,
_("%(element)s set %(xlinkLabel)s \nException: %(error)s"),
modelObject=evalObject, element=evalObject.localName,
xlinkLabel=evalObject.xlinkLabel, error=err.message)
result = ''
for qn in removeVarQnames:
xc.inScopeVars.pop(qn)
if previousContextItem is not None:
xc.contextItem = previousContextItem # xbrli.xbrl
return result
def hasValueExpression(self, otherAxisStructuralNode=None):
return (self.definitionNode.hasValueExpression or
(otherAxisStructuralNode is not None and otherAxisStructuralNode.definitionNode.hasValueExpression))
def evalValueExpression(self, fact, otherAxisStructuralNode=None):
for structuralNode in (self, otherAxisStructuralNode):
if structuralNode is not None and structuralNode.definitionNode.hasValueExpression:
return self.evaluate(self.definitionNode, structuralNode.definitionNode.evalValueExpression, otherAxisStructuralNode=otherAxisStructuralNode, evalArgs=(fact,))
return None
@property
def isEntryAspect(self):
# true if open node and bound to a fact prototype
return self.contextItemBinding is not None and isinstance(self.contextItemBinding.yieldedFact, FactPrototype)
def isEntryPrototype(self, default=False):
# true if all axis open nodes before this one are entry prototypes (or not open axes)
if self.contextItemBinding is not None:
    # True if open node bound to a prototype, false if bound to a real fact
return isinstance(self.contextItemBinding.yieldedFact, FactPrototype)
if self.parentStructuralNode is not None:
return self.parentStructuralNode.isEntryPrototype(default)
return default # nothing open to be bound to a fact
@property
def tableDefinitionNode(self):
if self.parentStructuralNode is None:
return self.breakdownTableNode
else:
return self.parentStructuralNode.tableDefinitionNode
@property
def tagSelectors(self):
try:
return self._tagSelectors
except AttributeError:
if self.parentStructuralNode is None:
self._tagSelectors = set()
else:
self._tagSelectors = self.parentStructuralNode.tagSelectors
if self.tagSelector:
self._tagSelectors.add(self.tagSelector)
return self._tagSelectors
@property
def leafNodeCount(self):
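# Number of leaf positions this subtree occupies; a non-abstract closed node that has children contributes one extra position for its roll-up.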
childLeafCount = 0
for childStructuralNode in self.childStructuralNodes:
childLeafCount += childStructuralNode.leafNodeCount
if childLeafCount == 0:
return 1
if not self.isAbstract and isinstance(self.definitionNode, (ModelClosedDefinitionNode, ModelEuAxisCoord)):
childLeafCount += 1 # has a roll up
return childLeafCount
def setHasOpenNode(self):
if self.parentStructuralNode is not None:
self.parentStructuralNode.setHasOpenNode()
else:
self.hasOpenNode = True
def inheritedPrimaryItemQname(self, view):
    return (self.primaryItemQname or (self.parentStructuralNode.inheritedPrimaryItemQname(view) if self.parentStructuralNode is not None else None))
def inheritedExplicitDims(self, view, dims=None, nested=False):
if dims is None: dims = {}
    if self.parentStructuralNode is not None:
self.parentStructuralNode.inheritedExplicitDims(view, dims, True)
for dim, mem in self.explicitDims:
dims[dim] = mem
if not nested:
return {(dim,mem) for dim,mem in dims.items() if mem != 'omit'}
def inheritedAspectValue(self, otherAxisStructuralNode,
view, aspect, tagSelectors,
xAspectStructuralNodes, yAspectStructuralNodes, zAspectStructuralNodes):
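# Among the x/y/z structural nodes covering this aspect, pick the applicable one (for LOCATION clashes, prefer a node whose value does not depend on variables) and evaluate expression-valued rules in this cell's scope.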
aspectStructuralNodes = xAspectStructuralNodes.get(aspect, EMPTY_SET) | yAspectStructuralNodes.get(aspect, EMPTY_SET) | zAspectStructuralNodes.get(aspect, EMPTY_SET)
structuralNode = None
if len(aspectStructuralNodes) == 1:
structuralNode = aspectStructuralNodes.pop()
elif len(aspectStructuralNodes) > 1:
if aspect == Aspect.LOCATION:
hasClash = False
for _aspectStructuralNode in aspectStructuralNodes:
if not _aspectStructuralNode.definitionNode.aspectValueDependsOnVars(aspect):
if structuralNode:
hasClash = True
else:
structuralNode = _aspectStructuralNode
else:
# take closest structural node
hasClash = True
''' reported in static analysis by RenderingEvaluator.py
if hasClash:
from arelle.ModelFormulaObject import aspectStr
view.modelXbrl.error("xbrlte:aspectClash",
_("Aspect %(aspect)s covered by multiple axes."),
modelObject=view.modelTable, aspect=aspectStr(aspect))
'''
if structuralNode:
definitionNodeConstraintSet = structuralNode.constraintSet(tagSelectors)
if definitionNodeConstraintSet is not None and definitionNodeConstraintSet.aspectValueDependsOnVars(aspect):
return self.evaluate(definitionNodeConstraintSet,
definitionNodeConstraintSet.aspectValue, # this passes a method
otherAxisStructuralNode=otherAxisStructuralNode,
evalArgs=(aspect,))
return structuralNode.aspectValue(aspect, tagSelectors=tagSelectors)
return None
def __repr__(self):
return ("structuralNode[{0}]{1})".format(self.objectId(),self.definitionNode))
# Root class for rendering is formula, to allow linked and nested compiled expressions
def definitionModelLabelsView(mdlObj):
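# Property-view rows for an object's generic labels and references, sorted by element name, order, role basename and language.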
return tuple(sorted([("{} {} {} {}".format(label.localName,
str(rel.order).rstrip("0").rstrip("."),
os.path.basename(label.role),
label.xmlLang),
label.stringValue)
for rel in mdlObj.modelXbrl.relationshipSet((XbrlConst.elementLabel,XbrlConst.elementReference)).fromModelObject(mdlObj)
for label in (rel.toModelObject,)] +
[("xlink:label", mdlObj.xlinkLabel)]))
# 2010 EU Table linkbase
class ModelEuTable(ModelResource):
def init(self, modelDocument):
super(ModelEuTable, self).init(modelDocument)
self.aspectsInTaggedConstraintSets = set()
@property
def aspectModel(self):
return "dimensional"
@property
def propertyView(self):
return ((("id", self.id),) +
self.definitionLabelsView)
def header(self, role=None, lang=None, strip=False, evaluate=True):
return self.genLabel(role=role, lang=lang, strip=strip)
@property
def parameters(self):
return {}
@property
def definitionLabelsView(self):
return definitionModelLabelsView(self)
def __repr__(self):
return ("table[{0}]{1})".format(self.objectId(),self.propertyView))
class ModelEuAxisCoord(ModelResource):
def init(self, modelDocument):
super(ModelEuAxisCoord, self).init(modelDocument)
@property
def abstract(self):
return self.get("abstract") or 'false'
@property
def isAbstract(self):
return self.abstract == "true"
@property
def isMerged(self):
return False
@property
def parentChildOrder(self):
return self.get("parentChildOrder")
@property
def isRollUp(self):
return False
@property
def parentDefinitionNode(self):
try:
return self._parentDefinitionNode
except AttributeError:
parentDefinitionNode = None
for rel in self.modelXbrl.relationshipSet(XbrlConst.euAxisMember).toModelObject(self):
parentDefinitionNode = rel.fromModelObject
break
self._parentDefinitionNode = parentDefinitionNode
return parentDefinitionNode
def aspectsCovered(self):
aspectsCovered = set()
if XmlUtil.hasChild(self, XbrlConst.euRend, "primaryItem"):
aspectsCovered.add(Aspect.CONCEPT)
if XmlUtil.hasChild(self, XbrlConst.euRend, "timeReference"):
aspectsCovered.add(Aspect.INSTANT)
for e in XmlUtil.children(self, XbrlConst.euRend, "explicitDimCoord"):
aspectsCovered.add(self.prefixedNameQname(e.get("dimension")))
return aspectsCovered
@property
def constraintSets(self):
return {None: self}
@property
def tagSelector(self): # default constraint set for ruleNode has name None
return None
def hasAspect(self, structuralNode, aspect):
if aspect == Aspect.CONCEPT:
return XmlUtil.hasChild(self, XbrlConst.euRend, "primaryItem")
elif aspect == Aspect.DIMENSIONS:
return XmlUtil.hasChild(self, XbrlConst.euRend, "explicitDimCoord")
elif aspect in (Aspect.PERIOD_TYPE, Aspect.INSTANT):
return XmlUtil.hasChild(self, XbrlConst.euRend, "timeReference")
elif isinstance(aspect, QName):
for e in XmlUtil.children(self, XbrlConst.euRend, "explicitDimCoord"):
if self.prefixedNameQname(e.get("dimension")) == aspect:
return True
return False
def aspectValueDependsOnVars(self, aspect):
return False
def aspectValue(self, xpCtx, aspect, inherit=False):
if aspect == Aspect.DIMENSIONS:
dims = set(self.prefixedNameQname(e.get("dimension"))
for e in XmlUtil.children(self, XbrlConst.euRend, "explicitDimCoord"))
if inherit and self.parentDefinitionNode is not None:
dims |= self.parentDefinitionNode.aspectValue(None, aspect, inherit)
return dims
if inherit and not self.hasAspect(None, aspect):
if self.parentDefinitionNode is not None:
return self.parentDefinitionNode.aspectValue(None, aspect, inherit)
return None
if aspect == Aspect.CONCEPT:
priItem = XmlUtil.childAttr(self, XbrlConst.euRend, "primaryItem", "name")
if priItem is not None:
return self.prefixedNameQname(priItem)
return None
elif aspect == Aspect.PERIOD_TYPE:
if XmlUtil.hasChild(self, XbrlConst.euRend, "timeReference"):
return "instant"
elif aspect == Aspect.INSTANT:
return XmlUtil.datetimeValue(XmlUtil.childAttr(self, XbrlConst.euRend, "timeReference", "instant"),
addOneDay=True)
elif isinstance(aspect, QName):
for e in XmlUtil.children(self, XbrlConst.euRend, "explicitDimCoord"):
if self.prefixedNameQname(e.get("dimension")) == aspect:
return self.prefixedNameQname(e.get("value"))
return None
'''
@property
def primaryItemQname(self):
priItem = XmlUtil.childAttr(self, XbrlConst.euRend, "primaryItem", "name")
if priItem is not None:
return self.prefixedNameQname(priItem)
return None
@property
def explicitDims(self):
return {(self.prefixedNameQname(e.get("dimension")),
self.prefixedNameQname(e.get("value")))
for e in XmlUtil.children(self, XbrlConst.euRend, "explicitDimCoord")}
@property
def instant(self):
return XmlUtil.datetimeValue(XmlUtil.childAttr(self, XbrlConst.euRend, "timeReference", "instant"),
addOneDay=True)
'''
def cardinalityAndDepth(self, structuralNode):
return (1, 1)
def header(self, role=None, lang=None, strip=False, evaluate=True):
return self.genLabel(role=role, lang=lang, strip=strip)
@property
def hasValueExpression(self):
return False
@property
def definitionLabelsView(self):
return definitionModelLabelsView(self)
@property
def propertyView(self):
explicitDims = self.aspectValue(None, Aspect.DIMENSIONS, inherit=True)
return ((("id", self.id),
("primary item", self.aspectValue(None, Aspect.CONCEPT, inherit=True)),
("dimensions", "({0})".format(len(explicitDims)),
tuple((str(dim),str(self.aspectValue(None, dim, inherit=True)))
for dim in sorted(explicitDims)))
if explicitDims else (),
("abstract", self.abstract)) +
self.definitionLabelsView)
def __repr__(self):
return ("axisCoord[{0}]{1})".format(self.objectId(),self.propertyView))
# 2011 Table linkbase
class ModelTable(ModelFormulaResource):
def init(self, modelDocument):
super(ModelTable, self).init(modelDocument)
self.modelXbrl.modelRenderingTables.add(self)
self.modelXbrl.hasRenderingTables = True
self.aspectsInTaggedConstraintSets = set()
@property
def aspectModel(self):
return self.get("aspectModel", "dimensional") # attribute removed 2013-06, always dimensional
@property
def descendantArcroles(self):
return (XbrlConst.tableFilter, XbrlConst.tableFilterMMDD, XbrlConst.tableFilter201305, XbrlConst.tableFilter201301, XbrlConst.tableFilter2011,
XbrlConst.tableBreakdown, XbrlConst.tableBreakdownMMDD, XbrlConst.tableBreakdown201305, XbrlConst.tableBreakdown201301, XbrlConst.tableAxis2011,
XbrlConst.tableParameter, XbrlConst.tableParameterMMDD)
@property
def filterRelationships(self):
try:
return self._filterRelationships
except AttributeError:
rels = [] # order so conceptName filter is first (if any) (may want more sorting in future)
for rel in self.modelXbrl.relationshipSet((XbrlConst.tableFilter, XbrlConst.tableFilterMMDD, XbrlConst.tableFilter201305, XbrlConst.tableFilter201301, XbrlConst.tableFilter2011)).fromModelObject(self):
if isinstance(rel.toModelObject, ModelConceptName):
rels.insert(0, rel) # put conceptName filters first
else:
rels.append(rel)
self._filterRelationships = rels
return rels
@property
def parameters(self):
try:
return self._parameters
except AttributeError:
self._parameters = {}
xc = self.modelXbrl.rendrCntx
for rel in self.modelXbrl.relationshipSet((XbrlConst.tableParameter, XbrlConst.tableParameterMMDD)).fromModelObject(self):
if isinstance(rel.toModelObject, ModelParameter):
varQname = rel.variableQname
parameter = rel.toModelObject
if isinstance(parameter, ModelParameter):
    self._parameters[varQname] = xc.inScopeVars.get(parameter.qname)
return self._parameters
def header(self, role=None, lang=None, strip=False, evaluate=True):
return self.genLabel(role=role, lang=lang, strip=strip)
@property
def definitionLabelsView(self):
return definitionModelLabelsView(self)
@property
def propertyView(self):
return ((("id", self.id),) +
self.definitionLabelsView)
def __repr__(self):
return ("modlTable[{0}]{1})".format(self.objectId(),self.propertyView))
class ModelDefinitionNode(ModelFormulaResource):
def init(self, modelDocument):
super(ModelDefinitionNode, self).init(modelDocument)
@property
def parentDefinitionNode(self):
return None
@property
def descendantArcroles(self):
return (XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011,
XbrlConst.tableDefinitionNodeSubtree201305,
XbrlConst.tableDefinitionNodeSubtree, XbrlConst.tableDefinitionNodeSubtreeMMDD)
def hasAspect(self, structuralNode, aspect):
return False
def aspectValueDependsOnVars(self, aspect):
return False
@property
def variablename(self):
"""(str) -- name attribute"""
return self.getStripped("name")
@property
def variableQname(self):
"""(QName) -- resolved name for an XPath bound result having a QName name attribute"""
varName = self.variablename
return qname(self, varName, noPrefixIsNoNamespace=True) if varName else None
def aspectValue(self, xpCtx, aspect, inherit=True):
if aspect == Aspect.DIMENSIONS:
return []
return None
def aspectsCovered(self):
return set()
@property
def constraintSets(self):
return {None: self}
@property
def tagSelector(self):
return self.get("tagSelector")
@property
def valueExpression(self):
return self.get("value")
@property
def hasValueExpression(self):
return bool(self.valueProg) # non empty program
def compile(self):
if not hasattr(self, "valueProg"):
value = self.valueExpression
self.valueProg = XPathParser.parse(self, value, self, "value", Trace.VARIABLE)
# duplicates formula resource for RuleAxis but not for other subclasses
super(ModelDefinitionNode, self).compile()
def evalValueExpression(self, xpCtx, fact):
# compiled by FormulaResource compile()
return xpCtx.evaluateAtomicValue(self.valueProg, 'xs:string', fact)
'''
@property
    def primaryItemQname(self): # for compatibility with viewRelationships
return None
@property
def explicitDims(self):
return set()
'''
@property
def isAbstract(self):
return False
@property
def isMerged(self):
return False
@property
def isRollUp(self):
return self.get("rollUp") == 'true'
def cardinalityAndDepth(self, structuralNode):
return (1,
1 if (structuralNode.header(evaluate=False) is not None) else 0)
def header(self, role=None, lang=None, strip=False, evaluate=True):
if role is None:
# check for message before checking for genLabel
msgsRelationshipSet = self.modelXbrl.relationshipSet((XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011))
if msgsRelationshipSet:
msg = msgsRelationshipSet.label(self, XbrlConst.standardMessage, lang, returnText=False)
if msg is not None:
if evaluate:
result = msg.evaluate(self.modelXbrl.rendrCntx)
else:
result = XmlUtil.text(msg)
if strip:
return result.strip()
return result
return self.genLabel(role=role, lang=lang, strip=strip)
@property
def definitionNodeView(self):
return XmlUtil.xmlstring(self, stripXmlns=True, prettyPrint=True)
@property
def definitionLabelsView(self):
return definitionModelLabelsView(self)
class ModelBreakdown(ModelDefinitionNode):
def init(self, modelDocument):
super(ModelBreakdown, self).init(modelDocument)
@property
def parentChildOrder(self):
return self.get("parentChildOrder")
@property
def descendantArcroles(self):
return (XbrlConst.tableBreakdownTree, XbrlConst.tableBreakdownTreeMMDD, XbrlConst.tableBreakdownTree201305)
@property
def propertyView(self):
return ((("id", self.id),
("parent child order", self.parentChildOrder),
("definition", self.definitionNodeView)) +
self.definitionLabelsView)
class ModelClosedDefinitionNode(ModelDefinitionNode):
def init(self, modelDocument):
super(ModelClosedDefinitionNode, self).init(modelDocument)
@property
def abstract(self):
return self.get("abstract")
@property
def isAbstract(self):
return self.abstract == 'true'
@property
def parentChildOrder(self):
return self.get("parentChildOrder")
@property
def descendantArcroles(self):
return (XbrlConst.tableDefinitionNodeSubtree, XbrlConst.tableDefinitionNodeSubtreeMMDD, XbrlConst.tableDefinitionNodeSubtree201305, XbrlConst.tableDefinitionNodeSubtree201301, XbrlConst.tableAxisSubtree2011, XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011)
def filteredFacts(self, xpCtx, facts):
aspects = self.aspectsCovered()
axisAspectValues = dict((aspect, self.aspectValue(xpCtx, aspect))
for aspect in aspects)
fp = FactPrototype(self, axisAspectValues)
return set(fact
for fact in facts
if aspectsMatch(xpCtx, fact, fp, aspects))
class ModelConstraintSet(ModelFormulaRules):
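# Mix-in shared by rule nodes and rule sets: formula-style aspect rules plus a LOCATION aspect that may be sourced from an in-scope variable (the parent tuple).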
def init(self, modelDocument):
super(ModelConstraintSet, self).init(modelDocument)
self._locationSourceVar = self.source(Aspect.LOCATION_RULE, acceptFormulaSource=False)
self._locationAspectCovered = set()
self.aspectValues = {} # only needed if error blocks compiling this node, replaced by compile()
self.aspectProgs = {} # ditto
if self._locationSourceVar: self._locationAspectCovered.add(Aspect.LOCATION) # location is parent (tuple), not sibling
def hasAspect(self, structuralNode, aspect, inherit=None):
return self._hasAspect(structuralNode, aspect, inherit)
def _hasAspect(self, structuralNode, aspect, inherit=None): # opaque from ModelRuleDefinitionNode
if aspect == Aspect.LOCATION and self._locationSourceVar:
return True
return self.hasRule(aspect)
def aspectValue(self, xpCtx, aspect, inherit=None):
try:
if xpCtx is None: xpCtx = self.modelXbrl.rendrCntx
if aspect == Aspect.LOCATION and self._locationSourceVar in xpCtx.inScopeVars:
return xpCtx.inScopeVars[self._locationSourceVar]
return self.evaluateRule(xpCtx, aspect)
except AttributeError:
return '(unavailable)' # table defective or not initialized
def aspectValueDependsOnVars(self, aspect):
return aspect in _DICT_SET(self.aspectProgs.keys()) or aspect in self._locationAspectCovered
def aspectsCovered(self):
return _DICT_SET(self.aspectValues.keys()) | _DICT_SET(self.aspectProgs.keys()) | self._locationAspectCovered
# provide model table's aspect model to compile() method of ModelFormulaRules
@property
def aspectModel(self):
for frameRecord in inspect.stack():
obj = frameRecord[0].f_locals['self']
if isinstance(obj,ModelTable):
return obj.aspectModel
return None
'''
@property
def primaryItemQname(self):
return self.evaluateRule(self.modelXbrl.rendrCntx, Aspect.CONCEPT)
@property
def explicitDims(self):
dimMemSet = set()
dims = self.evaluateRule(self.modelXbrl.rendrCntx, Aspect.DIMENSIONS)
if dims: # may be none if no dim aspects on this ruleAxis
for dim in dims:
mem = self.evaluateRule(self.modelXbrl.rendrCntx, dim)
if mem: # may be none if dimension was omitted
dimMemSet.add( (dim, mem) )
return dimMemSet
@property
def instant(self):
periodType = self.evaluateRule(self.modelXbrl.rendrCntx, Aspect.PERIOD_TYPE)
if periodType == "forever":
return None
return self.evaluateRule(self.modelXbrl.rendrCntx,
{"instant": Aspect.INSTANT,
"duration": Aspect.END}[periodType])
'''
def cardinalityAndDepth(self, structuralNode):
if self.aspectValues or self.aspectProgs or structuralNode.header(evaluate=False) is not None:
return (1, 1)
else:
return (0, 0)
class ModelRuleSet(ModelConstraintSet, ModelFormulaResource):
def init(self, modelDocument):
super(ModelRuleSet, self).init(modelDocument)
@property
def tagName(self): # can't call it tag because that would hide ElementBase.tag
return self.get("tag")
class ModelRuleDefinitionNode(ModelConstraintSet, ModelClosedDefinitionNode):
def init(self, modelDocument):
super(ModelRuleDefinitionNode, self).init(modelDocument)
@property
def merge(self):
return self.get("merge")
@property
def isMerged(self):
return self.merge == "true"
@property
def constraintSets(self):
try:
return self._constraintSets
except AttributeError:
self._constraintSets = dict((ruleSet.tagName, ruleSet)
for ruleSet in XmlUtil.children(self, self.namespaceURI, "ruleSet"))
if self.aspectsCovered(): # any local rule?
self._constraintSets[None] = self
return self._constraintSets
def hasAspect(self, structuralNode, aspect):
return any(constraintSet._hasAspect(structuralNode, aspect)
for constraintSet in self.constraintSets.values())
@property
def aspectsInTaggedConstraintSet(self):
try:
return self._aspectsInTaggedConstraintSet
except AttributeError:
self._aspectsInTaggedConstraintSet = set()
    for tag, constraintSet in self.constraintSets.items():
if tag is not None:
for aspect in constraintSet.aspectsCovered():
if aspect != Aspect.DIMENSIONS:
self._aspectsInTaggedConstraintSet.add(aspect)
return self._aspectsInTaggedConstraintSet
def compile(self):
super(ModelRuleDefinitionNode, self).compile()
for constraintSet in self.constraintSets.values():
if constraintSet != self: # compile nested constraint sets
constraintSet.compile()
@property
def propertyView(self):
return ((("id", self.id),
("abstract", self.abstract),
("merge", self.merge),
("definition", self.definitionNodeView)) +
self.definitionLabelsView)
def __repr__(self):
return ("modelRuleDefinitionNode[{0}]{1})".format(self.objectId(),self.propertyView))
# deprecated 2013-05-17
class ModelTupleDefinitionNode(ModelRuleDefinitionNode):
def init(self, modelDocument):
super(ModelTupleDefinitionNode, self).init(modelDocument)
@property
def descendantArcroles(self):
return (XbrlConst.tableTupleContent201301, XbrlConst.tableTupleContent2011, XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011)
@property
def contentRelationships(self):
return self.modelXbrl.relationshipSet((XbrlConst.tableTupleContent201301, XbrlConst.tableTupleContent2011)).fromModelObject(self)
def hasAspect(self, structuralNode, aspect, inherit=None):
    return aspect == Aspect.LOCATION # non-location aspects aren't leaked to the ordinate for tuples (alternatively: or self.hasRule(aspect))
def aspectValue(self, xpCtx, aspect, inherit=None):
return self.evaluateRule(xpCtx, aspect)
def aspectsCovered(self):
return {Aspect.LOCATION} # tuple's aspects don't leak to ordinates
def tupleAspectsCovered(self):
return _DICT_SET(self.aspectValues.keys()) | _DICT_SET(self.aspectProgs.keys()) | {Aspect.LOCATION}
def filteredFacts(self, xpCtx, facts):
aspects = self.aspectsCovered()
    axisAspectValues = dict((aspect, self.aspectValue(xpCtx, aspect))
for aspect in aspects
if aspect != Aspect.LOCATION) # location determined by ordCntx, not axis
fp = FactPrototype(self, axisAspectValues)
return set(fact
for fact in facts
if fact.isTuple and aspectsMatch(xpCtx, fact, fp, aspects))
class ModelCompositionDefinitionNode(ModelClosedDefinitionNode):
def init(self, modelDocument):
super(ModelCompositionDefinitionNode, self).init(modelDocument)
@property
def abstract(self): # always abstract, no filters, no data
return 'true'
class ModelRelationshipDefinitionNode(ModelClosedDefinitionNode):
def init(self, modelDocument):
super(ModelRelationshipDefinitionNode, self).init(modelDocument)
def aspectsCovered(self):
return {Aspect.CONCEPT}
@property
def conceptQname(self):
name = self.getStripped("conceptname")
return qname(self, name, noPrefixIsNoNamespace=True) if name else None
@property
def relationshipSourceQname(self):
sourceQname = XmlUtil.child(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "relationshipSource")
if sourceQname is not None:
return qname( sourceQname, XmlUtil.text(sourceQname) )
return None
@property
def linkrole(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "linkrole")
@property
def axis(self):
a = XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), ("axis", "formulaAxis"))
    if not a: a = 'descendant' # a missing axis value would be an XML error; default to descendant
return a
@property
def isOrSelfAxis(self):
return self.axis.endswith('-or-self')
@property
def generations(self):
try:
return _INT( XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "generations") )
except (TypeError, ValueError):
if self.axis in ('sibling', 'child', 'parent'):
return 1
return 0
@property
def relationshipSourceQnameExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "relationshipSourceExpression")
@property
def linkroleExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "linkroleExpression")
@property
def axisExpression(self):
    return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), ("axisExpression", "formulaAxisExpression"))
@property
def generationsExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "generationsExpression")
def compile(self):
if not hasattr(self, "relationshipSourceQnameExpressionProg"):
self.relationshipSourceQnameExpressionProg = XPathParser.parse(self, self.relationshipSourceQnameExpression, self, "relationshipSourceQnameExpressionProg", Trace.VARIABLE)
self.linkroleExpressionProg = XPathParser.parse(self, self.linkroleExpression, self, "linkroleQnameExpressionProg", Trace.VARIABLE)
self.axisExpressionProg = XPathParser.parse(self, self.axisExpression, self, "axisExpressionProg", Trace.VARIABLE)
self.generationsExpressionProg = XPathParser.parse(self, self.generationsExpression, self, "generationsExpressionProg", Trace.VARIABLE)
super(ModelRelationshipDefinitionNode, self).compile()
def variableRefs(self, progs=[], varRefSet=None):
if self.relationshipSourceQname and self.relationshipSourceQname != XbrlConst.qnXfiRoot:
if varRefSet is None: varRefSet = set()
varRefSet.add(self.relationshipSourceQname)
return super(ModelRelationshipDefinitionNode, self).variableRefs(
[p for p in (self.relationshipSourceQnameExpressionProg,
self.linkroleExpressionProg, self.axisExpressionProg,
self.generationsExpressionProg)
if p], varRefSet)
    def evalRelationshipSourceQname(self, xpCtx, fact=None):
if self.relationshipSourceQname:
return self.relationshipSourceQname
return xpCtx.evaluateAtomicValue(self.relationshipSourceQnameExpressionProg, 'xs:QName', fact)
def evalLinkrole(self, xpCtx, fact=None):
if self.linkrole:
return self.linkrole
return xpCtx.evaluateAtomicValue(self.linkroleExpressionProg, 'xs:anyURI', fact)
def evalAxis(self, xpCtx, fact=None):
if self.axis:
return self.axis
return xpCtx.evaluateAtomicValue(self.axisExpressionProg, 'xs:token', fact)
def evalGenerations(self, xpCtx, fact=None):
if self.generations:
return self.generations
return xpCtx.evaluateAtomicValue(self.generationsExpressionProg, 'xs:integer', fact)
def cardinalityAndDepth(self, structuralNode):
return self.lenDepth(self.relationships(structuralNode),
self.axis.endswith('-or-self'))
def lenDepth(self, nestedRelationships, includeSelf):
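# Returns (node count, maximum nesting depth) of a nested relationship list as produced by concept_relationships().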
l = 0
d = 1
for rel in nestedRelationships:
if isinstance(rel, list):
nl, nd = self.lenDepth(rel, False)
l += nl
nd += 1 # returns 0 if sublist is not nested
if nd > d:
d = nd
else:
l += 1
if includeSelf:
l += 1 # root relationships include root in addition
if includeSelf:
d += 1
return (l, d)
@property
def propertyView(self):
return ((("id", self.id),
("abstract", self.abstract),
("definition", self.definitionNodeView)) +
self.definitionLabelsView)
def __repr__(self):
return ("modelRelationshipDefinitionNode[{0}]{1})".format(self.objectId(),self.propertyView))
class ModelConceptRelationshipDefinitionNode(ModelRelationshipDefinitionNode):
def init(self, modelDocument):
super(ModelConceptRelationshipDefinitionNode, self).init(modelDocument)
def hasAspect(self, structuralNode, aspect):
return aspect == Aspect.CONCEPT
@property
def arcrole(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "arcrole")
@property
def arcQname(self):
arcnameElt = XmlUtil.child(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "arcname")
if arcnameElt is not None:
return qname( arcnameElt, XmlUtil.text(arcnameElt) )
return None
@property
def linkQname(self):
linknameElt = XmlUtil.child(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "linkname")
if linknameElt is not None:
return qname( linknameElt, XmlUtil.text(linknameElt) )
return None
def compile(self):
if not hasattr(self, "arcroleExpressionProg"):
self.arcroleExpressionProg = XPathParser.parse(self, self.arcroleExpression, self, "arcroleExpressionProg", Trace.VARIABLE)
self.linkQnameExpressionProg = XPathParser.parse(self, self.linkQnameExpression, self, "linkQnameExpressionProg", Trace.VARIABLE)
self.arcQnameExpressionProg = XPathParser.parse(self, self.arcQnameExpression, self, "arcQnameExpressionProg", Trace.VARIABLE)
super(ModelConceptRelationshipDefinitionNode, self).compile()
def variableRefs(self, progs=[], varRefSet=None):
return super(ModelConceptRelationshipDefinitionNode, self).variableRefs(
[p for p in (self.arcroleExpressionProg,
self.linkQnameExpressionProg, self.arcQnameExpressionProg)
if p], varRefSet)
def evalArcrole(self, xpCtx, fact=None):
if self.arcrole:
return self.arcrole
return xpCtx.evaluateAtomicValue(self.arcroleExpressionProg, 'xs:anyURI', fact)
def evalLinkQname(self, xpCtx, fact=None):
if self.linkQname:
return self.linkQname
return xpCtx.evaluateAtomicValue(self.linkQnameExpressionProg, 'xs:QName', fact)
def evalArcQname(self, xpCtx, fact=None):
if self.arcQname:
return self.arcQname
return xpCtx.evaluateAtomicValue(self.arcQnameExpressionProg, 'xs:QName', fact)
@property
def arcroleExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "arcroleExpression")
@property
def linkQnameExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "linknameExpression")
@property
def arcQnameExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "arcnameExpression")
def coveredAspect(self, ordCntx=None):
return Aspect.CONCEPT
def relationships(self, structuralNode):
    self._sourceQname = structuralNode.evaluate(self, self.evalRelationshipSourceQname) or XbrlConst.qnXfiRoot
linkrole = structuralNode.evaluate(self, self.evalLinkrole)
if not linkrole:
linkrole = "XBRL-all-linkroles"
linkQname = (structuralNode.evaluate(self, self.evalLinkQname) or () )
arcrole = (structuralNode.evaluate(self, self.evalArcrole) or () )
arcQname = (structuralNode.evaluate(self, self.evalArcQname) or () )
self._axis = (structuralNode.evaluate(self, self.evalAxis) or () )
self._generations = (structuralNode.evaluate(self, self.evalGenerations) or () )
return concept_relationships(self.modelXbrl.rendrCntx,
None,
(self._sourceQname,
linkrole,
arcrole,
self._axis.replace('-or-self',''),
self._generations,
linkQname,
arcQname),
True) # return nested lists representing concept tree nesting
class ModelDimensionRelationshipDefinitionNode(ModelRelationshipDefinitionNode):
def init(self, modelDocument):
super(ModelDimensionRelationshipDefinitionNode, self).init(modelDocument)
def hasAspect(self, structuralNode, aspect):
return aspect == self.coveredAspect(structuralNode) or aspect == Aspect.DIMENSIONS
def aspectValue(self, xpCtx, aspect, inherit=None):
if aspect == Aspect.DIMENSIONS:
return (self.coveredAspect(xpCtx), )
return None
def aspectsCovered(self):
return {self.dimensionQname}
@property
def dimensionQname(self):
dimensionElt = XmlUtil.child(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "dimension")
if dimensionElt is not None:
return qname( dimensionElt, XmlUtil.text(dimensionElt) )
return None
@property
def dimensionQnameExpression(self):
return XmlUtil.childText(self, (XbrlConst.table, XbrlConst.tableMMDD, XbrlConst.table201305, XbrlConst.table201301, XbrlConst.table2011), "dimensionExpression")
def compile(self):
if not hasattr(self, "dimensionQnameExpressionProg"):
self.dimensionQnameExpressionProg = XPathParser.parse(self, self.dimensionQnameExpression, self, "dimensionQnameExpressionProg", Trace.VARIABLE)
super(ModelDimensionRelationshipDefinitionNode, self).compile()
def variableRefs(self, progs=[], varRefSet=None):
return super(ModelDimensionRelationshipDefinitionNode, self).variableRefs(self.dimensionQnameExpressionProg, varRefSet)
def evalDimensionQname(self, xpCtx, fact=None):
if self.dimensionQname:
return self.dimensionQname
return xpCtx.evaluateAtomicValue(self.dimensionQnameExpressionProg, 'xs:QName', fact)
def coveredAspect(self, structuralNode=None):
try:
return self._coveredAspect
except AttributeError:
self._coveredAspect = self.dimRelationships(structuralNode, getDimQname=True)
return self._coveredAspect
def relationships(self, structuralNode):
return self.dimRelationships(structuralNode, getMembers=True)
def dimRelationships(self, structuralNode, getMembers=False, getDimQname=False):
self._dimensionQname = structuralNode.evaluate(self, self.evalDimensionQname)
    self._sourceQname = structuralNode.evaluate(self, self.evalRelationshipSourceQname) or XbrlConst.qnXfiRoot
linkrole = structuralNode.evaluate(self, self.evalLinkrole)
if not linkrole and getMembers:
linkrole = "XBRL-all-linkroles"
dimConcept = self.modelXbrl.qnameConcepts.get(self._dimensionQname)
sourceConcept = self.modelXbrl.qnameConcepts.get(self._sourceQname)
self._axis = (structuralNode.evaluate(self, self.evalAxis) or () )
self._generations = (structuralNode.evaluate(self, self.evalGenerations) or () )
if ((self._dimensionQname and (dimConcept is None or not dimConcept.isDimensionItem)) or
(self._sourceQname and self._sourceQname != XbrlConst.qnXfiRoot and (
sourceConcept is None or not sourceConcept.isItem))):
return ()
if dimConcept is not None:
if getDimQname:
return self._dimensionQname
if sourceConcept is None:
sourceConcept = dimConcept
if getMembers:
return concept_relationships(self.modelXbrl.rendrCntx,
None,
(self._sourceQname,
linkrole,
"XBRL-dimensions", # all dimensions arcroles
self._axis.replace('-or-self',''),
self._generations),
True) # return nested lists representing concept tree nesting
if getDimQname:
if sourceConcept is not None:
# look back from member to a dimension
return self.stepDimRel(sourceConcept, linkrole)
return None
def stepDimRel(self, stepConcept, linkrole):
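# Walk XBRL-dimensions relationships backward from a member toward its dimension item, following consecutive linkroles.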
if stepConcept.isDimensionItem:
return stepConcept.qname
for rel in self.modelXbrl.relationshipSet("XBRL-dimensions").toModelObject(stepConcept):
if not linkrole or linkrole == rel.consecutiveLinkrole:
dim = self.stepDimRel(rel.fromModelObject, rel.linkrole)
if dim:
return dim
return None
coveredAspectToken = {"concept": Aspect.CONCEPT,
"entity-identifier": Aspect.VALUE,
"period-start": Aspect.START, "period-end": Aspect.END,
"period-instant": Aspect.INSTANT, "period-instant-end": Aspect.INSTANT_END,
"unit": Aspect.UNIT}
class ModelOpenDefinitionNode(ModelDefinitionNode):
def init(self, modelDocument):
super(ModelOpenDefinitionNode, self).init(modelDocument)
# deprecated 2013-05-17
class ModelSelectionDefinitionNode(ModelOpenDefinitionNode):
def init(self, modelDocument):
super(ModelSelectionDefinitionNode, self).init(modelDocument)
@property
def descendantArcroles(self):
return (XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011, XbrlConst.tableDefinitionNodeSelectionMessage201301, XbrlConst.tableAxisSelectionMessage2011)
def clear(self):
XPathParser.clearNamedProg(self, "selectProg")
super(ModelSelectionDefinitionNode, self).clear()
def coveredAspect(self, structuralNode=None):
try:
return self._coveredAspect
except AttributeError:
coveredAspect = self.get("coveredAspect")
if coveredAspect in coveredAspectToken:
self._coveredAspect = coveredAspectToken[coveredAspect]
else: # must be a qname
self._coveredAspect = qname(self, coveredAspect)
return self._coveredAspect
def aspectsCovered(self):
return {self.coveredAspect}
def hasAspect(self, structuralNode, aspect):
return aspect == self.coveredAspect() or (isinstance(self._coveredAspect,QName) and aspect == Aspect.DIMENSIONS)
@property
def select(self):
return self.get("select")
def compile(self):
if not hasattr(self, "selectProg"):
self.selectProg = XPathParser.parse(self, self.select, self, "select", Trace.PARAMETER)
super(ModelSelectionDefinitionNode, self).compile()
def variableRefs(self, progs=[], varRefSet=None):
return super(ModelSelectionDefinitionNode, self).variableRefs(self.selectProg, varRefSet)
def evaluate(self, xpCtx, typeQname=None):
if typeQname:
return xpCtx.evaluateAtomicValue(self.selectProg, typeQname)
else:
return xpCtx.flattenSequence(xpCtx.evaluate(self.selectProg, None))
aspectNodeAspectCovered = {"conceptAspect": Aspect.CONCEPT,
"unitAspect": Aspect.UNIT,
"entityIdentifierAspect": Aspect.ENTITY_IDENTIFIER,
"periodAspect": Aspect.PERIOD}
class ModelFilterDefinitionNode(ModelOpenDefinitionNode):
def init(self, modelDocument):
super(ModelFilterDefinitionNode, self).init(modelDocument)
@property
def descendantArcroles(self):
return (XbrlConst.tableAspectNodeFilter, XbrlConst.tableAspectNodeFilterMMDD, XbrlConst.tableAspectNodeFilter201305, XbrlConst.tableFilterNodeFilter2011, XbrlConst.tableAxisFilter2011,XbrlConst.tableAxisFilter201205, XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011,
XbrlConst.tableDefinitionNodeSubtree, XbrlConst.tableDefinitionNodeSubtreeMMDD, XbrlConst.tableDefinitionNodeSubtree201305, XbrlConst.tableDefinitionNodeSubtree201301, XbrlConst.tableAxisSubtree2011, XbrlConst.tableDefinitionNodeMessage201301, XbrlConst.tableAxisMessage2011)
@property
def filterRelationships(self):
try:
return self._filterRelationships
except AttributeError:
rels = [] # order so conceptName filter is first (if any) (may want more sorting in future)
for rel in self.modelXbrl.relationshipSet((XbrlConst.tableAspectNodeFilter, XbrlConst.tableAspectNodeFilterMMDD, XbrlConst.tableAspectNodeFilter201305, XbrlConst.tableFilterNodeFilter2011, XbrlConst.tableAxisFilter2011,XbrlConst.tableAxisFilter201205)).fromModelObject(self):
if isinstance(rel.toModelObject, ModelConceptName):
rels.insert(0, rel) # put conceptName filters first
else:
rels.append(rel)
self._filterRelationships = rels
return rels
def hasAspect(self, structuralNode, aspect):
return aspect in self.aspectsCovered()
def aspectsCovered(self, varBinding=None):
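# Cached: for post-2013-05-17 aspectNode elements the covered aspects come from the child aspect element; for earlier filter nodes they are derived from the covered filter relationships.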
try:
return self._aspectsCovered
except AttributeError:
self._aspectsCovered = set()
self._dimensionsCovered = set()
self.includeUnreportedValue = False
    if self.localName == "aspectNode": # after 2013-05-17
aspectElt = XmlUtil.child(self, self.namespaceURI, ("conceptAspect", "unitAspect", "entityIdentifierAspect", "periodAspect", "dimensionAspect"))
if aspectElt is not None:
if aspectElt.localName == "dimensionAspect":
dimQname = qname(aspectElt, aspectElt.textValue)
self._aspectsCovered.add(dimQname)
self._aspectsCovered.add(Aspect.DIMENSIONS)
self._dimensionsCovered.add(dimQname)
self.includeUnreportedValue = aspectElt.get("includeUnreportedValue") in ("true", "1")
else:
self._aspectsCovered.add(aspectNodeAspectCovered[aspectElt.localName])
else:
# filter node (prior to 2013-05-17)
for rel in self.filterRelationships:
if rel.isCovered:
_filter = rel.toModelObject
self._aspectsCovered |= _filter.aspectsCovered(varBinding)
self._dimensionsCovered = set(aspect for aspect in self._aspectsCovered if isinstance(aspect,QName))
if self._dimensionsCovered:
self._aspectsCovered.add(Aspect.DIMENSIONS)
return self._aspectsCovered
def aspectValue(self, xpCtx, aspect, inherit=None):
if aspect == Aspect.DIMENSIONS:
return self._dimensionsCovered
# does not apply to filter, value can only come from a bound fact
return None
def filteredFactsPartitions(self, xpCtx, facts):
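# Apply this node's filters to the facts, optionally drop facts that do not report every covered dimension, then partition the remainder by the covered aspects.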
filteredFacts = formulaEvaluatorFilterFacts(xpCtx, VariableBinding(xpCtx),
facts, self.filterRelationships, None)
if not self.includeUnreportedValue:
    # remove unreported value
reportedAspectFacts = set()
for fact in filteredFacts:
if all(fact.context is not None and
isinstance(fact.context.dimValue(dimAspect), ModelDimensionValue)
for dimAspect in self._dimensionsCovered):
reportedAspectFacts.add(fact)
else:
reportedAspectFacts = filteredFacts
return factsPartitions(xpCtx, reportedAspectFacts, self.aspectsCovered())
@property
def propertyView(self):
return ((("id", self.id),
("aspect", ", ".join(aspectStr(aspect)
for aspect in self.aspectsCovered()
if aspect != Aspect.DIMENSIONS)),
("definition", self.definitionNodeView)) +
self.definitionLabelsView)
from arelle.ModelObjectFactory import elementSubstitutionModelClass
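# Register each table-linkbase element QName, per specification vintage, against its model class.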
elementSubstitutionModelClass.update((
# IWD
(XbrlConst.qnTableTableMMDD, ModelTable),
(XbrlConst.qnTableBreakdownMMDD, ModelBreakdown),
(XbrlConst.qnTableRuleSetMMDD, ModelRuleSet),
(XbrlConst.qnTableRuleNodeMMDD, ModelRuleDefinitionNode),
(XbrlConst.qnTableConceptRelationshipNodeMMDD, ModelConceptRelationshipDefinitionNode),
(XbrlConst.qnTableDimensionRelationshipNodeMMDD, ModelDimensionRelationshipDefinitionNode),
(XbrlConst.qnTableAspectNodeMMDD, ModelFilterDefinitionNode),
# PWD 2013-08-28
(XbrlConst.qnTableTable, ModelTable),
(XbrlConst.qnTableBreakdown, ModelBreakdown),
(XbrlConst.qnTableRuleNode, ModelRuleDefinitionNode),
(XbrlConst.qnTableConceptRelationshipNode, ModelConceptRelationshipDefinitionNode),
(XbrlConst.qnTableDimensionRelationshipNode, ModelDimensionRelationshipDefinitionNode),
(XbrlConst.qnTableAspectNode, ModelFilterDefinitionNode),
# PWD 2013-05-17
(XbrlConst.qnTableTable201305, ModelTable),
(XbrlConst.qnTableBreakdown201305, ModelBreakdown),
(XbrlConst.qnTableRuleNode201305, ModelRuleDefinitionNode),
(XbrlConst.qnTableConceptRelationshipNode201305, ModelConceptRelationshipDefinitionNode),
(XbrlConst.qnTableDimensionRelationshipNode201305, ModelDimensionRelationshipDefinitionNode),
(XbrlConst.qnTableAspectNode201305, ModelFilterDefinitionNode),
# PWD 2013-01-17
(XbrlConst.qnTableTable201301, ModelTable),
(XbrlConst.qnTableRuleNode201301, ModelRuleDefinitionNode),
(XbrlConst.qnTableCompositionNode201301, ModelCompositionDefinitionNode),
(XbrlConst.qnTableConceptRelationshipNode201301, ModelConceptRelationshipDefinitionNode),
(XbrlConst.qnTableDimensionRelationshipNode201301, ModelDimensionRelationshipDefinitionNode),
(XbrlConst.qnTableSelectionNode201301, ModelSelectionDefinitionNode),
(XbrlConst.qnTableFilterNode201301, ModelFilterDefinitionNode),
(XbrlConst.qnTableTupleNode201301, ModelTupleDefinitionNode),
# PWD 2011 Montreal
(XbrlConst.qnTableTable2011, ModelTable),
(XbrlConst.qnTableRuleAxis2011, ModelRuleDefinitionNode),
(XbrlConst.qnTableCompositionAxis2011, ModelCompositionDefinitionNode),
(XbrlConst.qnTableConceptRelationshipAxis2011, ModelConceptRelationshipDefinitionNode),
(XbrlConst.qnTableSelectionAxis2011, ModelSelectionDefinitionNode),
(XbrlConst.qnTableFilterAxis2011, ModelFilterDefinitionNode),
(XbrlConst.qnTableTupleAxis2011, ModelTupleDefinitionNode),
(XbrlConst.qnTableDimensionRelationshipAxis2011, ModelDimensionRelationshipDefinitionNode),
# Eurofiling
(XbrlConst.qnEuTable, ModelEuTable),
(XbrlConst.qnEuAxisCoord, ModelEuAxisCoord),
))
# import after other modules resolved to prevent circular references
from arelle.FunctionXfi import concept_relationships
| [
"[email protected]"
] | |
8860fd14e571f6895267fbdf6e37de2a1b996050 | dfab6798ece135946aebb08f93f162c37dd51791 | /timber/luban.timber/__init__.py | a1439a83d3cd277a7cb77b0454e6d6e5598f66c6 | [] | no_license | yxqd/luban | 405f5f7dcf09015d214079fe7e23d644332be069 | 00f699d15c572c8bf160516d582fa37f84ac2023 | refs/heads/master | 2020-03-20T23:08:45.153471 | 2012-05-18T14:52:43 | 2012-05-18T14:52:43 | 137,831,650 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | # -*- Python -*-
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# Jiao Lin
# California Institute of Technology
# (C) 2006-2011 All Rights Reserved
#
# {LicenseText}
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# ************************************************************
# bad bad
import luban
luban.__doc__ += """* timber: default extension of luban core
"""
# ************************************************************
# activate extensions
from . import elements, actions
from . import luban_ext
from . import controller # replace the core controllers with timber controllers. see eg .controllers.CherrypyController
from .controller import setUploadPath
# End of file
| [
"[email protected]"
] | |
87477ba53d15435cb55aa99b65ce10afdee5a360 | e52501eb4db862d90ae5541bd512a50df30e0726 | /Chapter 2+3 Intro + Variables + Strings/Chapter3-7 ShrinkingGuestList.py | 47c9f766034b09d285472e34a1f448ce0ac89821 | [] | no_license | ericnwin/Python-Crash-Course-Lessons | b2b12c221f545c961a47f2343b2aa3dac901927b | 7b755c0b4ce65528f4880b3583aca3be9547b33b | refs/heads/master | 2022-12-19T10:55:35.987330 | 2020-09-13T23:23:54 | 2020-09-13T23:23:54 | 295,255,228 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,190 | py | # You just found out that your new dinner table won’t
# arrive in time for the dinner, and you have space for only two guests.
# • Start with your program from Exercise 3-6. Add a new line that prints a
# message saying that you can invite only two people for dinner.
# • Use pop() to remove guests from your list one at a time until only two
# names remain in your list. Each time you pop a name from your list, print
# a message to that person letting them know you’re sorry you can’t invite
# them to dinner.
# • Print a message to each of the two people still on your list, letting them
# know they’re still invited.
# • Use del to remove the last two names from your list, so you have an empty
# list. Print your list to make sure you actually have an empty list at the end
# of your program.
dinner_guests = ['Joeji', 'Elon Musk', 'OpenAI']
print(
f"Hey {dinner_guests[0]} I'm a huge fan of your music! Please join me for dinner. ")
print(f"Hey {dinner_guests[1]} can I get a free car? We can talk over dinner.")
print(f"Hey {dinner_guests[2]} teach me AI. I gib food as payment.")
# Declare who can't make it
declined_invitations = "OpenAI"
dinner_guests.remove(declined_invitations)
print(f"Unfortunately {declined_invitations} can't make it.\n")
# Adding new person to invite list
new_person_invite = "Kanye West"
dinner_guests.append(new_person_invite)
print(dinner_guests)
# Making 2nd set of invitations
print(
'\n' f"Hey {dinner_guests[0]} I'm a huge fan of your music! Please join me for dinner. ")
print(f"Hey {dinner_guests[1]} can I get a free car? We can talk over dinner.")
print(f"Hey {dinner_guests[2]} I loved you in Titanic. Please eat with me.\n")
# shrinking down to 2 people and sending msg to those who are invited
print(f"Hey sorry we only have room for two... I'm uninviting one of you sorry.\n")
uninvited = dinner_guests.pop()
print(f"Hey sorry {uninvited} you've been uninvited :( \n")
print(f"Hey {dinner_guests[0]} you're still invited.")
print(f"Hey {dinner_guests[1]} you're still invited.")
# Remove last 2 names from list and printing out an empty list
del dinner_guests[0]
del dinner_guests[0]
print(dinner_guests)
| [
"[email protected]"
] | |
644f53da5330e99d42a57e2457baa4815d3cc52f | d0a54a3faa1891b647f8c621521cd26c13bd2926 | /backend/mytts.py | b5bd2373d11ec245d0b144f5f903e259d2fd903f | [
"MIT"
] | permissive | ishine/PTTS-WebAPP | 166318593d3247c88d458c9d4fe39dca27ef408f | dcc07a79d8dd695ca15e4dd5a69811b3ddd91709 | refs/heads/main | 2023-04-02T06:03:41.237351 | 2021-04-14T02:37:16 | 2021-04-14T02:37:16 | 357,388,655 | 0 | 0 | MIT | 2021-04-14T02:37:17 | 2021-04-13T01:31:11 | null | UTF-8 | Python | false | false | 2,595 | py | #!/usr/bin/env python
import os.path as osp
import librosa
import torch
from .hparams import HParam
from .transform import StandardNorm, TextProcessor
from .models import MelGenerator, ParallelText2Mel
from .synthesizer import Synthesizer
try:
from .manager import GPUManager
except ImportError as err:
print(err); gm = None
else:
gm = GPUManager()
def select_device(device):
cpu_request = device.lower() == 'cpu'
# if device requested other than 'cpu'
if device and not cpu_request:
c = 1024 ** 2 # bytes to MB
x = torch.cuda.get_device_properties(int(device))
s = f'Using torch {torch.__version__} '
print("%sCUDA:%s (%s, %dMB)" % (s, device, x.name, x.total_memory / c))
return torch.device(f'cuda:{device}')
else:
print(f'Using torch {torch.__version__} CPU')
return torch.device('cpu')
class MyTTS:
def __init__(self, config=None, device=None):
if torch.cuda.is_available():
index = device if device else str(0 if gm is None else gm.auto_choice())
else:
index = 'cpu'
self.device = device = select_device(index)
self.hparams = hparams = HParam(config) \
if config else HParam(osp.join(osp.dirname(osp.abspath(__file__)), "config", "default.yaml"))
checkpoint = osp.join(osp.dirname(osp.abspath(__file__)), "pretrained", hparams.parallel.checkpoint)
vocoder_checkpoint = osp.join(osp.dirname(osp.abspath(__file__)), "pretrained", hparams.vocoder.checkpoint)
normalizer = StandardNorm(hparams.audio.spec_mean, hparams.audio.spec_std)
processor = TextProcessor(hparams.text)
text2mel = ParallelText2Mel(hparams.parallel)
text2mel.eval()
vocoder = MelGenerator(hparams.audio.n_mel_channels).to(device)
vocoder.eval(inference=True)
self.synthesizer = Synthesizer(
model=text2mel,
checkpoint=checkpoint,
vocoder=vocoder,
vocoder_checkpoint=vocoder_checkpoint,
processor=processor,
normalizer=normalizer,
device=device
)
def __call__(self, texts, speed, volume, tone):
rate = int(tone) / 3
alpha = (4 / int(speed)) * rate
beta = int(volume) / 3
wave = self.synthesizer.inference(texts, alpha=alpha, beta=beta)
wave = wave.cpu().detach().numpy()
sr = self.hparams.audio.sampling_rate
# use TSM + resample to change tone
wave = librosa.core.resample(wave, int(sr*rate), sr)
return wave, sr
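# Minimal usage sketch (assumptions: the bundled config/default.yaml and the
# pretrained checkpoints are in place; speed/volume/tone use the neutral value 3,
# which yields rate = 1, alpha = 4/3 and beta = 1 in __call__ above):
#     tts = MyTTS()
#     wave, sr = tts("Hello world", speed=3, volume=3, tone=3)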
| [
"[email protected]"
] | |
b2fcc624e79ef9ef10c62818cb0c7d2d93c0d250 | 080bbe77da955b3917435c25fc63b90b0f3c724e | /botorch/utils/multi_objective/box_decomposition.py | e566f0c69e493acd4370a0a28582374334f572aa | [
"MIT"
] | permissive | irinaespejo/botorch | 3d15d962ff0f5bb34fbd11b2eb7549db755af705 | e4dcf603fdaf83f0e5f8b9b392f943c89dfff7eb | refs/heads/master | 2023-07-11T18:02:11.853790 | 2021-08-19T15:57:21 | 2021-08-19T15:58:12 | 316,017,084 | 0 | 0 | MIT | 2020-11-25T18:02:11 | 2020-11-25T18:02:09 | null | UTF-8 | Python | false | false | 744 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
r"""
DEPRECATED - Box decomposition algorithms.
Use the botorch.utils.multi_objective.box_decompositions module instead.
"""
import warnings
from botorch.utils.multi_objective.box_decompositions.non_dominated import ( # noqa F401
NondominatedPartitioning,
)
warnings.warn(
"The botorch.utils.multi_objective.box_decomposition module has "
"been renamed to botorch.utils.multi_objective.box_decompositions. "
"botorch.utils.multi_objective.box_decomposition will be removed in "
"the next release.",
DeprecationWarning,
)
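# Migration sketch (the replacement import named by the warning above):
# from botorch.utils.multi_objective.box_decompositions.non_dominated import (
#     NondominatedPartitioning,
# )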
| [
"[email protected]"
] | |
a8569f82ed1a73ffbd59f8b49866754ec53e411d | 9dfb3372a1e4516d970a6e9d0a9fd8360580eae7 | /python pySerial/maping_data.py | feb9a76200b26899373a1eeba25711e6b4835877 | [] | no_license | clambering-goat/cameron_pyton | d1cd0e7b04da14e7ba4f89dcb4d973f297a4626c | df0b0365b86e75cfcfc2c1fc21608f1536a3b79f | refs/heads/master | 2021-07-14T20:37:37.021401 | 2019-02-28T07:52:11 | 2019-02-28T07:52:11 | 137,251,669 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 418 | py | import serial
y_points = []
# Read 20000 integer samples (one value per line) from the serial port
with serial.Serial('COM4', 9600, timeout=1) as ser:
    for q in range(20000):
        line = ser.readline()
        x = line.decode("utf-8").strip()
        # readline() returns an empty byte string on timeout; skip those reads
        if x:
            y_points.append(int(x))
import matplotlib.pyplot as plt
x_points = list(range(len(y_points)))
plt.plot(x_points, y_points)
plt.ylabel('serial reading')
plt.xlabel('sample index')
plt.show()
| [
"[email protected]"
] | |
664fef8dbbee5f880d4f0a0866edc6ccd5676737 | 0facb323be8a76bb4c168641309972fa77cbecf2 | /Configurations/HWWSemiLepHighMass/Full_v6Production/template_seed/templates_jhchoi/MassPoints2018/List_MX.py | ca93c1c06e444ba9cee292c6bdab834fd117111f | [] | no_license | bhoh/SNuAnalytics | ef0a1ba9fa0d682834672a831739dfcfa1e7486b | 34d1fc062e212da152faa83be50561600819df0e | refs/heads/master | 2023-07-06T03:23:45.343449 | 2023-06-26T12:18:28 | 2023-06-26T12:18:28 | 242,880,298 | 0 | 1 | null | 2020-02-25T01:17:50 | 2020-02-25T01:17:49 | null | UTF-8 | Python | false | false | 396 | py | List_MX=[
115 ,
120 ,
124 ,
125 ,
126 ,
130 ,
135 ,
140 ,
145 ,
150 ,
155 ,
160 ,
165 ,
170 ,
175 ,
180 ,
190 ,
200 ,
210 ,
230 ,
250 ,
270 ,
300 ,
350 ,
400 ,
450 ,
500 ,
550 ,
600 ,
650 ,
700 ,
750 ,
800 ,
900 ,
1000 ,
1500 ,
2000 ,
2500 ,
3000 ,
4000 ,
5000 ,
]
if __name__ == '__main__':
#print('( '+" ".join(str(MX) for MX in List_MX)+' )')
print " ".join(str(MX) for MX in List_MX)
| [
"[email protected]"
] | |
145a90c675971039d677b9e3411c7b6f30d2cde6 | 59be93c710d9e1750d2767f1c98f347ed3dc635c | /elements/when.py | 308f8c8d7c8fbbc825abfaf9cd8a8914f92fd203 | [
"MIT"
] | permissive | artemZholus/elements | 802d14eb574be0c3f18a50fdbc87ee262fbcd01a | 21b4f27e854d91a65619e8fc81b3916386c5ef66 | refs/heads/main | 2023-07-10T05:21:28.947510 | 2021-08-18T18:02:05 | 2021-08-18T18:02:05 | 397,594,638 | 0 | 0 | MIT | 2021-08-18T12:35:49 | 2021-08-18T12:35:49 | null | UTF-8 | Python | false | false | 711 | py | class Every:
def __init__(self, every):
self._every = every
self._last = None
def __call__(self, step):
step = int(step)
if not self._every:
return False
if self._last is None:
self._last = step
return True
if step >= self._last + self._every:
self._last += self._every
return True
return False
class Once:
def __init__(self):
self._once = True
def __call__(self):
if self._once:
self._once = False
return True
return False
class Until:
def __init__(self, until):
self._until = until
def __call__(self, step):
step = int(step)
if not self._until:
return True
return step < self._until
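# Usage sketch (hypothetical step values):
#     log_every = Every(100)
#     log_every(0)     # -> True  (first call initializes the schedule)
#     log_every(50)    # -> False
#     log_every(100)   # -> True
#     run_until = Until(1000)
#     run_until(999)   # -> True
#     run_until(1000)  # -> False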
| [
"[email protected]"
] | |
9155110a9ae58bc903e5e05dc9dfed7c7bdc4cea | c9ddbdb5678ba6e1c5c7e64adf2802ca16df778c | /cases/synthetic/tree-big-1645.py | 4a5e725248f8e0b1a645420a84fb9273621ed52e | [] | no_license | Virtlink/ccbench-chocopy | c3f7f6af6349aff6503196f727ef89f210a1eac8 | c7efae43bf32696ee2b2ee781bdfe4f7730dec3f | refs/heads/main | 2023-04-07T15:07:12.464038 | 2022-02-03T15:42:39 | 2022-02-03T15:42:39 | 451,969,776 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 23,292 | py | # Binary-search trees
class TreeNode(object):
value:int = 0
left:"TreeNode" = None
right:"TreeNode" = None
def insert(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode(x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode(x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode2(object):
value:int = 0
value2:int = 0
left:"TreeNode2" = None
left2:"TreeNode2" = None
right:"TreeNode2" = None
right2:"TreeNode2" = None
def insert(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode2(x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode2(x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode2", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode2", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode3(object):
value:int = 0
value2:int = 0
value3:int = 0
left:"TreeNode3" = None
left2:"TreeNode3" = None
left3:"TreeNode3" = None
right:"TreeNode3" = None
right2:"TreeNode3" = None
right3:"TreeNode3" = None
def insert(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode3", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode3(x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode3(x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode3", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode3", x:int, x2:int) -> bool:
        if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode3", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode4(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
left:"TreeNode4" = None
left2:"TreeNode4" = None
left3:"TreeNode4" = None
left4:"TreeNode4" = None
right:"TreeNode4" = None
right2:"TreeNode4" = None
right3:"TreeNode4" = None
right4:"TreeNode4" = None
def insert(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode4(x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode4(x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode4", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode4", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode4", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode4", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class TreeNode5(object):
value:int = 0
value2:int = 0
value3:int = 0
value4:int = 0
value5:int = 0
left:"TreeNode5" = None
left2:"TreeNode5" = None
left3:"TreeNode5" = None
left4:"TreeNode5" = None
left5:"TreeNode5" = None
right:"TreeNode5" = None
right2:"TreeNode5" = None
right3:"TreeNode5" = None
right4:"TreeNode5" = None
right5:"TreeNode5" = None
def insert(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def insert5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
self.left = makeNode5(x, x, x, x, x)
return True
else:
return self.left.insert(x)
elif x > self.value:
if self.right is None:
self.right = makeNode5(x, x, x, x, x)
return True
else:
return self.right.insert(x)
return False
def contains(self:"TreeNode5", x:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains2(self:"TreeNode5", x:int, x2:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains3(self:"TreeNode5", x:int, x2:int, x3:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains4(self:"TreeNode5", x:int, x2:int, x3:int, x4:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
def contains5(self:"TreeNode5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if x < self.value:
if self.left is None:
return False
else:
return self.left.contains(x)
elif x > self.value:
if self.right is None:
return False
else:
return self.right.contains(x)
else:
return True
class Tree(object):
root:TreeNode = None
size:int = 0
def insert(self:"Tree", x:int) -> object:
if self.root is None:
self.root = makeNode(x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree2(object):
root:TreeNode2 = None
root2:TreeNode2 = None
size:int = 0
size2:int = 0
def insert(self:"Tree2", x:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree2", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode2(x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree2", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree2", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree3(object):
root:TreeNode3 = None
root2:TreeNode3 = None
root3:TreeNode3 = None
size:int = 0
size2:int = 0
size3:int = 0
def insert(self:"Tree3", x:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree3", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree3", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode3(x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree3", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree3", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree3", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree4(object):
root:TreeNode4 = None
root2:TreeNode4 = None
root3:TreeNode4 = None
root4:TreeNode4 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
def insert(self:"Tree4", x:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree4", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree4", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode4(x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree4", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree4", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree4", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree4", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
class Tree5(object):
root:TreeNode5 = None
root2:TreeNode5 = None
root3:TreeNode5 = None
root4:TreeNode5 = None
root5:TreeNode5 = None
size:int = 0
size2:int = 0
size3:int = 0
size4:int = 0
size5:int = 0
def insert(self:"Tree5", x:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert2(self:"Tree5", x:int, x2:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert3(self:"Tree5", x:int, x2:int, x3:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def insert5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> object:
if self.root is None:
self.root = makeNode5(x, x, x, x, x)
self.size = 1
else:
if self.root.insert(x):
self.size = self.size + 1
def contains(self:"Tree5", x:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains2(self:"Tree5", x:int, x2:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains3(self:"Tree5", x:int, x2:int, x3:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains4(self:"Tree5", x:int, x2:int, x3:int, x4:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def contains5(self:"Tree5", x:int, x2:int, x3:int, x4:int, x5:int) -> bool:
if self.root is None:
return False
else:
return self.root.contains(x)
def makeNode(x: int) -> TreeNode:
b:TreeNode = None
b = TreeNode()
b.value = x
return b
def makeNode2(x: int, x2: int) -> TreeNode2:
b:TreeNode2 = None
b2:TreeNode2 = None
b = TreeNode2()
b.value = x
return b
def makeNode3(x: int, x2: int, x3: int) -> TreeNode3:
b:TreeNode3 = None
b2:TreeNode3 = None
b3:TreeNode3 = None
b = TreeNode3()
b.value = x
return b
def makeNode4(x: int, x2: int, x3: int, x4: int) -> TreeNode4:
b:TreeNode4 = None
b2:TreeNode4 = None
b3:TreeNode4 = None
b4:TreeNode4 = None
b = TreeNode4()
b.value = x
return b
def makeNode5(x: int, x2: int, x3: int, x4: int, x5: int) -> TreeNode5:
b:TreeNode5 = None
b2:TreeNode5 = None
b3:TreeNode5 = None
b4:TreeNode5 = None
b5:TreeNode5 = None
b = TreeNode5()
b.value = x
return b
# Input parameters
n:int = 100
n2:int = 100
n3:int = 100
n4:int = 100
n5:int = 100
c:int = 4
c2:int = 4
c3:int = 4
c4:int = 4
c5:int = 4
# Data
t:Tree = None
t2:Tree = None
t3:Tree = None
t4:Tree = None
t5:Tree = None
i:int = 0
i2:int = 0
i3:int = 0
i4:int = 0
i5:int = 0
k:int = 37813
k2:int = 37813
k3:int = 37813
k4:int = 37813
k5:int = 37813
# Crunch
t = Tree()
while i < n:
t.insert(k)
k = (k * 37813) % 37831
if i % c != 0:
t.insert(i)
i = i + 1
print(t.size)
for i in [4, 8, 15, 16, 23, 42]:
if t.contains(i):
print(i)
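# (Note: TreeNode implements a plain, unbalanced binary search tree, so insert and
#  contains cost O(h) for tree height h. The driver above inserts the pseudo-random
#  sequence k = (k * 37813) % 37831 plus every i with i % c != 0, then probes the
#  six keys 4, 8, 15, 16, 23 and 42.)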
| [
"[email protected]"
] | |
c07aa82c886d791ed37e80ecf66b26fe3ba26449 | f59860bb4d04007cf03258753aefcbf58e760db0 | /music/migrations/0005_song_datetime.py | a64764e5215f82e94025a21d14a4720153be91ab | [] | no_license | Arefeh902/station_49 | fc306d7668d64c68df7dba35adbdc25d5600544a | 3076e4ab616759f5aa0a973525c0436b603f942f | refs/heads/master | 2023-07-01T10:25:39.820956 | 2021-08-10T18:47:28 | 2021-08-10T18:47:28 | 391,368,241 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 386 | py | # Generated by Django 2.1.9 on 2021-08-07 08:21
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('music', '0004_auto_20210807_0806'),
]
operations = [
migrations.AddField(
model_name='song',
name='datetime',
field=models.DateTimeField(auto_now=True),
),
]
| [
"[email protected]"
] | |
b563563bd985a3f9d737ea973f8314bd6fb8f40d | 9d2b33eb85fca3a81ccb7272422c41a08467a8c4 | /pdm/formats/requirements.py | 37c230c8eab420dab9e877204074086b6fe605a6 | [
"MIT"
] | permissive | skyoo2003/pdm | 9b9d20079f325c087855c7a37a4270d36fa71131 | 95a758ee259dff02f00f0a3eab79fa23e5d2aa97 | refs/heads/master | 2022-12-12T18:01:36.331560 | 2020-09-04T01:16:41 | 2020-09-04T01:16:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,729 | py | import hashlib
import urllib.parse
from pip._internal.req.req_file import parse_requirements
from pdm.models.markers import Marker
from pdm.models.requirements import parse_requirement
from pdm.utils import get_finder
def _requirement_to_str_lowercase_name(requirement):
"""Formats a packaging.requirements.Requirement with a lowercase name."""
parts = [requirement.name.lower()]
if requirement.extras:
parts.append("[{0}]".format(",".join(sorted(requirement.extras))))
if requirement.specifier:
parts.append(str(requirement.specifier))
if requirement.url:
parts.append("@ {0}".format(requirement.url))
if requirement.marker:
parts.append("; {0}".format(requirement.marker))
return "".join(parts)
def requirement_from_ireq(ireq):
"""Formats an `InstallRequirement` instance as a
`pdm.models.requirement.Requirement`.
Generic formatter for pretty printing InstallRequirements to the terminal
in a less verbose way than using its `__str__` method.
:param :class:`InstallRequirement` ireq: A pip **InstallRequirement** instance.
:return: A formatted string for prettyprinting
:rtype: str
"""
if ireq.editable:
line = "{}".format(ireq.link)
else:
line = _requirement_to_str_lowercase_name(ireq.req)
if str(ireq.req.marker) != str(ireq.markers):
if not ireq.req.marker:
line = "{}; {}".format(line, ireq.markers)
else:
name, markers = line.split(";", 1)
markers = Marker(markers) & ireq.markers
line = "{}; {}".format(name, markers)
return parse_requirement(line, ireq.editable)
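# A worked example of the marker-merging branch above (hypothetical names;
# assumes pdm's Marker implements `&` as a logical AND, as it is used here):
#   line before:  foo>=1.0; python_version >= "3.6"
#   ireq.markers: sys_platform == "win32"
#   line after:   foo>=1.0; python_version >= "3.6" and sys_platform == "win32"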
def parse_requirement_file(filename):
from pip._internal.req.constructors import install_req_from_parsed_requirement
finder = get_finder([])
ireqs = [
install_req_from_parsed_requirement(pr)
for pr in parse_requirements(filename, finder.session, finder)
]
return ireqs, finder
def check_fingerprint(project, filename):
import tomlkit
with open(filename, encoding="utf-8") as fp:
try:
tomlkit.parse(fp.read())
except ValueError:
            # the file should be a requirements.txt if it is not a TOML document.
return True
else:
return False
def convert_url_to_source(url, name=None):
if not name:
name = hashlib.sha1(url.encode("utf-8")).hexdigest()[:6]
return {"name": name, "url": url, "verify_ssl": url.startswith("https://")}
def convert(project, filename):
ireqs, finder = parse_requirement_file(str(filename))
reqs = [requirement_from_ireq(ireq) for ireq in ireqs]
data = {"dependencies": dict(req.as_req_dict() for req in reqs)}
if finder.index_urls:
sources = [convert_url_to_source(finder.index_urls[0], "pypi")]
sources.extend(convert_url_to_source(url) for url in finder.index_urls[1:])
data["source"] = sources
return data
def export(project, candidates, options):
lines = []
for candidate in candidates:
req = candidate.req.as_line()
lines.append(req)
if options.hashes and candidate.hashes:
for item in candidate.hashes.values():
lines.append(f" \\\n --hash={item}")
lines.append("\n")
sources = project.tool_settings.get("source", [])
for source in sources:
url = source["url"]
prefix = "--index-url" if source["name"] == "pypi" else "--extra-index-url"
lines.append(f"{prefix} {url}\n")
if not source["verify_ssl"]:
host = urllib.parse.urlparse(url).hostname
lines.append(f"--trusted-host {host}\n")
return "".join(lines)
| [
"[email protected]"
] | |
bef3b9ad03bdc33f7171cc9b588f198ce873e861 | 62922a76e40003f3d3a7d02282853f9a2b76c6fc | /cv2/ch22/test1.py | 1172f8de48d2bc9bfba7168431a2727b16325054 | [] | no_license | cchangcs/ai_learning_record | a7d0d9c7fcdc1e97d8869aa7e63b535f8cf62df2 | 235a90ff5fe0205334376a927d462b8ae64e4e70 | refs/heads/master | 2020-04-01T16:59:31.203223 | 2018-11-21T11:12:34 | 2018-11-21T11:12:34 | 153,408,023 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,103 | py | # encoding:utf-8
'''
Blob detection with SimpleBlobDetector()
By default the detector finds dark blobs; to detect white blobs, set filterByColor to True and blobColor to 255.
A blob is a region that differs from its surroundings in color and gray level. Real images contain many such blobs, e.g. a tree or a patch of grass.
Because a blob represents a whole region, it is more stable and more robust to noise than a single corner point, so blobs play an important role in image registration.
Blobs can also be regions of interest in their own right: in medicine and biology, for example, we need to extract the positions or counts of meaningful spots from X-ray or cell-microscopy images.
'''
import cv2
import numpy as np
im = cv2.imread('blob.jpg', cv2.IMREAD_GRAYSCALE)
detector = cv2.SimpleBlobDetector_create()
keypoints = detector.detect(im)
im_with_keypoints = cv2.drawKeypoints(im, keypoints, np.array([]), (0, 0, 255), cv2.DRAW_MATCHES_FLAGS_DRAW_RICH_KEYPOINTS)
cv2.imshow("Keypoints", im_with_keypoints)
cv2.waitKey(0)
cv2.destroyAllWindows()
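# As the docstring notes, the default detector finds dark blobs. A sketch for
# detecting white blobs instead (standard OpenCV parameter object):
# params = cv2.SimpleBlobDetector_Params()
# params.filterByColor = True
# params.blobColor = 255   # 255 selects light blobs, 0 selects dark blobs
# detector = cv2.SimpleBlobDetector_create(params)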
| [
"[email protected]"
] | |
b1d84ff6d8719c6d1cb346458bafaa88df886d86 | 0facb323be8a76bb4c168641309972fa77cbecf2 | /Configurations/HWWSemiLepHighMass/nanoAODv5/v6_production/2017/NJET_biined_WJets/SKIM10/HMVar10_Full_ALL_var/MassPoints/structure_M1500.py | 006d035cd83abd3e70ffc306361571ee477e383b | [] | no_license | bhoh/SNuAnalytics | ef0a1ba9fa0d682834672a831739dfcfa1e7486b | 34d1fc062e212da152faa83be50561600819df0e | refs/heads/master | 2023-07-06T03:23:45.343449 | 2023-06-26T12:18:28 | 2023-06-26T12:18:28 | 242,880,298 | 0 | 1 | null | 2020-02-25T01:17:50 | 2020-02-25T01:17:49 | null | UTF-8 | Python | false | false | 1,725 | py | #['WW', 'ggHWWlnuqq_M1500', 'DY', 'DATA', 'WZ', 'ggHWWlnuqq_M125', 'ZZZ', 'ggHWWlnuqq_M900', 'vbfHWWlnuqq_M500', 'Wjets1j', 'QCD_MU', 'WZZ', 'vbfHWWlnuqq_M900', 'QCD_bcToE', 'Wjets2j', 'QCD_EM', 'ggHWWlnuqq_M500', 'ZZ', 'WWW', 'vbfHWWlnuqq_M1500', 'vbfHWWlnuqq_M125', 'WWZ', 'Wjets0j', 'top']
QCD_MU=['QCD_Pt-15to20_MuEnrichedPt5',
'QCD_Pt-20to30_MuEnrichedPt5',
'QCD_Pt-30to50_MuEnrichedPt5',
'QCD_Pt-50to80_MuEnrichedPt5',
'QCD_Pt-80to120_MuEnrichedPt5',
'QCD_Pt-120to170_MuEnrichedPt5',
'QCD_Pt-170to300_MuEnrichedPt5',
'QCD_Pt-300to470_MuEnrichedPt5',
'QCD_Pt-470to600_MuEnrichedPt5',
'QCD_Pt-600to800_MuEnrichedPt5',
'QCD_Pt-800to1000_MuEnrichedPt5',
'QCD_Pt-1000toInf_MuEnrichedPt5',
]
QCD_EM=[
'QCD_Pt-20to30_EMEnriched',
'QCD_Pt-30to50_EMEnriched',
'QCD_Pt-50to80_EMEnriched',
'QCD_Pt-80to120_EMEnriched',
'QCD_Pt-120to170_EMEnriched',
'QCD_Pt-170to300_EMEnriched',
'QCD_Pt-300toInf_EMEnriched'
]
QCD_bcToE=[
'QCD_Pt_20to30_bcToE',
'QCD_Pt_30to80_bcToE',
'QCD_Pt_80to170_bcToE',
'QCD_Pt_170to250_bcToE',
'QCD_Pt_250toInf_bcToE',
]
for name in [ 'DY', 'WZZ', 'WWZ','WWW','ZZZ', 'ZZ', 'WZ', 'WW', 'WpWmJJ_EWK_QCD_noHiggs', 'top', 'Wjets0j', 'Wjets1j', 'Wjets2j','vbfHWWlnuqq_M125','ggHWWlnuqq_M125'] + ['QCD_MU','QCD_EM','QCD_bcToE']:
structure[name] = {
'isSignal' : 0,
'isData' : 0
}
#ggHWWlnuqq_M1500_S_B_I
structure['ggHWWlnuqq_M1500'] = {
'isSignal' : 1,
'isData' : 0
}
structure['vbfHWWlnuqq_M1500'] = {
'isSignal' : 1,
'isData' : 0
}
structure['PseudoData'] = {
'isSignal' : 0,
'isData' : 1
}
| [
"[email protected]"
] | |
622882398cd5c1e5077722f60c7aa9e77ef203af | 0ad8fc76aebe7ce22abe771fbeadf227e5b471cb | /app/productdb/tasks.py | 8cb6cd358ef178da1d7cd5290af4ab4a8389c040 | [
"MIT"
] | permissive | ppavlu/product-database | 354c6a1a3e9ebfdc931f2aacf8751ed0f149401c | 09610c09600c63eb91106c0b5a2fa995b134dbf4 | refs/heads/master | 2021-01-17T22:51:43.247027 | 2015-10-11T11:37:12 | 2015-10-11T11:37:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,251 | py | from django_project.celery import app as app
from app.productdb.models import Settings
import app.productdb.crawler.cisco_eox_api_crawler as cisco_eox_api_crawler
import logging
logger = logging.getLogger(__name__)
@app.task(serializer='json', name="synchronize_with_cisco_eox_api")
def execute_task_to_synchronize_cisco_eox_states():
"""
This task will automatically synchronize the Cisco EoX states with the local database. It will execute the
configured queries and saves the information to the local database. There are two types of operation:
* cisco_eox_api_auto_sync_auto_create_elements is set to true - will create any element which is not part of the blacklist and not in the
database
* cisco_eox_api_auto_sync_auto_create_elements is set to false - will only update entries, which are already included in the database
:return:
"""
logger.info("execute synchronize Cisco EoX update task...")
# update based on the configured query settings
result = cisco_eox_api_crawler.synchronize_with_cisco_eox_api()
logger.info("result: %s" % str(result))
s = Settings.objects.get(id=0)
s.eox_api_sync_task_id = ""
s.save()
return result
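# Usage sketch (assumption: a Celery worker/beat schedule is configured for this
# app; the task can also be triggered by the name it is registered under):
#     execute_task_to_synchronize_cisco_eox_states.delay()
#     # or: app.send_task("synchronize_with_cisco_eox_api")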
| [
"[email protected]"
] | |
e7c933c7739f81bba762a05bd13220dda275b7ae | 0e1e643e864bcb96cf06f14f4cb559b034e114d0 | /Exps_7_v3/doc3d/Wyx_w_M_w_Sob_to_Wz_focus/IN_Sob_k5_EroMore/Sob_k35_s001_EroM/pyr_Tcrop255_p60_j15/pyr_5s/L7/step09_5side_L7.py | 94ce9cec1f7924ce4d28ef2ca3b168732be84876 | [] | no_license | KongBOy/kong_model2 | 33a94a9d2be5b0f28f9d479b3744e1d0e0ebd307 | 1af20b168ffccf0d5293a393a40a9fa9519410b2 | refs/heads/master | 2022-10-14T03:09:22.543998 | 2022-10-06T11:33:42 | 2022-10-06T11:33:42 | 242,080,692 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399,932 | py | #############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### Add kong_model2 to sys.path
import os
code_exe_path = os.path.realpath(__file__)  ### path of the currently running script (e.g. step10_b.py)
code_exe_path_element = code_exe_path.split("\\")  ### split the path to find which level kong_model2 sits at
kong_layer = code_exe_path_element.index("kong_model2")  ### index of the kong_model2 path component
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1])  ### locate the kong_model2 dir
import sys  ### add kong_model2 to sys.path
sys.path.append(kong_model2_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
from step08_b_use_G_generate_Wxy_w_M_to_Wz_combine import Wyx_w_M_to_Wz
from step08_b_use_G_generate_0_util import Tight_crop
from step09_c_train_step import Train_step_Wyx_w_M_to_Wz
from step09_d_KModel_builder_combine_step789 import KModel_builder, MODEL_NAME
from step10_a1_loss import Sobel_MAE
Sob_k5_s001_erose_M = Sobel_MAE(sobel_kernel_size=5, sobel_kernel_scale=1, erose_M=True, erose_More=True)
use_gen_op = Wyx_w_M_to_Wz( focus=True, tight_crop=Tight_crop(pad_size=60, resize=(255, 255), jit_scale= 0), sobel=Sob_k5_s001_erose_M, sobel_only=True )
use_train_step = Train_step_Wyx_w_M_to_Wz( focus=True, tight_crop=Tight_crop(pad_size=60, resize=(255, 255), jit_scale=15), sobel=Sob_k5_s001_erose_M, sobel_only=True )
import time
start_time = time.time()
###############################################################################################################################################################################################
##################################
### 5side1
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
pyramid_1side_1__2side_1__3side_1_4side_1_5s1 = [5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5]
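# (Observation, assuming the naming scheme enumerates monotone side levels: the
#  header rows 1 3 6 10 ... 55 are the triangular numbers C(n+1, 2), while the
#  "OK" totals 1, 4, 10, 20, 35, 56, 84, 120 below are the tetrahedral numbers
#  C(n+2, 3), i.e. the number of triples 2side >= 3side >= 4side with 1side = n.)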
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
pyramid_1side_2__2side_1__3side_1_4side_1_5s1 = [5, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 5]
pyramid_1side_2__2side_2__3side_1_4side_1_5s1 = [5, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 5]
pyramid_1side_2__2side_2__3side_2_4side_1_5s1 = [5, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 5]
pyramid_1side_2__2side_2__3side_2_4side_2_5s1 = [5, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side3 OK 10
pyramid_1side_3__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 5]
pyramid_1side_3__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 2, 5]
pyramid_1side_3__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 3, 5]
pyramid_1side_3__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 2, 5]
pyramid_1side_3__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 3, 5]
pyramid_1side_3__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 3, 5]
pyramid_1side_3__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 4, 5]
pyramid_1side_3__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 4, 5]
pyramid_1side_3__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 4, 5]
pyramid_1side_3__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 4, 5]
# 1 3 6 "10" 15 21 28 36 45 55
# side4 OK 20
pyramid_1side_4__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 1, 5]
pyramid_1side_4__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 2, 5]
pyramid_1side_4__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 3, 5]
pyramid_1side_4__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 2, 2, 5]
pyramid_1side_4__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 2, 3, 5]
pyramid_1side_4__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 1, 0, 0, 0, 0, 0, 0, 0, 1, 3, 3, 5]
pyramid_1side_4__2side_4__3side_1_4side_1_5s1 = [5, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 2, 5]
pyramid_1side_4__2side_4__3side_2_4side_1_5s1 = [5, 3, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 3, 5]
pyramid_1side_4__2side_4__3side_3_4side_1_5s1 = [5, 3, 3, 2, 0, 0, 0, 0, 0, 0, 0, 2, 3, 3, 5]
pyramid_1side_4__2side_4__3side_4_4side_1_5s1 = [5, 3, 3, 3, 0, 0, 0, 0, 0, 0, 0, 3, 3, 3, 5]
pyramid_1side_4__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 4, 5]
pyramid_1side_4__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 2, 4, 5]
pyramid_1side_4__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 1, 0, 0, 0, 0, 0, 0, 0, 1, 3, 4, 5]
pyramid_1side_4__2side_4__3side_2_4side_2_5s1 = [5, 4, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 4, 5]
pyramid_1side_4__2side_4__3side_3_4side_2_5s1 = [5, 4, 3, 2, 0, 0, 0, 0, 0, 0, 0, 2, 3, 4, 5]
pyramid_1side_4__2side_4__3side_4_4side_2_5s1 = [5, 4, 3, 3, 0, 0, 0, 0, 0, 0, 0, 3, 3, 4, 5]
pyramid_1side_4__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 1, 0, 0, 0, 0, 0, 0, 0, 1, 4, 4, 5]
pyramid_1side_4__2side_4__3side_3_4side_3_5s1 = [5, 4, 4, 2, 0, 0, 0, 0, 0, 0, 0, 2, 4, 4, 5]
pyramid_1side_4__2side_4__3side_4_4side_3_5s1 = [5, 4, 4, 3, 0, 0, 0, 0, 0, 0, 0, 3, 4, 4, 5]
pyramid_1side_4__2side_4__3side_4_4side_4_5s1 = [5, 4, 4, 4, 0, 0, 0, 0, 0, 0, 0, 4, 4, 4, 5]
# 1 3 6 10 "15" 21 28 36 45 55
# side5 OK 35
pyramid_1side_5__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 1, 5]
pyramid_1side_5__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 2, 5]
pyramid_1side_5__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 3, 5]
pyramid_1side_5__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, 2, 5]
pyramid_1side_5__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, 3, 5]
pyramid_1side_5__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 1, 1, 0, 0, 0, 0, 0, 1, 1, 3, 3, 5]
pyramid_1side_5__2side_4__3side_1_4side_1_5s1 = [5, 2, 2, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 2, 5]
pyramid_1side_5__2side_4__3side_2_4side_1_5s1 = [5, 3, 2, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 3, 5]
pyramid_1side_5__2side_4__3side_3_4side_1_5s1 = [5, 3, 3, 2, 1, 0, 0, 0, 0, 0, 1, 2, 3, 3, 5]
pyramid_1side_5__2side_4__3side_4_4side_1_5s1 = [5, 3, 3, 3, 1, 0, 0, 0, 0, 0, 1, 3, 3, 3, 5]
pyramid_1side_5__2side_5__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 2, 5]
pyramid_1side_5__2side_5__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 3, 5]
pyramid_1side_5__2side_5__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 0, 0, 0, 0, 0, 2, 2, 3, 3, 5]
pyramid_1side_5__2side_5__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 0, 0, 0, 0, 0, 2, 3, 3, 3, 5]
pyramid_1side_5__2side_5__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 3, 5]
pyramid_1side_5__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 4, 5]
pyramid_1side_5__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, 4, 5]
pyramid_1side_5__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 1, 1, 0, 0, 0, 0, 0, 1, 1, 3, 4, 5]
pyramid_1side_5__2side_4__3side_2_4side_2_5s1 = [5, 4, 2, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 4, 5]
pyramid_1side_5__2side_4__3side_3_4side_2_5s1 = [5, 4, 3, 2, 1, 0, 0, 0, 0, 0, 1, 2, 3, 4, 5]
pyramid_1side_5__2side_4__3side_4_4side_2_5s1 = [5, 4, 3, 3, 1, 0, 0, 0, 0, 0, 1, 3, 3, 4, 5]
pyramid_1side_5__2side_5__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 4, 5]
pyramid_1side_5__2side_5__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 0, 0, 0, 0, 0, 2, 2, 3, 4, 5]
pyramid_1side_5__2side_5__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 0, 0, 0, 0, 0, 2, 3, 3, 4, 5]
pyramid_1side_5__2side_5__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 4, 5]
pyramid_1side_5__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 1, 1, 0, 0, 0, 0, 0, 1, 1, 4, 4, 5]
pyramid_1side_5__2side_4__3side_3_4side_3_5s1 = [5, 4, 4, 2, 1, 0, 0, 0, 0, 0, 1, 2, 4, 4, 5]
pyramid_1side_5__2side_4__3side_4_4side_3_5s1 = [5, 4, 4, 3, 1, 0, 0, 0, 0, 0, 1, 3, 4, 4, 5]
pyramid_1side_5__2side_5__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 0, 0, 0, 0, 0, 2, 2, 4, 4, 5]
pyramid_1side_5__2side_5__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 0, 0, 0, 0, 0, 2, 3, 4, 4, 5]
pyramid_1side_5__2side_5__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 0, 0, 0, 0, 0, 3, 3, 4, 4, 5]
pyramid_1side_5__2side_4__3side_4_4side_4_5s1 = [5, 4, 4, 4, 1, 0, 0, 0, 0, 0, 1, 4, 4, 4, 5]
pyramid_1side_5__2side_5__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 0, 0, 0, 0, 0, 2, 4, 4, 4, 5]
pyramid_1side_5__2side_5__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 0, 0, 0, 0, 0, 3, 4, 4, 4, 5]
pyramid_1side_5__2side_5__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 0, 0, 0, 0, 0, 4, 4, 4, 4, 5]
# 1 3 6 10 15 "21" 28 36 45 55
# side6 OK 56
pyramid_1side_6__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 1, 5]
pyramid_1side_6__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 2, 5]
pyramid_1side_6__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 3, 5]
pyramid_1side_6__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 2, 5]
pyramid_1side_6__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 3, 5]
pyramid_1side_6__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 1, 1, 1, 0, 0, 0, 1, 1, 1, 3, 3, 5]
pyramid_1side_6__2side_4__3side_1_4side_1_5s1 = [5, 2, 2, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 2, 5]
pyramid_1side_6__2side_4__3side_2_4side_1_5s1 = [5, 3, 2, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 3, 5]
pyramid_1side_6__2side_4__3side_3_4side_1_5s1 = [5, 3, 3, 2, 1, 1, 0, 0, 0, 1, 1, 2, 3, 3, 5]
pyramid_1side_6__2side_4__3side_4_4side_1_5s1 = [5, 3, 3, 3, 1, 1, 0, 0, 0, 1, 1, 3, 3, 3, 5]
pyramid_1side_6__2side_5__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 1, 0, 0, 0, 1, 2, 2, 2, 2, 5]
pyramid_1side_6__2side_5__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 1, 0, 0, 0, 1, 2, 2, 2, 3, 5]
pyramid_1side_6__2side_5__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 1, 0, 0, 0, 1, 2, 2, 3, 3, 5]
pyramid_1side_6__2side_5__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 1, 0, 0, 0, 1, 2, 3, 3, 3, 5]
pyramid_1side_6__2side_5__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 1, 0, 0, 0, 1, 3, 3, 3, 3, 5]
pyramid_1side_6__2side_6__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 2, 5]
pyramid_1side_6__2side_6__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 3, 5]
pyramid_1side_6__2side_6__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 0, 0, 0, 2, 2, 2, 3, 3, 5]
pyramid_1side_6__2side_6__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 0, 0, 0, 2, 2, 3, 3, 3, 5]
pyramid_1side_6__2side_6__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 0, 0, 0, 2, 3, 3, 3, 3, 5]
pyramid_1side_6__2side_6__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 0, 0, 0, 3, 3, 3, 3, 3, 5]
pyramid_1side_6__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 4, 5]
pyramid_1side_6__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 4, 5]
pyramid_1side_6__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 1, 1, 1, 0, 0, 0, 1, 1, 1, 3, 4, 5]
pyramid_1side_6__2side_4__3side_2_4side_2_5s1 = [5, 4, 2, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 4, 5]
pyramid_1side_6__2side_4__3side_3_4side_2_5s1 = [5, 4, 3, 2, 1, 1, 0, 0, 0, 1, 1, 2, 3, 4, 5]
pyramid_1side_6__2side_4__3side_4_4side_2_5s1 = [5, 4, 3, 3, 1, 1, 0, 0, 0, 1, 1, 3, 3, 4, 5]
pyramid_1side_6__2side_5__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 1, 0, 0, 0, 1, 2, 2, 2, 4, 5]
pyramid_1side_6__2side_5__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 1, 0, 0, 0, 1, 2, 2, 3, 4, 5]
pyramid_1side_6__2side_5__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 1, 0, 0, 0, 1, 2, 3, 3, 4, 5]
pyramid_1side_6__2side_5__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 1, 0, 0, 0, 1, 3, 3, 3, 4, 5]
pyramid_1side_6__2side_6__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 4, 5]
pyramid_1side_6__2side_6__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 0, 0, 0, 2, 2, 2, 3, 4, 5]
pyramid_1side_6__2side_6__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 0, 0, 0, 2, 2, 3, 3, 4, 5]
pyramid_1side_6__2side_6__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 0, 0, 0, 2, 3, 3, 3, 4, 5]
pyramid_1side_6__2side_6__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 0, 0, 0, 3, 3, 3, 3, 4, 5]
pyramid_1side_6__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 1, 1, 1, 0, 0, 0, 1, 1, 1, 4, 4, 5]
pyramid_1side_6__2side_4__3side_3_4side_3_5s1 = [5, 4, 4, 2, 1, 1, 0, 0, 0, 1, 1, 2, 4, 4, 5]
pyramid_1side_6__2side_4__3side_4_4side_3_5s1 = [5, 4, 4, 3, 1, 1, 0, 0, 0, 1, 1, 3, 4, 4, 5]
pyramid_1side_6__2side_5__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 1, 0, 0, 0, 1, 2, 2, 4, 4, 5]
pyramid_1side_6__2side_5__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 1, 0, 0, 0, 1, 2, 3, 4, 4, 5]
pyramid_1side_6__2side_5__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 1, 0, 0, 0, 1, 3, 3, 4, 4, 5]
pyramid_1side_6__2side_6__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 0, 0, 0, 2, 2, 2, 4, 4, 5]
pyramid_1side_6__2side_6__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 0, 0, 0, 2, 2, 3, 4, 4, 5]
pyramid_1side_6__2side_6__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 0, 0, 0, 2, 3, 3, 4, 4, 5]
pyramid_1side_6__2side_6__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 0, 0, 0, 3, 3, 3, 4, 4, 5]
pyramid_1side_6__2side_4__3side_4_4side_4_5s1 = [5, 4, 4, 4, 1, 1, 0, 0, 0, 1, 1, 4, 4, 4, 5]
pyramid_1side_6__2side_5__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 1, 0, 0, 0, 1, 2, 4, 4, 4, 5]
pyramid_1side_6__2side_5__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 1, 0, 0, 0, 1, 3, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 0, 0, 0, 2, 2, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 0, 0, 0, 2, 3, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 0, 0, 0, 3, 3, 4, 4, 4, 5]
pyramid_1side_6__2side_5__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 1, 0, 0, 0, 1, 4, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 0, 0, 0, 2, 4, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 0, 0, 0, 3, 4, 4, 4, 4, 5]
pyramid_1side_6__2side_6__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 0, 0, 0, 4, 4, 4, 4, 4, 5]
# 1 3 6 10 15 21 "28" 36 45 55
# side7 OK 84
pyramid_1side_7__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 5]
pyramid_1side_7__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 2, 5]
pyramid_1side_7__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 3, 5]
pyramid_1side_7__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 2, 5]
pyramid_1side_7__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 3, 5]
pyramid_1side_7__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 1, 1, 1, 1, 0, 1, 1, 1, 1, 3, 3, 5]
pyramid_1side_7__2side_4__3side_1_4side_1_5s1 = [5, 2, 2, 2, 1, 1, 1, 0, 1, 1, 1, 2, 2, 2, 5]
pyramid_1side_7__2side_4__3side_2_4side_1_5s1 = [5, 3, 2, 2, 1, 1, 1, 0, 1, 1, 1, 2, 2, 3, 5]
pyramid_1side_7__2side_4__3side_3_4side_1_5s1 = [5, 3, 3, 2, 1, 1, 1, 0, 1, 1, 1, 2, 3, 3, 5]
pyramid_1side_7__2side_4__3side_4_4side_1_5s1 = [5, 3, 3, 3, 1, 1, 1, 0, 1, 1, 1, 3, 3, 3, 5]
pyramid_1side_7__2side_5__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 1, 1, 0, 1, 1, 2, 2, 2, 2, 5]
pyramid_1side_7__2side_5__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 1, 1, 0, 1, 1, 2, 2, 2, 3, 5]
pyramid_1side_7__2side_5__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 1, 1, 0, 1, 1, 2, 2, 3, 3, 5]
pyramid_1side_7__2side_5__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 1, 1, 0, 1, 1, 2, 3, 3, 3, 5]
pyramid_1side_7__2side_5__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 1, 1, 0, 1, 1, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_6__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 1, 0, 1, 2, 2, 2, 2, 2, 5]
pyramid_1side_7__2side_6__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 1, 0, 1, 2, 2, 2, 2, 3, 5]
pyramid_1side_7__2side_6__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 1, 0, 1, 2, 2, 2, 3, 3, 5]
pyramid_1side_7__2side_6__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 1, 0, 1, 2, 2, 3, 3, 3, 5]
pyramid_1side_7__2side_6__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 1, 0, 1, 2, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_6__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 1, 0, 1, 3, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_7__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 2, 5]
pyramid_1side_7__2side_7__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 3, 5]
pyramid_1side_7__2side_7__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 2, 0, 2, 2, 2, 2, 3, 3, 5]
pyramid_1side_7__2side_7__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 2, 0, 2, 2, 2, 3, 3, 3, 5]
pyramid_1side_7__2side_7__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 2, 0, 2, 2, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_7__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 2, 0, 2, 3, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_7__3side_7_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 3, 0, 3, 3, 3, 3, 3, 3, 5]
pyramid_1side_7__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 4, 5]
pyramid_1side_7__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 4, 5]
pyramid_1side_7__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 1, 1, 1, 1, 0, 1, 1, 1, 1, 3, 4, 5]
pyramid_1side_7__2side_4__3side_2_4side_2_5s1 = [5, 4, 2, 2, 1, 1, 1, 0, 1, 1, 1, 2, 2, 4, 5]
pyramid_1side_7__2side_4__3side_3_4side_2_5s1 = [5, 4, 3, 2, 1, 1, 1, 0, 1, 1, 1, 2, 3, 4, 5]
pyramid_1side_7__2side_4__3side_4_4side_2_5s1 = [5, 4, 3, 3, 1, 1, 1, 0, 1, 1, 1, 3, 3, 4, 5]
pyramid_1side_7__2side_5__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 1, 1, 0, 1, 1, 2, 2, 2, 4, 5]
pyramid_1side_7__2side_5__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 1, 1, 0, 1, 1, 2, 2, 3, 4, 5]
pyramid_1side_7__2side_5__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 1, 1, 0, 1, 1, 2, 3, 3, 4, 5]
pyramid_1side_7__2side_5__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 1, 1, 0, 1, 1, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_6__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 1, 0, 1, 2, 2, 2, 2, 4, 5]
pyramid_1side_7__2side_6__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 1, 0, 1, 2, 2, 2, 3, 4, 5]
pyramid_1side_7__2side_6__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 1, 0, 1, 2, 2, 3, 3, 4, 5]
pyramid_1side_7__2side_6__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 1, 0, 1, 2, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_6__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 1, 0, 1, 3, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_7__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 4, 5]
pyramid_1side_7__2side_7__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 2, 0, 2, 2, 2, 2, 3, 4, 5]
pyramid_1side_7__2side_7__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 2, 0, 2, 2, 2, 3, 3, 4, 5]
pyramid_1side_7__2side_7__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 2, 0, 2, 2, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_7__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 2, 0, 2, 3, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 3, 0, 3, 3, 3, 3, 3, 4, 5]
pyramid_1side_7__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 1, 1, 1, 1, 0, 1, 1, 1, 1, 4, 4, 5]
pyramid_1side_7__2side_4__3side_3_4side_3_5s1 = [5, 4, 4, 2, 1, 1, 1, 0, 1, 1, 1, 2, 4, 4, 5]
pyramid_1side_7__2side_4__3side_4_4side_3_5s1 = [5, 4, 4, 3, 1, 1, 1, 0, 1, 1, 1, 3, 4, 4, 5]
pyramid_1side_7__2side_5__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 1, 1, 0, 1, 1, 2, 2, 4, 4, 5]
pyramid_1side_7__2side_5__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 1, 1, 0, 1, 1, 2, 3, 4, 4, 5]
pyramid_1side_7__2side_5__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 1, 1, 0, 1, 1, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_6__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 1, 0, 1, 2, 2, 2, 4, 4, 5]
pyramid_1side_7__2side_6__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 1, 0, 1, 2, 2, 3, 4, 4, 5]
pyramid_1side_7__2side_6__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 1, 0, 1, 2, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_6__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 1, 0, 1, 3, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_7__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 2, 0, 2, 2, 2, 2, 4, 4, 5]
pyramid_1side_7__2side_7__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 2, 0, 2, 2, 2, 3, 4, 4, 5]
pyramid_1side_7__2side_7__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 2, 0, 2, 2, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_7__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 2, 0, 2, 3, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 3, 0, 3, 3, 3, 3, 4, 4, 5]
pyramid_1side_7__2side_4__3side_4_4side_4_5s1 = [5, 4, 4, 4, 1, 1, 1, 0, 1, 1, 1, 4, 4, 4, 5]
pyramid_1side_7__2side_5__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 1, 1, 0, 1, 1, 2, 4, 4, 4, 5]
pyramid_1side_7__2side_5__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 1, 1, 0, 1, 1, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 1, 0, 1, 2, 2, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 1, 0, 1, 2, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 1, 0, 1, 3, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 2, 0, 2, 2, 2, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 2, 0, 2, 2, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 2, 0, 2, 3, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 3, 0, 3, 3, 3, 4, 4, 4, 5]
pyramid_1side_7__2side_5__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 1, 1, 0, 1, 1, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 1, 0, 1, 2, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 1, 0, 1, 3, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 2, 0, 2, 2, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 2, 0, 2, 3, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 3, 0, 3, 3, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_6__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 1, 0, 1, 4, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 2, 0, 2, 4, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 3, 0, 3, 4, 4, 4, 4, 4, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s1 = [5, 4, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4, 4, 4, 5]
# 1 3 6 10 15 21 28 "36" 45 55
# side8 OK 120
pyramid_1side_8__2side_1__3side_1_4side_1_5s1 = [5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5]
pyramid_1side_8__2side_2__3side_1_4side_1_5s1 = [5, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 5]
pyramid_1side_8__2side_2__3side_2_4side_1_5s1 = [5, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 5]
pyramid_1side_8__2side_3__3side_1_4side_1_5s1 = [5, 2, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 2, 5]
pyramid_1side_8__2side_3__3side_2_4side_1_5s1 = [5, 3, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 3, 5]
pyramid_1side_8__2side_3__3side_3_4side_1_5s1 = [5, 3, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 3, 5]
pyramid_1side_8__2side_4__3side_1_4side_1_5s1 = [5, 2, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 2, 5]
pyramid_1side_8__2side_4__3side_2_4side_1_5s1 = [5, 3, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 3, 5]
pyramid_1side_8__2side_4__3side_3_4side_1_5s1 = [5, 3, 3, 2, 1, 1, 1, 1, 1, 1, 1, 2, 3, 3, 5]
pyramid_1side_8__2side_4__3side_4_4side_1_5s1 = [5, 3, 3, 3, 1, 1, 1, 1, 1, 1, 1, 3, 3, 3, 5]
pyramid_1side_8__2side_5__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 2, 5]
pyramid_1side_8__2side_5__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 3, 5]
pyramid_1side_8__2side_5__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 3, 3, 5]
pyramid_1side_8__2side_5__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 1, 1, 1, 1, 1, 2, 3, 3, 3, 5]
pyramid_1side_8__2side_5__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 1, 1, 1, 1, 1, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_6__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 2, 5]
pyramid_1side_8__2side_6__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 3, 5]
pyramid_1side_8__2side_6__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 1, 1, 1, 2, 2, 2, 3, 3, 5]
pyramid_1side_8__2side_6__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 1, 1, 1, 2, 2, 3, 3, 3, 5]
pyramid_1side_8__2side_6__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 1, 1, 1, 2, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_6__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 1, 1, 1, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_7__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 2, 5]
pyramid_1side_8__2side_7__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 3, 5]
pyramid_1side_8__2side_7__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 3, 3, 5]
pyramid_1side_8__2side_7__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 2, 1, 2, 2, 2, 3, 3, 3, 5]
pyramid_1side_8__2side_7__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 2, 1, 2, 2, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_7__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 2, 1, 2, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_7__3side_7_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 3, 1, 3, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_8__3side_1_4side_1_5s1 = [5, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5]
pyramid_1side_8__2side_8__3side_2_4side_1_5s1 = [5, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 5]
pyramid_1side_8__2side_8__3side_3_4side_1_5s1 = [5, 3, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 3, 5]
pyramid_1side_8__2side_8__3side_4_4side_1_5s1 = [5, 3, 3, 3, 2, 2, 2, 2, 2, 2, 2, 3, 3, 3, 5]
pyramid_1side_8__2side_8__3side_5_4side_1_5s1 = [5, 3, 3, 3, 3, 2, 2, 2, 2, 2, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_8__3side_6_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 2, 2, 2, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_8__3side_7_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_8__3side_8_4side_1_5s1 = [5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5]
pyramid_1side_8__2side_2__3side_2_4side_2_5s1 = [5, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 4, 5]
pyramid_1side_8__2side_3__3side_2_4side_2_5s1 = [5, 4, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 4, 5]
pyramid_1side_8__2side_3__3side_3_4side_2_5s1 = [5, 4, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 4, 5]
pyramid_1side_8__2side_4__3side_2_4side_2_5s1 = [5, 4, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 4, 5]
pyramid_1side_8__2side_4__3side_3_4side_2_5s1 = [5, 4, 3, 2, 1, 1, 1, 1, 1, 1, 1, 2, 3, 4, 5]
pyramid_1side_8__2side_4__3side_4_4side_2_5s1 = [5, 4, 3, 3, 1, 1, 1, 1, 1, 1, 1, 3, 3, 4, 5]
pyramid_1side_8__2side_5__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 4, 5]
pyramid_1side_8__2side_5__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 3, 4, 5]
pyramid_1side_8__2side_5__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 1, 1, 1, 1, 1, 2, 3, 3, 4, 5]
pyramid_1side_8__2side_5__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 1, 1, 1, 1, 1, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_6__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 4, 5]
pyramid_1side_8__2side_6__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 1, 1, 1, 2, 2, 2, 3, 4, 5]
pyramid_1side_8__2side_6__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 1, 1, 1, 2, 2, 3, 3, 4, 5]
pyramid_1side_8__2side_6__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 1, 1, 1, 2, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_6__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 1, 1, 1, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_7__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 4, 5]
pyramid_1side_8__2side_7__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 3, 4, 5]
pyramid_1side_8__2side_7__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 2, 1, 2, 2, 2, 3, 3, 4, 5]
pyramid_1side_8__2side_7__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 2, 1, 2, 2, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_7__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 2, 1, 2, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 3, 1, 3, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_8__3side_2_4side_2_5s1 = [5, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4, 5]
pyramid_1side_8__2side_8__3side_3_4side_2_5s1 = [5, 4, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 4, 5]
pyramid_1side_8__2side_8__3side_4_4side_2_5s1 = [5, 4, 3, 3, 2, 2, 2, 2, 2, 2, 2, 3, 3, 4, 5]
pyramid_1side_8__2side_8__3side_5_4side_2_5s1 = [5, 4, 3, 3, 3, 2, 2, 2, 2, 2, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_8__3side_6_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 2, 2, 2, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_2_5s1 = [5, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 5]
pyramid_1side_8__2side_3__3side_3_4side_3_5s1 = [5, 4, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 4, 4, 5]
pyramid_1side_8__2side_4__3side_3_4side_3_5s1 = [5, 4, 4, 2, 1, 1, 1, 1, 1, 1, 1, 2, 4, 4, 5]
pyramid_1side_8__2side_4__3side_4_4side_3_5s1 = [5, 4, 4, 3, 1, 1, 1, 1, 1, 1, 1, 3, 4, 4, 5]
pyramid_1side_8__2side_5__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 1, 1, 1, 1, 1, 2, 2, 4, 4, 5]
pyramid_1side_8__2side_5__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 1, 1, 1, 1, 1, 2, 3, 4, 4, 5]
pyramid_1side_8__2side_5__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 1, 1, 1, 1, 1, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_6__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 1, 1, 1, 2, 2, 2, 4, 4, 5]
pyramid_1side_8__2side_6__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 1, 1, 1, 2, 2, 3, 4, 4, 5]
pyramid_1side_8__2side_6__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 1, 1, 1, 2, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_6__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 1, 1, 1, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_7__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 2, 1, 2, 2, 2, 2, 4, 4, 5]
pyramid_1side_8__2side_7__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 2, 1, 2, 2, 2, 3, 4, 4, 5]
pyramid_1side_8__2side_7__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 2, 1, 2, 2, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_7__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 2, 1, 2, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 3, 1, 3, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_8__3side_3_4side_3_5s1 = [5, 4, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4, 4, 5]
pyramid_1side_8__2side_8__3side_4_4side_3_5s1 = [5, 4, 4, 3, 2, 2, 2, 2, 2, 2, 2, 3, 4, 4, 5]
pyramid_1side_8__2side_8__3side_5_4side_3_5s1 = [5, 4, 4, 3, 3, 2, 2, 2, 2, 2, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_8__3side_6_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 2, 2, 2, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 3, 2, 3, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_3_5s1 = [5, 4, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 4, 5]
pyramid_1side_8__2side_4__3side_4_4side_4_5s1 = [5, 4, 4, 4, 1, 1, 1, 1, 1, 1, 1, 4, 4, 4, 5]
pyramid_1side_8__2side_5__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 1, 1, 1, 1, 1, 2, 4, 4, 4, 5]
pyramid_1side_8__2side_5__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 1, 1, 1, 1, 1, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 1, 1, 1, 2, 2, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 1, 1, 1, 2, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 1, 1, 1, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 2, 1, 2, 2, 2, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 2, 1, 2, 2, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 2, 1, 2, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 3, 1, 3, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_4_4side_4_5s1 = [5, 4, 4, 4, 2, 2, 2, 2, 2, 2, 2, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_5_4side_4_5s1 = [5, 4, 4, 4, 3, 2, 2, 2, 2, 2, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_6_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 2, 2, 2, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 3, 2, 3, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_4_5s1 = [5, 4, 4, 4, 3, 3, 3, 3, 3, 3, 3, 4, 4, 4, 5]
pyramid_1side_8__2side_5__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 1, 1, 1, 1, 1, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 1, 1, 1, 2, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 1, 1, 1, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 2, 1, 2, 2, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 2, 1, 2, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 3, 1, 3, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_5_4side_5_5s1 = [5, 4, 4, 4, 4, 2, 2, 2, 2, 2, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_6_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 2, 2, 2, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 3, 2, 3, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_5_5s1 = [5, 4, 4, 4, 4, 3, 3, 3, 3, 3, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_6__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 1, 1, 1, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 2, 1, 2, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 3, 1, 3, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 2, 2, 2, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 3, 2, 3, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s1 = [5, 4, 4, 4, 4, 4, 3, 3, 3, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s1 = [5, 4, 4, 4, 4, 4, 4, 1, 4, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s1 = [5, 4, 4, 4, 4, 4, 4, 2, 4, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s1 = [5, 4, 4, 4, 4, 4, 4, 3, 4, 4, 4, 4, 4, 4, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s1 = [5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5]
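# Quick tally check for the "side8 OK 120" style counts (a sketch added for
# verification, not part of the original pipeline): with 1side fixed at 8 under
# 5side1, the (2side, 3side, 4side) values form a non-increasing triple drawn
# from 1..8, i.e. a multiset of size 3 from 8 options: C(8+3-1, 3) = 120.
from itertools import combinations_with_replacement
assert len(list(combinations_with_replacement(range(1, 9), 3))) == 120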
##################################
### 5side2
##################################
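# (Convention note: in a 5sideN section the first N entries, and by mirror
#  symmetry the last N, of every table are pinned to 5; the remaining positions
#  follow the same 1side/2side/3side/4side thresholds as in earlier sections.)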
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
pyramid_1side_2__2side_2__3side_2_4side_2_5s2 = [5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
pyramid_1side_3__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 5, 5]
pyramid_1side_3__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 5, 5]
pyramid_1side_3__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 5, 5]
pyramid_1side_3__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 5, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side3 OK 10
pyramid_1side_4__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 1, 0, 0, 0, 0, 0, 0, 0, 1, 1, 5, 5]
pyramid_1side_4__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 1, 0, 0, 0, 0, 0, 0, 0, 1, 2, 5, 5]
pyramid_1side_4__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 1, 0, 0, 0, 0, 0, 0, 0, 1, 3, 5, 5]
pyramid_1side_4__2side_4__3side_2_4side_2_5s2 = [5, 5, 2, 2, 0, 0, 0, 0, 0, 0, 0, 2, 2, 5, 5]
pyramid_1side_4__2side_4__3side_3_4side_2_5s2 = [5, 5, 3, 2, 0, 0, 0, 0, 0, 0, 0, 2, 3, 5, 5]
pyramid_1side_4__2side_4__3side_4_4side_2_5s2 = [5, 5, 3, 3, 0, 0, 0, 0, 0, 0, 0, 3, 3, 5, 5]
pyramid_1side_4__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 1, 0, 0, 0, 0, 0, 0, 0, 1, 4, 5, 5]
pyramid_1side_4__2side_4__3side_3_4side_3_5s2 = [5, 5, 4, 2, 0, 0, 0, 0, 0, 0, 0, 2, 4, 5, 5]
pyramid_1side_4__2side_4__3side_4_4side_3_5s2 = [5, 5, 4, 3, 0, 0, 0, 0, 0, 0, 0, 3, 4, 5, 5]
pyramid_1side_4__2side_4__3side_4_4side_4_5s2 = [5, 5, 4, 4, 0, 0, 0, 0, 0, 0, 0, 4, 4, 5, 5]
# 1 3 6 "10" 15 21 28 36 45 55
# side4 OK 20
pyramid_1side_5__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 1, 1, 0, 0, 0, 0, 0, 1, 1, 1, 5, 5]
pyramid_1side_5__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 1, 1, 0, 0, 0, 0, 0, 1, 1, 2, 5, 5]
pyramid_1side_5__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 1, 1, 0, 0, 0, 0, 0, 1, 1, 3, 5, 5]
pyramid_1side_5__2side_4__3side_2_4side_2_5s2 = [5, 5, 2, 2, 1, 0, 0, 0, 0, 0, 1, 2, 2, 5, 5]
pyramid_1side_5__2side_4__3side_3_4side_2_5s2 = [5, 5, 3, 2, 1, 0, 0, 0, 0, 0, 1, 2, 3, 5, 5]
pyramid_1side_5__2side_4__3side_4_4side_2_5s2 = [5, 5, 3, 3, 1, 0, 0, 0, 0, 0, 1, 3, 3, 5, 5]
pyramid_1side_5__2side_5__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 0, 0, 0, 0, 0, 2, 2, 2, 5, 5]
pyramid_1side_5__2side_5__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 0, 0, 0, 0, 0, 2, 2, 3, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 0, 0, 0, 0, 0, 2, 3, 3, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 0, 0, 0, 0, 0, 3, 3, 3, 5, 5]
pyramid_1side_5__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 1, 1, 0, 0, 0, 0, 0, 1, 1, 4, 5, 5]
pyramid_1side_5__2side_4__3side_3_4side_3_5s2 = [5, 5, 4, 2, 1, 0, 0, 0, 0, 0, 1, 2, 4, 5, 5]
pyramid_1side_5__2side_4__3side_4_4side_3_5s2 = [5, 5, 4, 3, 1, 0, 0, 0, 0, 0, 1, 3, 4, 5, 5]
pyramid_1side_5__2side_5__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 0, 0, 0, 0, 0, 2, 2, 4, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 0, 0, 0, 0, 0, 2, 3, 4, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 0, 0, 0, 0, 0, 3, 3, 4, 5, 5]
pyramid_1side_5__2side_4__3side_4_4side_4_5s2 = [5, 5, 4, 4, 1, 0, 0, 0, 0, 0, 1, 4, 4, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 0, 0, 0, 0, 0, 2, 4, 4, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 0, 0, 0, 0, 0, 3, 4, 4, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 0, 0, 0, 0, 0, 4, 4, 4, 5, 5]
# 1 3 6 10 "15" 21 28 36 45 55
# side5 OK 35
pyramid_1side_6__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 1, 1, 1, 0, 0, 0, 1, 1, 1, 1, 5, 5]
pyramid_1side_6__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 1, 1, 1, 0, 0, 0, 1, 1, 1, 2, 5, 5]
pyramid_1side_6__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 1, 1, 1, 0, 0, 0, 1, 1, 1, 3, 5, 5]
pyramid_1side_6__2side_4__3side_2_4side_2_5s2 = [5, 5, 2, 2, 1, 1, 0, 0, 0, 1, 1, 2, 2, 5, 5]
pyramid_1side_6__2side_4__3side_3_4side_2_5s2 = [5, 5, 3, 2, 1, 1, 0, 0, 0, 1, 1, 2, 3, 5, 5]
pyramid_1side_6__2side_4__3side_4_4side_2_5s2 = [5, 5, 3, 3, 1, 1, 0, 0, 0, 1, 1, 3, 3, 5, 5]
pyramid_1side_6__2side_5__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 1, 0, 0, 0, 1, 2, 2, 2, 5, 5]
pyramid_1side_6__2side_5__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 1, 0, 0, 0, 1, 2, 2, 3, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 1, 0, 0, 0, 1, 2, 3, 3, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 1, 0, 0, 0, 1, 3, 3, 3, 5, 5]
pyramid_1side_6__2side_6__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 0, 0, 0, 2, 2, 2, 2, 5, 5]
pyramid_1side_6__2side_6__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 0, 0, 0, 2, 2, 2, 3, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 0, 0, 0, 2, 2, 3, 3, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 0, 0, 0, 2, 3, 3, 3, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 0, 0, 0, 3, 3, 3, 3, 5, 5]
pyramid_1side_6__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 1, 1, 1, 0, 0, 0, 1, 1, 1, 4, 5, 5]
pyramid_1side_6__2side_4__3side_3_4side_3_5s2 = [5, 5, 4, 2, 1, 1, 0, 0, 0, 1, 1, 2, 4, 5, 5]
pyramid_1side_6__2side_4__3side_4_4side_3_5s2 = [5, 5, 4, 3, 1, 1, 0, 0, 0, 1, 1, 3, 4, 5, 5]
pyramid_1side_6__2side_5__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 1, 0, 0, 0, 1, 2, 2, 4, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 1, 0, 0, 0, 1, 2, 3, 4, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 1, 0, 0, 0, 1, 3, 3, 4, 5, 5]
pyramid_1side_6__2side_6__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 0, 0, 0, 2, 2, 2, 4, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 0, 0, 0, 2, 2, 3, 4, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 0, 0, 0, 2, 3, 3, 4, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 0, 0, 0, 3, 3, 3, 4, 5, 5]
pyramid_1side_6__2side_4__3side_4_4side_4_5s2 = [5, 5, 4, 4, 1, 1, 0, 0, 0, 1, 1, 4, 4, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 1, 0, 0, 0, 1, 2, 4, 4, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 1, 0, 0, 0, 1, 3, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 0, 0, 0, 2, 2, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 0, 0, 0, 2, 3, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 0, 0, 0, 3, 3, 4, 4, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 1, 0, 0, 0, 1, 4, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 0, 0, 0, 2, 4, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 0, 0, 0, 3, 4, 4, 4, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 0, 0, 0, 4, 4, 4, 4, 5, 5]
# 1 3 6 10 15 "21" 28 36 45 55
# side6 OK 56
pyramid_1side_7__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 5, 5]
pyramid_1side_7__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 1, 1, 1, 1, 0, 1, 1, 1, 1, 2, 5, 5]
pyramid_1side_7__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 1, 1, 1, 1, 0, 1, 1, 1, 1, 3, 5, 5]
pyramid_1side_7__2side_4__3side_2_4side_2_5s2 = [5, 5, 2, 2, 1, 1, 1, 0, 1, 1, 1, 2, 2, 5, 5]
pyramid_1side_7__2side_4__3side_3_4side_2_5s2 = [5, 5, 3, 2, 1, 1, 1, 0, 1, 1, 1, 2, 3, 5, 5]
pyramid_1side_7__2side_4__3side_4_4side_2_5s2 = [5, 5, 3, 3, 1, 1, 1, 0, 1, 1, 1, 3, 3, 5, 5]
pyramid_1side_7__2side_5__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 1, 1, 0, 1, 1, 2, 2, 2, 5, 5]
pyramid_1side_7__2side_5__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 1, 1, 0, 1, 1, 2, 2, 3, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 1, 1, 0, 1, 1, 2, 3, 3, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 1, 1, 0, 1, 1, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_6__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 1, 0, 1, 2, 2, 2, 2, 5, 5]
pyramid_1side_7__2side_6__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 1, 0, 1, 2, 2, 2, 3, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 1, 0, 1, 2, 2, 3, 3, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 1, 0, 1, 2, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 1, 0, 1, 3, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_7__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 2, 0, 2, 2, 2, 2, 2, 5, 5]
pyramid_1side_7__2side_7__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 2, 0, 2, 2, 2, 2, 3, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 2, 0, 2, 2, 2, 3, 3, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 2, 0, 2, 2, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 2, 0, 2, 3, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 3, 0, 3, 3, 3, 3, 3, 5, 5]
pyramid_1side_7__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 1, 1, 1, 1, 0, 1, 1, 1, 1, 4, 5, 5]
pyramid_1side_7__2side_4__3side_3_4side_3_5s2 = [5, 5, 4, 2, 1, 1, 1, 0, 1, 1, 1, 2, 4, 5, 5]
pyramid_1side_7__2side_4__3side_4_4side_3_5s2 = [5, 5, 4, 3, 1, 1, 1, 0, 1, 1, 1, 3, 4, 5, 5]
pyramid_1side_7__2side_5__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 1, 1, 0, 1, 1, 2, 2, 4, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 1, 1, 0, 1, 1, 2, 3, 4, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 1, 1, 0, 1, 1, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_6__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 1, 0, 1, 2, 2, 2, 4, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 1, 0, 1, 2, 2, 3, 4, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 1, 0, 1, 2, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 1, 0, 1, 3, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_7__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 2, 0, 2, 2, 2, 2, 4, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 2, 0, 2, 2, 2, 3, 4, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 2, 0, 2, 2, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 2, 0, 2, 3, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 3, 0, 3, 3, 3, 3, 4, 5, 5]
pyramid_1side_7__2side_4__3side_4_4side_4_5s2 = [5, 5, 4, 4, 1, 1, 1, 0, 1, 1, 1, 4, 4, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 1, 1, 0, 1, 1, 2, 4, 4, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 1, 1, 0, 1, 1, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 1, 0, 1, 2, 2, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 1, 0, 1, 2, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 1, 0, 1, 3, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 2, 0, 2, 2, 2, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 2, 0, 2, 2, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 2, 0, 2, 3, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 3, 0, 3, 3, 3, 4, 4, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 1, 1, 0, 1, 1, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 1, 0, 1, 2, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 1, 0, 1, 3, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 2, 0, 2, 2, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 2, 0, 2, 3, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 3, 0, 3, 3, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 1, 0, 1, 4, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 2, 0, 2, 4, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 3, 0, 3, 4, 4, 4, 4, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s2 = [5, 5, 4, 4, 4, 4, 4, 0, 4, 4, 4, 4, 4, 5, 5]
# 1 3 6 10 15 21 "28" 36 45 55
# side7 OK 84
pyramid_1side_8__2side_2__3side_2_4side_2_5s2 = [5, 5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 5]
pyramid_1side_8__2side_3__3side_2_4side_2_5s2 = [5, 5, 2, 1, 1, 1, 1, 1, 1, 1, 1, 1, 2, 5, 5]
pyramid_1side_8__2side_3__3side_3_4side_2_5s2 = [5, 5, 3, 1, 1, 1, 1, 1, 1, 1, 1, 1, 3, 5, 5]
pyramid_1side_8__2side_4__3side_2_4side_2_5s2 = [5, 5, 2, 2, 1, 1, 1, 1, 1, 1, 1, 2, 2, 5, 5]
pyramid_1side_8__2side_4__3side_3_4side_2_5s2 = [5, 5, 3, 2, 1, 1, 1, 1, 1, 1, 1, 2, 3, 5, 5]
pyramid_1side_8__2side_4__3side_4_4side_2_5s2 = [5, 5, 3, 3, 1, 1, 1, 1, 1, 1, 1, 3, 3, 5, 5]
pyramid_1side_8__2side_5__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 1, 1, 1, 1, 1, 2, 2, 2, 5, 5]
pyramid_1side_8__2side_5__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 1, 1, 1, 1, 1, 2, 2, 3, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 1, 1, 1, 1, 1, 2, 3, 3, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 1, 1, 1, 1, 1, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_6__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 1, 1, 1, 2, 2, 2, 2, 5, 5]
pyramid_1side_8__2side_6__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 1, 1, 1, 2, 2, 2, 3, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 1, 1, 1, 2, 2, 3, 3, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 1, 1, 1, 2, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 1, 1, 1, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_7__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 2, 1, 2, 2, 2, 2, 2, 5, 5]
pyramid_1side_8__2side_7__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 2, 1, 2, 2, 2, 2, 3, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 2, 1, 2, 2, 2, 3, 3, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 2, 1, 2, 2, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 2, 1, 2, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 3, 1, 3, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_8__3side_2_4side_2_5s2 = [5, 5, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5, 5]
pyramid_1side_8__2side_8__3side_3_4side_2_5s2 = [5, 5, 3, 2, 2, 2, 2, 2, 2, 2, 2, 2, 3, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_2_5s2 = [5, 5, 3, 3, 2, 2, 2, 2, 2, 2, 2, 3, 3, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_2_5s2 = [5, 5, 3, 3, 3, 2, 2, 2, 2, 2, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 2, 2, 2, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 3, 2, 3, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_2_5s2 = [5, 5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 5]
pyramid_1side_8__2side_3__3side_3_4side_3_5s2 = [5, 5, 4, 1, 1, 1, 1, 1, 1, 1, 1, 1, 4, 5, 5]
pyramid_1side_8__2side_4__3side_3_4side_3_5s2 = [5, 5, 4, 2, 1, 1, 1, 1, 1, 1, 1, 2, 4, 5, 5]
pyramid_1side_8__2side_4__3side_4_4side_3_5s2 = [5, 5, 4, 3, 1, 1, 1, 1, 1, 1, 1, 3, 4, 5, 5]
pyramid_1side_8__2side_5__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 1, 1, 1, 1, 1, 2, 2, 4, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 1, 1, 1, 1, 1, 2, 3, 4, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 1, 1, 1, 1, 1, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_6__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 1, 1, 1, 2, 2, 2, 4, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 1, 1, 1, 2, 2, 3, 4, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 1, 1, 1, 2, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 1, 1, 1, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_7__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 2, 1, 2, 2, 2, 2, 4, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 2, 1, 2, 2, 2, 3, 4, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 2, 1, 2, 2, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 2, 1, 2, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 3, 1, 3, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_8__3side_3_4side_3_5s2 = [5, 5, 4, 2, 2, 2, 2, 2, 2, 2, 2, 2, 4, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_3_5s2 = [5, 5, 4, 3, 2, 2, 2, 2, 2, 2, 2, 3, 4, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_3_5s2 = [5, 5, 4, 3, 3, 2, 2, 2, 2, 2, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 2, 2, 2, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 3, 2, 3, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_3_5s2 = [5, 5, 4, 3, 3, 3, 3, 3, 3, 3, 3, 3, 4, 5, 5]
pyramid_1side_8__2side_4__3side_4_4side_4_5s2 = [5, 5, 4, 4, 1, 1, 1, 1, 1, 1, 1, 4, 4, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 1, 1, 1, 1, 1, 2, 4, 4, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 1, 1, 1, 1, 1, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 1, 1, 1, 2, 2, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 1, 1, 1, 2, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 1, 1, 1, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 2, 1, 2, 2, 2, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 2, 1, 2, 2, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 2, 1, 2, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 3, 1, 3, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_4_5s2 = [5, 5, 4, 4, 2, 2, 2, 2, 2, 2, 2, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_4_5s2 = [5, 5, 4, 4, 3, 2, 2, 2, 2, 2, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 2, 2, 2, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 3, 2, 3, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_4_5s2 = [5, 5, 4, 4, 3, 3, 3, 3, 3, 3, 3, 4, 4, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 1, 1, 1, 1, 1, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 1, 1, 1, 2, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 1, 1, 1, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 2, 1, 2, 2, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 2, 1, 2, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 3, 1, 3, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_5_5s2 = [5, 5, 4, 4, 4, 2, 2, 2, 2, 2, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 2, 2, 2, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 3, 2, 3, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_5_5s2 = [5, 5, 4, 4, 4, 3, 3, 3, 3, 3, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 1, 1, 1, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 2, 1, 2, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 3, 1, 3, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 2, 2, 2, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 3, 2, 3, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s2 = [5, 5, 4, 4, 4, 4, 3, 3, 3, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s2 = [5, 5, 4, 4, 4, 4, 4, 1, 4, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s2 = [5, 5, 4, 4, 4, 4, 4, 2, 4, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s2 = [5, 5, 4, 4, 4, 4, 4, 3, 4, 4, 4, 4, 4, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s2 = [5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5]
##################################
### 5side3
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
pyramid_1side_3__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 5, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
pyramid_1side_4__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 1, 0, 0, 0, 0, 0, 0, 0, 1, 5, 5, 5]
pyramid_1side_4__2side_4__3side_3_4side_3_5s3 = [5, 5, 5, 2, 0, 0, 0, 0, 0, 0, 0, 2, 5, 5, 5]
pyramid_1side_4__2side_4__3side_4_4side_3_5s3 = [5, 5, 5, 3, 0, 0, 0, 0, 0, 0, 0, 3, 5, 5, 5]
pyramid_1side_4__2side_4__3side_4_4side_4_5s3 = [5, 5, 5, 4, 0, 0, 0, 0, 0, 0, 0, 4, 5, 5, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side3 OK 10
pyramid_1side_5__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 1, 1, 0, 0, 0, 0, 0, 1, 1, 5, 5, 5]
pyramid_1side_5__2side_4__3side_3_4side_3_5s3 = [5, 5, 5, 2, 1, 0, 0, 0, 0, 0, 1, 2, 5, 5, 5]
pyramid_1side_5__2side_4__3side_4_4side_3_5s3 = [5, 5, 5, 3, 1, 0, 0, 0, 0, 0, 1, 3, 5, 5, 5]
pyramid_1side_5__2side_5__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 0, 0, 0, 0, 0, 2, 2, 5, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 0, 0, 0, 0, 0, 2, 3, 5, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 0, 0, 0, 0, 0, 3, 3, 5, 5, 5]
pyramid_1side_5__2side_4__3side_4_4side_4_5s3 = [5, 5, 5, 4, 1, 0, 0, 0, 0, 0, 1, 4, 5, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 0, 0, 0, 0, 0, 2, 4, 5, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 0, 0, 0, 0, 0, 3, 4, 5, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 0, 0, 0, 0, 0, 4, 4, 5, 5, 5]
# 1 3 6 "10" 15 21 28 36 45 55
# side4 OK 20
pyramid_1side_6__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 1, 1, 1, 0, 0, 0, 1, 1, 1, 5, 5, 5]
pyramid_1side_6__2side_4__3side_3_4side_3_5s3 = [5, 5, 5, 2, 1, 1, 0, 0, 0, 1, 1, 2, 5, 5, 5]
pyramid_1side_6__2side_4__3side_4_4side_3_5s3 = [5, 5, 5, 3, 1, 1, 0, 0, 0, 1, 1, 3, 5, 5, 5]
pyramid_1side_6__2side_5__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 1, 0, 0, 0, 1, 2, 2, 5, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 1, 0, 0, 0, 1, 2, 3, 5, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 1, 0, 0, 0, 1, 3, 3, 5, 5, 5]
pyramid_1side_6__2side_6__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 0, 0, 0, 2, 2, 2, 5, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 0, 0, 0, 2, 2, 3, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 0, 0, 0, 2, 3, 3, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 0, 0, 0, 3, 3, 3, 5, 5, 5]
pyramid_1side_6__2side_4__3side_4_4side_4_5s3 = [5, 5, 5, 4, 1, 1, 0, 0, 0, 1, 1, 4, 5, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 1, 0, 0, 0, 1, 2, 4, 5, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 1, 0, 0, 0, 1, 3, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 0, 0, 0, 2, 2, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 0, 0, 0, 2, 3, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 0, 0, 0, 3, 3, 4, 5, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 1, 0, 0, 0, 1, 4, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 0, 0, 0, 2, 4, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 0, 0, 0, 3, 4, 4, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 0, 0, 0, 4, 4, 4, 5, 5, 5]
# 1 3 6 10 "15" 21 28 36 45 55
# side5 OK 35
pyramid_1side_7__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 1, 1, 1, 1, 0, 1, 1, 1, 1, 5, 5, 5]
pyramid_1side_7__2side_4__3side_3_4side_3_5s3 = [5, 5, 5, 2, 1, 1, 1, 0, 1, 1, 1, 2, 5, 5, 5]
pyramid_1side_7__2side_4__3side_4_4side_3_5s3 = [5, 5, 5, 3, 1, 1, 1, 0, 1, 1, 1, 3, 5, 5, 5]
pyramid_1side_7__2side_5__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 1, 1, 0, 1, 1, 2, 2, 5, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 1, 1, 0, 1, 1, 2, 3, 5, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 1, 1, 0, 1, 1, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_6__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 1, 0, 1, 2, 2, 2, 5, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 1, 0, 1, 2, 2, 3, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 1, 0, 1, 2, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 1, 0, 1, 3, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_7__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 2, 0, 2, 2, 2, 2, 5, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 2, 0, 2, 2, 2, 3, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 2, 0, 2, 2, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 2, 0, 2, 3, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 3, 0, 3, 3, 3, 3, 5, 5, 5]
pyramid_1side_7__2side_4__3side_4_4side_4_5s3 = [5, 5, 5, 4, 1, 1, 1, 0, 1, 1, 1, 4, 5, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 1, 1, 0, 1, 1, 2, 4, 5, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 1, 1, 0, 1, 1, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 1, 0, 1, 2, 2, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 1, 0, 1, 2, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 1, 0, 1, 3, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 2, 0, 2, 2, 2, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 2, 0, 2, 2, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 2, 0, 2, 3, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 3, 0, 3, 3, 3, 4, 5, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 1, 1, 0, 1, 1, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 1, 0, 1, 2, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 1, 0, 1, 3, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 2, 0, 2, 2, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 2, 0, 2, 3, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 3, 0, 3, 3, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 1, 0, 1, 4, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 2, 0, 2, 4, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 3, 0, 3, 4, 4, 4, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s3 = [5, 5, 5, 4, 4, 4, 4, 0, 4, 4, 4, 4, 5, 5, 5]
# 1 3 6 10 15 "21" 28 36 45 55
# side6 OK 56
pyramid_1side_8__2side_3__3side_3_4side_3_5s3 = [5, 5, 5, 1, 1, 1, 1, 1, 1, 1, 1, 1, 5, 5, 5]
pyramid_1side_8__2side_4__3side_3_4side_3_5s3 = [5, 5, 5, 2, 1, 1, 1, 1, 1, 1, 1, 2, 5, 5, 5]
pyramid_1side_8__2side_4__3side_4_4side_3_5s3 = [5, 5, 5, 3, 1, 1, 1, 1, 1, 1, 1, 3, 5, 5, 5]
pyramid_1side_8__2side_5__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 1, 1, 1, 1, 1, 2, 2, 5, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 1, 1, 1, 1, 1, 2, 3, 5, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 1, 1, 1, 1, 1, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_6__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 1, 1, 1, 2, 2, 2, 5, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 1, 1, 1, 2, 2, 3, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 1, 1, 1, 2, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 1, 1, 1, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_7__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 2, 1, 2, 2, 2, 2, 5, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 2, 1, 2, 2, 2, 3, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 2, 1, 2, 2, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 2, 1, 2, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 3, 1, 3, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_8__3side_3_4side_3_5s3 = [5, 5, 5, 2, 2, 2, 2, 2, 2, 2, 2, 2, 5, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_3_5s3 = [5, 5, 5, 3, 2, 2, 2, 2, 2, 2, 2, 3, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_3_5s3 = [5, 5, 5, 3, 3, 2, 2, 2, 2, 2, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 2, 2, 2, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 3, 2, 3, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_3_5s3 = [5, 5, 5, 3, 3, 3, 3, 3, 3, 3, 3, 3, 5, 5, 5]
pyramid_1side_8__2side_4__3side_4_4side_4_5s3 = [5, 5, 5, 4, 1, 1, 1, 1, 1, 1, 1, 4, 5, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 1, 1, 1, 1, 1, 2, 4, 5, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 1, 1, 1, 1, 1, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 1, 1, 1, 2, 2, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 1, 1, 1, 2, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 1, 1, 1, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 2, 1, 2, 2, 2, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 2, 1, 2, 2, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 2, 1, 2, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 3, 1, 3, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_4_5s3 = [5, 5, 5, 4, 2, 2, 2, 2, 2, 2, 2, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_4_5s3 = [5, 5, 5, 4, 3, 2, 2, 2, 2, 2, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 2, 2, 2, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 3, 2, 3, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_4_5s3 = [5, 5, 5, 4, 3, 3, 3, 3, 3, 3, 3, 4, 5, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 1, 1, 1, 1, 1, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 1, 1, 1, 2, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 1, 1, 1, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 2, 1, 2, 2, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 2, 1, 2, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 3, 1, 3, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_5_5s3 = [5, 5, 5, 4, 4, 2, 2, 2, 2, 2, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 2, 2, 2, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 3, 2, 3, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_5_5s3 = [5, 5, 5, 4, 4, 3, 3, 3, 3, 3, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 1, 1, 1, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 2, 1, 2, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 3, 1, 3, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 2, 2, 2, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 3, 2, 3, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s3 = [5, 5, 5, 4, 4, 4, 3, 3, 3, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s3 = [5, 5, 5, 4, 4, 4, 4, 1, 4, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s3 = [5, 5, 5, 4, 4, 4, 4, 2, 4, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s3 = [5, 5, 5, 4, 4, 4, 4, 3, 4, 4, 4, 4, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s3 = [5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5]
##################################
### 5side4
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
pyramid_1side_4__2side_4__3side_4_4side_4_5s4 = [5, 5, 5, 5, 0, 0, 0, 0, 0, 0, 0, 5, 5, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
pyramid_1side_5__2side_4__3side_4_4side_4_5s4 = [5, 5, 5, 5, 1, 0, 0, 0, 0, 0, 1, 5, 5, 5, 5]
pyramid_1side_5__2side_5__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 0, 0, 0, 0, 0, 2, 5, 5, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 0, 0, 0, 0, 0, 3, 5, 5, 5, 5]
pyramid_1side_5__2side_5__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 0, 0, 0, 0, 0, 4, 5, 5, 5, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side3 OK 10
pyramid_1side_6__2side_4__3side_4_4side_4_5s4 = [5, 5, 5, 5, 1, 1, 0, 0, 0, 1, 1, 5, 5, 5, 5]
pyramid_1side_6__2side_5__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 1, 0, 0, 0, 1, 2, 5, 5, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 1, 0, 0, 0, 1, 3, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 0, 0, 0, 2, 2, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 0, 0, 0, 2, 3, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 0, 0, 0, 3, 3, 5, 5, 5, 5]
pyramid_1side_6__2side_5__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 1, 0, 0, 0, 1, 4, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 0, 0, 0, 2, 4, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 0, 0, 0, 3, 4, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 0, 0, 0, 4, 4, 5, 5, 5, 5]
# 1 3 6 "10" 15 21 28 36 45 55
# side4 OK 20
pyramid_1side_7__2side_4__3side_4_4side_4_5s4 = [5, 5, 5, 5, 1, 1, 1, 0, 1, 1, 1, 5, 5, 5, 5]
pyramid_1side_7__2side_5__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 1, 1, 0, 1, 1, 2, 5, 5, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 1, 1, 0, 1, 1, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 1, 0, 1, 2, 2, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 1, 0, 1, 2, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 1, 0, 1, 3, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 2, 0, 2, 2, 2, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 2, 0, 2, 2, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 2, 0, 2, 3, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 3, 0, 3, 3, 3, 5, 5, 5, 5]
pyramid_1side_7__2side_5__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 1, 1, 0, 1, 1, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 1, 0, 1, 2, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 1, 0, 1, 3, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 2, 0, 2, 2, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 2, 0, 2, 3, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 3, 0, 3, 3, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 1, 0, 1, 4, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 2, 0, 2, 4, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 3, 0, 3, 4, 4, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s4 = [5, 5, 5, 5, 4, 4, 4, 0, 4, 4, 4, 5, 5, 5, 5]
# 1 3 6 10 "15" 21 28 36 45 55
# side5 OK 35
pyramid_1side_8__2side_4__3side_4_4side_4_5s4 = [5, 5, 5, 5, 1, 1, 1, 1, 1, 1, 1, 5, 5, 5, 5]
pyramid_1side_8__2side_5__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 1, 1, 1, 1, 1, 2, 5, 5, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 1, 1, 1, 1, 1, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 1, 1, 1, 2, 2, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 1, 1, 1, 2, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 1, 1, 1, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 2, 1, 2, 2, 2, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 2, 1, 2, 2, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 2, 1, 2, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 3, 1, 3, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_4_4side_4_5s4 = [5, 5, 5, 5, 2, 2, 2, 2, 2, 2, 2, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_4_5s4 = [5, 5, 5, 5, 3, 2, 2, 2, 2, 2, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 2, 2, 2, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 3, 2, 3, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_4_5s4 = [5, 5, 5, 5, 3, 3, 3, 3, 3, 3, 3, 5, 5, 5, 5]
pyramid_1side_8__2side_5__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 1, 1, 1, 1, 1, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 1, 1, 1, 2, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 1, 1, 1, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 2, 1, 2, 2, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 2, 1, 2, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 3, 1, 3, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_5_5s4 = [5, 5, 5, 5, 4, 2, 2, 2, 2, 2, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 2, 2, 2, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 3, 2, 3, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_5_5s4 = [5, 5, 5, 5, 4, 3, 3, 3, 3, 3, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 1, 1, 1, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 2, 1, 2, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 3, 1, 3, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 2, 2, 2, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 3, 2, 3, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s4 = [5, 5, 5, 5, 4, 4, 3, 3, 3, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s4 = [5, 5, 5, 5, 4, 4, 4, 1, 4, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s4 = [5, 5, 5, 5, 4, 4, 4, 2, 4, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s4 = [5, 5, 5, 5, 4, 4, 4, 3, 4, 4, 4, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s4 = [5, 5, 5, 5, 4, 4, 4, 4, 4, 4, 4, 5, 5, 5, 5]
##################################
### 5side5
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
pyramid_1side_5__2side_5__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 0, 0, 0, 0, 0, 5, 5, 5, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
pyramid_1side_6__2side_5__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 1, 0, 0, 0, 1, 5, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 0, 0, 0, 2, 5, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 0, 0, 0, 3, 5, 5, 5, 5, 5]
pyramid_1side_6__2side_6__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 0, 0, 0, 4, 5, 5, 5, 5, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side3 OK 10
pyramid_1side_7__2side_5__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 1, 1, 0, 1, 1, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 1, 0, 1, 2, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 1, 0, 1, 3, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 2, 0, 2, 2, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 2, 0, 2, 3, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 3, 0, 3, 3, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_6__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 1, 0, 1, 4, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 2, 0, 2, 4, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 3, 0, 3, 4, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s5 = [5, 5, 5, 5, 5, 4, 4, 0, 4, 4, 5, 5, 5, 5, 5]
# 1 3 6 "10" 15 21 28 36 45 55
# side4 OK 20
pyramid_1side_8__2side_5__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 1, 1, 1, 1, 1, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 1, 1, 1, 2, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 1, 1, 1, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 2, 1, 2, 2, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 2, 1, 2, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 3, 1, 3, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_5_4side_5_5s5 = [5, 5, 5, 5, 5, 2, 2, 2, 2, 2, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 2, 2, 2, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 3, 2, 3, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_5_5s5 = [5, 5, 5, 5, 5, 3, 3, 3, 3, 3, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_6__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 1, 1, 1, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 2, 1, 2, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 3, 1, 3, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 2, 2, 2, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 3, 2, 3, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s5 = [5, 5, 5, 5, 5, 4, 3, 3, 3, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s5 = [5, 5, 5, 5, 5, 4, 4, 1, 4, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s5 = [5, 5, 5, 5, 5, 4, 4, 2, 4, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s5 = [5, 5, 5, 5, 5, 4, 4, 3, 4, 4, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s5 = [5, 5, 5, 5, 5, 4, 4, 4, 4, 4, 5, 5, 5, 5, 5]
##################################
### 5side6
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
pyramid_1side_6__2side_6__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 0, 0, 0, 5, 5, 5, 5, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
pyramid_1side_7__2side_6__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 1, 0, 1, 5, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 2, 0, 2, 5, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 3, 0, 3, 5, 5, 5, 5, 5, 5]
pyramid_1side_7__2side_7__3side_7_4side_7_5s6 = [5, 5, 5, 5, 5, 5, 4, 0, 4, 5, 5, 5, 5, 5, 5]
# 1 3 "6" 10 15 21 28 36 45 55
# side3 OK 10
pyramid_1side_8__2side_6__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 1, 1, 1, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 2, 1, 2, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 3, 1, 3, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_6_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 2, 2, 2, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 3, 2, 3, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_6_5s6 = [5, 5, 5, 5, 5, 5, 3, 3, 3, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_7__3side_7_4side_7_5s6 = [5, 5, 5, 5, 5, 5, 4, 1, 4, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s6 = [5, 5, 5, 5, 5, 5, 4, 2, 4, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s6 = [5, 5, 5, 5, 5, 5, 4, 3, 4, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s6 = [5, 5, 5, 5, 5, 5, 4, 4, 4, 5, 5, 5, 5, 5, 5]
##################################
### 5side7
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
pyramid_1side_7__2side_7__3side_7_4side_7_5s7 = [5, 5, 5, 5, 5, 5, 5, 0, 5, 5, 5, 5, 5, 5, 5]
# 1 "3" 6 10 15 21 28 36 45 55
# side2 OK 4
pyramid_1side_8__2side_7__3side_7_4side_7_5s7 = [5, 5, 5, 5, 5, 5, 5, 1, 5, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_7_4side_7_5s7 = [5, 5, 5, 5, 5, 5, 5, 2, 5, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_7_5s7 = [5, 5, 5, 5, 5, 5, 5, 3, 5, 5, 5, 5, 5, 5, 5]
pyramid_1side_8__2side_8__3side_8_4side_8_5s7 = [5, 5, 5, 5, 5, 5, 5, 4, 5, 5, 5, 5, 5, 5, 5]
##################################
### 5side8
##################################
# "1" 3 6 10 15 21 28 36 45 55
# side1 OK 1
pyramid_1side_8__2side_8__3side_8_4side_8_5s8 = [5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5, 5]
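# --- Illustrative sanity check (an addition for clarity, not part of the
# original generation pipeline): every conv_block_num spec above is a
# 15-element palindrome -- with depth_level=7 that is 7 encoder levels,
# 1 bottleneck level and 7 mirrored decoder levels -- and in the 5sideN
# families listed here the outermost levels always keep the full 5 blocks.
# The helper name and its `base` argument are assumptions for illustration.
def _check_conv_block_num(spec, depth_level=7, base=5):
    assert len(spec) == 2 * depth_level + 1     # one entry per U-Net level
    assert spec == spec[::-1]                   # encoder/decoder symmetry
    assert spec[0] == spec[-1] == base          # outer levels keep all blocks

# e.g. _check_conv_block_num(pyramid_1side_8__2side_8__3side_8_4side_8_5s8)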
###############################################################################################################################################################################################
###############################################################################################################################################################################################
###############################################################################################################################################################################################
##################################
### 1side1
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_1__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
##################################
### 1side2
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_2__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_2__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_2__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_2__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
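# --- Illustrative refactor sketch (an assumption, not how this file is
# actually produced): every ch032_* builder below repeats the exact same
# KModel_builder chain and differs only in which pyramid_* spec it passes
# as conv_block_num, so a single hypothetical factory like _build_ch032
# could express the pattern once.  The hand-written definitions are kept
# as-is; this helper is only defined, never called here.
def _build_ch032(conv_block_num_spec):
    return (KModel_builder()
            .set_model_name(MODEL_NAME.flow_unet2)
            .set_unet3(out_conv_block=True, concat_before_down=True,
                       kernel_size=3, padding="valid", hid_ch=32,
                       depth_level=7, out_ch=1, unet_acti="sigmoid",
                       conv_block_num=conv_block_num_spec,
                       ch_upper_bound=2 ** 14)
            .set_gen_op(use_gen_op)
            .set_train_step(use_train_step))

# e.g. ch032_pyramid_1side_2__2side_2__3side_2_4side_2_5s2 could equally be
# written as _build_ch032(pyramid_1side_2__2side_2__3side_2_4side_2_5s2).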
##################################
### 1side3
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_3__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_3__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_3__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_3__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_3__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
##################################
### 1side4
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_4__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_4__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_4__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_pyramid_1side_4__2side_4__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_4__2side_4__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_4__2side_4__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
##################################
### 1side5
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_5__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_5__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_5__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_pyramid_1side_5__2side_4__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_4__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_4__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_pyramid_1side_5__2side_5__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_5__2side_5__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_5__2side_5__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
##################################
### 1side6
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_6__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_6__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_6__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_pyramid_1side_6__2side_4__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_4__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_4__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_pyramid_1side_6__2side_5__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_5__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_5__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 15 "21" 28 36 45 55
# 2side6 OK 56
ch032_pyramid_1side_6__2side_6__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_6__2side_6__3side_6_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_6__2side_6__3side_6_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
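# ---------------------------------------------------------------------------
# Note on the "OK n" counts above: for a fixed 2side value s2, the valid
# (3side, 4side, 5side) settings are the monotone triples
# 5side <= 4side <= 3side <= s2, i.e. size-3 multisets drawn from s2 values,
# which gives C(s2 + 2, 3) combinations: 1, 4, 10, 20, 35, 56 for s2 = 1..6,
# matching the 2side1..2side6 blocks enumerated above. A minimal sketch that
# reproduces the counts (`_count_pyramid_combos` is an illustrative, unused
# name):
def _count_pyramid_combos(side2):
    """Count monotone (3side, 4side, 5side) triples bounded above by side2."""
    return sum(1
               for s3 in range(1, side2 + 1)
               for s4 in range(1, s3 + 1)
               for s5 in range(1, s4 + 1))
# [_count_pyramid_combos(k) for k in range(1, 7)] -> [1, 4, 10, 20, 35, 56]
# ---------------------------------------------------------------------------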
##################################
### 1side7
##################################
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_7__2side_1__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_1__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 "3" 6 10 15 21 28 36 45 55
# 2side2 OK 4
ch032_pyramid_1side_7__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_7__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_pyramid_1side_7__2side_4__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_4__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_4__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
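# The counts in the "2side* OK ..." comments can be re-derived mechanically: for a fixed
# 2side = n, the variants listed are exactly the triples with
# 1 <= 5side <= 4side <= 3side <= n, and the row of triangular numbers is the per-3side
# tally (sum the entries up to the quoted one). Below is a minimal, self-contained
# verification sketch; it is illustrative only, and the helper name is not part of the
# KModel_builder API.
from itertools import product

def _count_pyramid_configs(n):
    """Number of (3side, 4side, 5side) triples with 1 <= 5side <= 4side <= 3side <= n."""
    return sum(1 for a, b, c in product(range(1, n + 1), repeat=3) if a >= b >= c)

if __name__ == "__main__":
    for n in (4, 5, 6, 7):
        # Non-increasing triples from {1..n} are multisets of size 3, i.e. C(n + 2, 3).
        assert _count_pyramid_configs(n) == (n + 2) * (n + 1) * n // 6
        print("2side%d: %d configs" % (n, _count_pyramid_configs(n)))  # 20, 35, 56, 84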
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_pyramid_1side_7__2side_5__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_5__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_5__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 15 "21" 28 36 45 55
# 2side6 OK 56
ch032_pyramid_1side_7__2side_6__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_6__3side_6_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_6__3side_6_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
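# Every registration in this section follows the same template, varying only the
# conv_block_num name, so a block like the one above could in principle be generated in
# a loop. This is purely a sketch under stated assumptions: it presumes the pyramid_*
# lists are module-level names (as their bare use above suggests), and the helper name
# _register_pyramid_models is hypothetical, not part of KModel_builder.
def _register_pyramid_models(two_side, namespace=None):
    ns = globals() if namespace is None else namespace
    for a in range(1, two_side + 1):          # 3side
        for b in range(1, a + 1):             # 4side
            for c in range(1, b + 1):         # 5side
                cfg = "pyramid_1side_7__2side_%d__3side_%d_4side_%d_5s%d" % (two_side, a, b, c)
                ns["ch032_" + cfg] = (
                    KModel_builder().set_model_name(MODEL_NAME.flow_unet2)
                    .set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3,
                               padding="valid", hid_ch=32, depth_level=7, out_ch=1,
                               unet_acti="sigmoid", conv_block_num=ns[cfg],
                               ch_upper_bound=2 ** 14)
                    .set_gen_op(use_gen_op).set_train_step(use_train_step))
# One reason to keep the explicit one-liners instead: every variant stays greppable by
# its full name, which a globals()-mutating loop would hide.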
# 1 3 6 10 15 21 "28" 36 45 55
# 2side7 OK 84
ch032_pyramid_1side_7__2side_7__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_7__2side_7__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_7__2side_7__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# Shared builder for the ch032 pyramid variants in this file: a flow_unet2
# model with 3x3 "valid"-padded convs, 32 hidden channels, depth level 7, one
# sigmoid output channel, and a per-layer channel cap of 2 ** 14. The variants
# differ only in conv_block_num, so that is the single parameter.
def _build_ch032_pyramid(conv_block_num):
    return (KModel_builder()
            .set_model_name(MODEL_NAME.flow_unet2)
            .set_unet3(out_conv_block=True, concat_before_down=True,
                       kernel_size=3, padding="valid",
                       hid_ch=32, depth_level=7, out_ch=1, unet_acti="sigmoid",
                       conv_block_num=conv_block_num, ch_upper_bound=2 ** 14)
            .set_gen_op(use_gen_op)
            .set_train_step(use_train_step))

ch032_pyramid_1side_7__2side_7__3side_5_4side_5_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_5_4side_5_5s1)
ch032_pyramid_1side_7__2side_7__3side_5_4side_5_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_5_4side_5_5s2)
ch032_pyramid_1side_7__2side_7__3side_5_4side_5_5s3 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_5_4side_5_5s3)
ch032_pyramid_1side_7__2side_7__3side_5_4side_5_5s4 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_5_4side_5_5s4)
ch032_pyramid_1side_7__2side_7__3side_5_4side_5_5s5 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_5_4side_5_5s5)
ch032_pyramid_1side_7__2side_7__3side_6_4side_1_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_1_5s1)
ch032_pyramid_1side_7__2side_7__3side_6_4side_2_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_2_5s1)
ch032_pyramid_1side_7__2side_7__3side_6_4side_2_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_2_5s2)
ch032_pyramid_1side_7__2side_7__3side_6_4side_3_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_3_5s1)
ch032_pyramid_1side_7__2side_7__3side_6_4side_3_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_3_5s2)
ch032_pyramid_1side_7__2side_7__3side_6_4side_3_5s3 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_3_5s3)
ch032_pyramid_1side_7__2side_7__3side_6_4side_4_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_4_5s1)
ch032_pyramid_1side_7__2side_7__3side_6_4side_4_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_4_5s2)
ch032_pyramid_1side_7__2side_7__3side_6_4side_4_5s3 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_4_5s3)
ch032_pyramid_1side_7__2side_7__3side_6_4side_4_5s4 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_4_5s4)
ch032_pyramid_1side_7__2side_7__3side_6_4side_5_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_5_5s1)
ch032_pyramid_1side_7__2side_7__3side_6_4side_5_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_5_5s2)
ch032_pyramid_1side_7__2side_7__3side_6_4side_5_5s3 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_5_5s3)
ch032_pyramid_1side_7__2side_7__3side_6_4side_5_5s4 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_5_5s4)
ch032_pyramid_1side_7__2side_7__3side_6_4side_5_5s5 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_5_5s5)
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_6_5s1)
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_6_5s2)
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s3 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_6_5s3)
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s4 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_6_5s4)
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s5 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_6_5s5)
ch032_pyramid_1side_7__2side_7__3side_6_4side_6_5s6 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_6_4side_6_5s6)
ch032_pyramid_1side_7__2side_7__3side_7_4side_1_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_1_5s1)
ch032_pyramid_1side_7__2side_7__3side_7_4side_2_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_2_5s1)
ch032_pyramid_1side_7__2side_7__3side_7_4side_2_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_2_5s2)
ch032_pyramid_1side_7__2side_7__3side_7_4side_3_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_3_5s1)
ch032_pyramid_1side_7__2side_7__3side_7_4side_3_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_3_5s2)
ch032_pyramid_1side_7__2side_7__3side_7_4side_3_5s3 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_3_5s3)
ch032_pyramid_1side_7__2side_7__3side_7_4side_4_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_4_5s1)
ch032_pyramid_1side_7__2side_7__3side_7_4side_4_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_4_5s2)
ch032_pyramid_1side_7__2side_7__3side_7_4side_4_5s3 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_4_5s3)
ch032_pyramid_1side_7__2side_7__3side_7_4side_4_5s4 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_4_5s4)
ch032_pyramid_1side_7__2side_7__3side_7_4side_5_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_5_5s1)
ch032_pyramid_1side_7__2side_7__3side_7_4side_5_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_5_5s2)
ch032_pyramid_1side_7__2side_7__3side_7_4side_5_5s3 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_5_5s3)
ch032_pyramid_1side_7__2side_7__3side_7_4side_5_5s4 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_5_5s4)
ch032_pyramid_1side_7__2side_7__3side_7_4side_5_5s5 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_5_5s5)
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_6_5s1)
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_6_5s2)
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s3 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_6_5s3)
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s4 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_6_5s4)
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s5 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_6_5s5)
ch032_pyramid_1side_7__2side_7__3side_7_4side_6_5s6 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_6_5s6)
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s1 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_7_5s1)
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s2 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_7_5s2)
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s3 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_7_5s3)
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s4 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_7_5s4)
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s5 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_7_5s5)
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s6 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_7_5s6)
ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s7 = _build_ch032_pyramid(pyramid_1side_7__2side_7__3side_7_4side_7_5s7)
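# Consumption sketch (hypothetical names, not this project's verified API):
# each builder variable above is typically picked up one at a time by an
# experiment/training script defined elsewhere, e.g. roughly
#   kmodel = ch032_pyramid_1side_7__2side_7__3side_7_4side_7_5s7
#   # ... hand kmodel to the experiment/trainer object ...
# The exact call site is outside this file.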
##################################
### 1side8
##################################
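# Sanity check for the "OK n" group-count comments below: for a given 2side
# value k, the configs enumerate all triples with
#   1 <= 5split <= 4side <= 3side <= k,
# which gives the tetrahedral numbers 1, 4, 10, 20, 35, 56, ...; the quoted
# sequence 1 3 6 10 ... appears to mark the (3side, 4side) pair count (the
# triangular numbers) for the current 2side level. Minimal self-contained
# check; _pyramid_config_count is an illustrative helper added here, not part
# of the project's API:
def _pyramid_config_count(k):
    return sum(1 for s3 in range(1, k + 1)
                 for s4 in range(1, s3 + 1)
                 for s5 in range(1, s4 + 1))

assert [_pyramid_config_count(k) for k in range(1, 7)] == [1, 4, 10, 20, 35, 56]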
# "1" 3 6 10 15 21 28 36 45 55
# 2side1 OK 1
ch032_pyramid_1side_8__2side_1__3side_1_4side_1_5s1 = _build_ch032_pyramid(pyramid_1side_8__2side_1__3side_1_4side_1_5s1)
# 1 "3" 6 10 15 21 28 36 45 55
# 2side3 OK 4
ch032_pyramid_1side_8__2side_2__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_2__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_2__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_2__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_2__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_2__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_2__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_2__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 "6" 10 15 21 28 36 45 55
# 2side3 OK 10
ch032_pyramid_1side_8__2side_3__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_3__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_3__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 "10" 15 21 28 36 45 55
# 2side4 OK 20
ch032_pyramid_1side_8__2side_4__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_4__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_4__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 "15" 21 28 36 45 55
# 2side5 OK 35
ch032_pyramid_1side_8__2side_5__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_5__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_5__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# 1 3 6 10 15 "21" 28 36 45 55
# 2side6 OK 56
ch032_pyramid_1side_8__2side_6__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_6__3side_6_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_6__3side_6_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
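# --- Illustrative sketch (not part of the original builder list) ---
# The tally comments below ("2side7 OK 84", "2side8 OK 120") appear to count the
# monotone (3side, 4side, 5s) triples enumerated for a fixed 2side value n:
# every combination with 1 <= 5s <= 4side <= 3side <= n, i.e. the n-th
# tetrahedral number. The helper name below is hypothetical, added only to
# make that arithmetic explicit.
def count_pyramid_variants(n):
    """Count triples (a, b, c) with 1 <= c <= b <= a <= n (the n-th tetrahedral number)."""
    return sum(1 for a in range(1, n + 1)
                 for b in range(1, a + 1)
                 for c in range(1, b + 1))

assert count_pyramid_variants(7) == 84    # matches the "2side7 OK 84" tally below
assert count_pyramid_variants(8) == 120   # matches the "2side8 OK 120" tally below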
# per-3side subtotals (triangular numbers): 1 3 6 10 15 21 "28" 36 45 55; the quoted entry is the current 2side index (7)
# 2side7 OK: 84 variants in total (1+3+6+10+15+21+28; see count_pyramid_variants above)
ch032_pyramid_1side_8__2side_7__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_6_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_6_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_7__3side_7_4side_7_5s7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_7__3side_7_4side_7_5s7, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
# per-3side subtotals (triangular numbers): 1 3 6 10 15 21 28 "36" 45 55; the quoted entry is the current 2side index (8)
# 2side8 OK: 120 variants in total (1+3+6+10+15+21+28+36; see count_pyramid_variants above)
ch032_pyramid_1side_8__2side_8__3side_1_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_1_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_2_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_2_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_2_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_2_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_2_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_2_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_3_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_3_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_4_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_4_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_5_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_5_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_6_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_6_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_7_4side_7_5s7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_7_4side_7_5s7, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_1_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_1_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_2_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_2_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_2_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_2_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_3_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_3_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_3_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_3_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_3_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_3_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_4_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_4_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_4_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_4_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_4_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_4_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_4_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_4_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_5_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_5_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_5_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_5_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_5_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_5_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_5_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_5_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_5_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_5_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_6_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_6_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_7_5s7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_7_5s7, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s1 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s1, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s2 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s2, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s3 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s3, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s4 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s4, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s5 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s5, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s6 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s6, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s7 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s7, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
ch032_pyramid_1side_8__2side_8__3side_8_4side_8_5s8 = KModel_builder().set_model_name(MODEL_NAME.flow_unet2).set_unet3(out_conv_block=True, concat_before_down=True, kernel_size=3, padding="valid", hid_ch= 32, depth_level=7, out_ch=1, unet_acti="sigmoid", conv_block_num=pyramid_1side_8__2side_8__3side_8_4side_8_5s8, ch_upper_bound= 2 ** 14).set_gen_op( use_gen_op ).set_train_step( use_train_step )
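# All of the ch032_pyramid_* builders above share one flow_unet2 configuration
# (kernel_size=3, padding="valid", hid_ch=32, depth_level=7, out_ch=1, sigmoid
# output, ch_upper_bound=2**14, and the common use_gen_op / use_train_step);
# only the conv_block_num pyramid object differs, which is what each variable
# name encodes.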
###############################################################################################################################################################################################
###############################################################################################################################################################################################
if __name__ == "__main__":
    import numpy as np

    print("build_model cost time:", time.time() - start_time)  # time/start_time are expected to be set near the top of this module
    data = np.zeros(shape=(1, 512, 512, 1))
    use_model = ch032_pyramid_1side_1__2side_1__3side_1_4side_1_5s1
    use_model = use_model.build()
    result = use_model.generator(data)
    print(result.shape)

    from kong_util.tf_model_util import Show_model_weights
    Show_model_weights(use_model.generator)
    use_model.generator.summary()
    print(use_model.model_describe)
| [ "[email protected]" ] | |
89c2127cdb13c6c6248abfba21d3cdb0eba90d73 | 3b9fdd117bfcfa86b1e354b2c193727e7567cb76 | /proxyuser17/proxyuser17/apps/myapp/models.py | 9a06f4b7a626979e8ea1971891cbd06fbebd22c1 | [ "BSD-3-Clause" ] | permissive | marcofucci/django-ticket-24506 | 84beb913b2db2993034e199c3a63c2cde60fa9aa | 88a36d792e77fb70c12224ea6a02774015ddbc84 | refs/heads/master | 2021-01-21T11:37:23.621884 | 2015-03-22T12:55:25 | 2015-03-22T12:55:25 | 32,672,309 | 0 | 1 | null | 2015-03-22T12:55:25 | 2015-03-22T11:11:24 | Python | UTF-8 | Python | false | false | 314 | py |
from django.db import models
class FKUserModel(models.Model):
    user = models.ForeignKey('core.User')

    def __unicode__(self):
        return u'%s' % self.user


class OneToOneUserModel(models.Model):
    user = models.OneToOneField('core.User')

    def __unicode__(self):
        return u'%s' % self.user
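# Minimal usage sketch (not part of the original file; the field on core.User is
# hypothetical and migrations are assumed to be applied):
#   from core.models import User
#   u = User.objects.create(username='alice')
#   FKUserModel.objects.create(user=u)        # many rows may reference one user
#   OneToOneUserModel.objects.create(user=u)  # at most one row per user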
| [ "[email protected]" ] | |
c3ca98e5d8c7a2b7a60bee0667267c57f753f0a6 | 6bf97e57103b9ddd639a91a0c371f86b3bce60d8 | /pure_ee/earth_engine_start.py | 48154028788ff39dfead3649a9176c9cbc69a261 | [ "Apache-2.0" ] | permissive | geosconsulting/gee_wapor | 655372722eed9e1c00ef7880870bd85c7953d64a | c3c451fcb21664172a74647fe5d9e56f312aa1df | refs/heads/master | 2021-01-13T05:01:37.233979 | 2017-03-01T14:09:08 | 2017-03-01T14:09:08 | 81,427,303 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 327 | py |
# -*- coding: utf-8 -*-
"""
Created on Tue Jan 3 04:59:03 2017
@author: fabio
"""
# Import the Earth Engine Python Package
import ee
# Initialize the Earth Engine object, using the authentication credentials.
ee.Initialize()
# Print the information for an image asset.
image = ee.Image('srtm90_v4')
print(image.getInfo())
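# Note (not in the original script): ee.Initialize() relies on previously stored
# credentials; on a fresh machine, authenticate first (e.g. with the
# `earthengine authenticate` command-line tool) before running this.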
| [ "[email protected]" ] | |
b2516c9040789df5a0e98f754aab40508283b38c | c834c1b7ef5d0039a706f174ed3f7b0ab82fa2e5 | /optOnMysql/data2mysql.py | 5903606b3171c597649676ce4e1d13f00e79909e | [] | no_license | yangze01/Laws-Search-Project | 126ffc5ec1ad1c2e9d95c2490104e8e37e766ad4 | d1fff57a9298aa0d883a1b988aa98804d0ab00c1 | refs/heads/master | 2021-08-14T15:26:27.455518 | 2017-11-16T03:59:58 | 2017-11-16T03:59:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,597 | py |
#coding=utf8
import sys
import time
reload(sys)
sys.setdefaultencoding('utf8')
from optOnMysql.DocumentsOnMysql import *
from optOnMysql.DocumentUnit import *
import json
BasePath = sys.path[0]
def is_valid_date(str):
    '''Check whether the string is a valid YYYY-MM-DD date.'''
    try:
        time.strptime(str, "%Y-%m-%d")
        return True
    except:
        return False
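# Illustrative behavior (values made up):
#   is_valid_date("2017-11-16")  -> True
#   is_valid_date("2017-13-01")  -> False  (strptime raises, caught above)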
def document_format(line, criminal):
    line = json.loads(line.decode('utf8'))
    document_unit = dict()
    document_unit["title"] = line['title']
    # print(len(document_unit['title']))
    document_unit["court"] = line['court']
    document_unit["url"] = line['url']
    document_unit["content"] = '|'.join(line['content']).encode('utf8')
    # print(len(document_unit["content"]))
    document_unit["criminal"] = criminal
    if is_valid_date(line["date"]):
        document_unit["date"] = line['date']
    else:
        document_unit["date"] = "0000-00-00"
    return document_unit
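# Illustrative record (field names come from the code above; values are made up):
#   input line:  {"title": "...", "court": "...", "url": "http://...",
#                 "content": ["paragraph 1", "paragraph 2"], "date": "2017-11-16"}
#   output dict: same fields, with content joined as "paragraph 1|paragraph 2"
#                plus the extra "criminal" label passed in by the caller.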
def save_document2mysql(file_path, criminal):
    opt = DocumentsOnMysql()
    i = 0
    for line in open(file_path):
        print(i)
        i = i + 1
        document_unit = document_format(line, criminal)
        opt.insertOneDocuments(document_unit)
    opt.connClose()
    print(u"finished")

if __name__ == "__main__":
    opt = DocumentsOnMysql()
    # opt.insertOneDocuments(document_unit)
    # print(opt)
    opt.findById("1")
    a = opt.findall()
    for i in a:
        print(i)
    opt.connClose()
    # file_path = BasePath + "/../data/judgment_trafficking.txt"
    # save_document2mysql(file_path, u"拐卖妇女儿童罪")  # charge label: "trafficking in women and children"
| [ "[email protected]" ] | |
9ea362e7e23645d74ada3c82eae8bd8ed6962067 | 6f6b7e1a9837fb581cc5fed92b66b4ad12ea30f5 | /19-05-161_STOCK_profit_AIC_BIC_L500_github/4plot_profit_nh6.py | 7396f5c35cbd7cd3cf08f2c365cdd892b4978ffc | [ "MIT" ] | permissive | danhtaihoang/stock | f7b4f4989ff0c2b267766761d402adc599fc893d | 9c3a3e467839dda095a0152055e347254abaf271 | refs/heads/master | 2020-06-10T01:35:59.136032 | 2019-06-24T17:13:35 | 2019-06-24T17:13:35 | 193,546,744 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,052 | py |
import numpy as np
import matplotlib.pyplot as plt
#=========================================================================================
# average:
p1 = np.loadtxt('profit_cost_nhmax6.dat')
p2 = np.loadtxt('profit_AIC_nhmax6.dat')
p3 = np.loadtxt('profit_BIC_nhmax6.dat')
tmax = np.shape(p1)[0]
t = np.arange(0,tmax,1)
plt.figure(figsize=(20,16))
plt.subplot(2,2,1)
#plt.figure(figsize=(5,4))
plt.title('trade everyday')
plt.plot(t, p1[:,0],'k-',label='cost')
plt.plot(t, p2[:,0],'b-',label='AIC')
plt.plot(t, p3[:,0],'r-',label='BIC')
plt.legend()
plt.xlabel('time')
plt.ylabel('cumulative profit')
plt.ylim([-1,4])
plt.grid(linestyle='dotted')
plt.subplot(2,2,2)
plt.title('not trade everyday')
plt.plot(t, p1[:,1],'k-',label='cost')
plt.plot(t, p2[:,1],'b-',label='AIC')
plt.plot(t, p3[:,1],'r-',label='BIC')
plt.legend()
plt.xlabel('time')
plt.ylabel('cumulative profit')
plt.ylim([-1,4])
plt.grid(linestyle='dotted')
#plt.tight_layout(h_pad=0.8, w_pad=1.2)
plt.savefig('profit_cost_AIC_BIC_nhmax6.pdf', format='pdf', dpi=300)
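# Reading note (inferred from the subplot titles above, not from the data files):
# column 0 of each profit_*.dat file appears to hold the "trade everyday"
# cumulative profit and column 1 the "not trade everyday" one; rows are time steps.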
| [ "[email protected]" ] | |
5fe9b2191e2862a97b4b0500d3c4777c88eab68c | 56e96acad654d7480d17d5cae7402a2bc6cbaa76 | /share/py_module/dataset.py | fc4a162fa0c59a4f2c53f521c749910a52a91ef4 | [] | no_license | LitingLin/VehicleDC | 641b1e25c22cac2ffb1dcba519b1af5ac7d9f2c8 | 2ac0b8ad708f033b59c0bc924ca7ec169e86b063 | refs/heads/master | 2020-05-17T19:30:00.556691 | 2019-07-12T16:21:12 | 2019-07-12T16:21:12 | 183,916,512 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,885 | py |
# coding: utf-8
import os
import re
import numpy as np
import torch
from torch.utils import data
from torchvision import transforms as T
from PIL import Image
color_attrs = ['Black', 'Blue', 'Brown',
               'Gray', 'Green', 'Pink',
               'Red', 'White', 'Yellow']

direction_attrs = ['Front', 'Rear']

type_attrs = ['passengerCar', 'saloonCar',
              'shopTruck', 'suv', 'trailer', 'truck', 'van', 'waggon']
class Vehicle(data.Dataset):
    """
    Multi-label attribute vectors, used with cross entropy loss.
    Uses the pre-processed data: all 'unknown' samples removed.
    """

    def __init__(self,
                 root,
                 transform=None,
                 is_train=True):
        """
        :return:
        """
        if not os.path.exists(root):
            print('=> [Err]: root not exists.')
            return

        if is_train:
            print('=> train data root: ', root)
        else:
            print('=> test data root: ', root)

        # collect non-empty sub-directories and sort them naturally by (class) name
        self.img_dirs = [os.path.join(root, x) for x in os.listdir(root) \
                         if os.path.isdir(os.path.join(root, x))]
        self.img_dirs = [x for x in self.img_dirs if len(os.listdir(x)) != 0]
        if len(self.img_dirs) == 0:
            print('=> [Err]: empty sub-dirs.')
            return
        self.img_dirs.sort()  # natural sort, ascending by default
        # print('=> total {:d} classes for training'.format(len(self.img_dirs)))

        # split the multi-task labels
        self.color_attrs = color_attrs
        self.direction_attrs = direction_attrs
        self.type_attrs = type_attrs

        # order file paths by sub-directory (class name)
        self.imgs_path = []
        self.labels = []
        for x in self.img_dirs:
            match = re.match('([a-zA-Z]+)_([a-zA-Z]+)_([a-zA-Z]+)', os.path.split(x)[1])
            color = match.group(1)      # body color
            direction = match.group(2)  # body orientation
            type = match.group(3)       # body type
            # print('=> color: %s, direction: %s, type: %s' % (color, direction, type))

            for y in os.listdir(x):
                # append file path
                self.imgs_path.append(os.path.join(x, y))

                # append label
                color_idx = int(np.where(self.color_attrs == np.array(color))[0])
                direction_idx = int(np.where(self.direction_attrs == np.array(direction))[0])
                type_idx = int(np.where(self.type_attrs == np.array(type))[0])
                label = np.array([color_idx, direction_idx, type_idx], dtype=int)
                label = torch.Tensor(label)  # torch.from_numpy(label)
                self.labels.append(label)  # Tensor(label)
                # print(label)

        if is_train:
            print('=> total {:d} samples for training.'.format(len(self.imgs_path)))
        else:
            print('=> total {:d} samples for testing.'.format(len(self.imgs_path)))

        # set up the data transforms
        if transform is not None:
            self.transform = transform
        else:  # default image transformation
            self.transform = T.Compose([
                T.Resize(448),
                T.CenterCrop(448),
                T.ToTensor(),
                T.Normalize(mean=[0.485, 0.456, 0.406],
                            std=[0.229, 0.224, 0.225])
            ])

        # --------------------- serialize imgs_path to disk
        # root_parent = os.path.abspath(os.path.join(root, '..'))
        # print('=> parent dir: ', root_parent)
        # if is_train:
        #     imgs_path = os.path.join(root_parent, 'train_imgs_path.pkl')
        # else:
        #     imgs_path = os.path.join(root_parent, 'test_imgs_path.pkl')
        # print('=> dump imgs path: ', imgs_path)
        # pickle.dump(self.imgs_path, open(imgs_path, 'wb'))

    def __getitem__(self, idx):
        """
        :param idx:
        :return:
        """
        image = Image.open(self.imgs_path[idx])

        # convert grayscale images to 'RGB'
        if image.mode == 'L' or image.mode == 'I':  # 8-bit or 32-bit grayscale
            image = image.convert('RGB')

        if self.transform is not None:
            image = self.transform(image)

        label = self.labels[idx]
        f_path = os.path.split(self.imgs_path[idx])[0].split('/')[-2] + \
                 '/' + os.path.split(self.imgs_path[idx])[0].split('/')[-1] + \
                 '/' + os.path.split(self.imgs_path[idx])[1]

        return image, label, f_path

    def __len__(self):
        """
        :return:
        """
        return len(self.imgs_path)
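# Minimal usage sketch (the root path is hypothetical; sub-directories must be
# named <Color>_<Direction>_<Type> using the attribute lists defined above):
#   train_set = Vehicle('/data/vehicle/train', is_train=True)
#   loader = torch.utils.data.DataLoader(train_set, batch_size=32, shuffle=True)
#   for image, label, f_path in loader:
#       pass  # label is a length-3 tensor: [color_idx, direction_idx, type_idx]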
| [ "[email protected]" ] | |
6321392e8ea506f89ad8138de98709a7d3aeef3a | e5f1befb7c7ca0072747b33086fc6569a6befd01 | /old/flaskold/json/008.py | f4caad8db1b8d37fbc3909c746dc3bac66210760 | [] | no_license | nepomnyashchii/TestGit | ae08d8bb1b7d2ab9389a309fd1dc9e24729b019c | c7abf4ab08ee3c2f3ea1fb09a1938bff7a3e0e5c | refs/heads/master | 2020-04-28T23:41:51.053547 | 2020-01-24T12:22:40 | 2020-01-24T12:22:40 | 175,666,093 | 0 | 1 | null | 2019-03-15T13:44:03 | 2019-03-14T17:08:58 | null | UTF-8 | Python | false | false | 147 | py |
from flask import Flask
app = Flask(__name__)

@app.route('/')
def index():
    return "test"

if __name__ == "__main__":
    app.run(debug=True)
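# Run note (assumes Flask's default host/port): start this file with
# `python 008.py` and visit http://127.0.0.1:5000/ to get the "test" response.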
| [ "[email protected]" ] | |
50e9870739673efcfa7b101e2a5fab4d46cee95a | e0b7fb64e57823d24ad6b8ca4e130c657ba437a4 | /analysis/yields/plot.py | 1c98b8833d00a74347fe5b76ba3b506ff8435f4a | [] | no_license | cfangmeier/FTAnalysis | 66644189f02ddf43dadb8e029e4709950572e7cf | 6612f40b67689d6d946866710ad2e0256b790821 | refs/heads/master | 2021-09-11T00:16:35.222837 | 2018-01-09T22:26:50 | 2018-01-09T22:26:50 | 106,859,187 | 0 | 0 | null | 2017-10-13T18:23:23 | 2017-10-13T18:23:23 | null | UTF-8 | Python | false | false | 24,136 | py |
import os
import sys
import ROOT as r
sys.path.insert(0,'../../')
from common.Software.dataMCplotMaker.dataMCplotMaker import dataMCplot
from analysis.limits.runLimits import get_lims
from analysis.limits.singleBinLimits import get_singlebin_limits
from analysis.limits.makeScan import make_scan
from analysis.limits.getPostFit import get_postfit_dict
def reduce_bins(h_in, ndrop=2):
    # drop first [ndrop] bins
    nbins_reduced = h_in.GetNbinsX() - ndrop
    h_out = r.TH1F(h_in.GetName()+"_reduced"+str(ndrop), h_in.GetTitle(), nbins_reduced, 0.5, nbins_reduced+0.5)
    binvals = list(h_in)  # includes under and overflow, so bin 1 is index 1
    for ibin, val in enumerate(binvals):
        if ibin <= ndrop: continue
        h_out.SetBinContent(ibin-ndrop, val)
        h_out.SetBinError(ibin-ndrop, h_in.GetBinError(ibin))
    return h_out
def scale_hist(h_in, scale=1.):
    # return scaled histogram
    h_out = h_in.Clone(h_in.GetName()+"_scaled")
    h_out.Scale(scale)
    return h_out
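# Minimal usage sketch (h_srcr is a hypothetical filled TH1F whose first two bins
# are the CRZ/CRW control regions, matching the SRCR_TOTAL bin labels below):
#   h_sr = reduce_bins(h_srcr, ndrop=2)      # keep only the eight SR bins
#   h_sr_x2 = scale_hist(h_sr, scale=2.0)    # e.g. project yields to double lumi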
if __name__ == "__main__":
    os.system("mkdir -p plots")
    r.gROOT.SetBatch(1)

    bginfo = [
        ("flips", "Charge misid.", r.kGray+2, 0.2),
        ("rares", "Rare", r.kMagenta-7, 0.5),
        ("xg", "X#gamma", r.kViolet+2, 0.5),
        ("ttvv", "t#bar{t}VV", r.kAzure-4, 0.5),
        ("ttz", "t#bar{t}Z", r.kGreen-6, 0.40),
        ("fakes", "Nonprompt lep.", 18, 0.30),
        ("tth", "t#bar{t}H", r.kBlue-5, 0.50),
        ("ttw", "t#bar{t}W", r.kGreen+3, 0.40),
    ]
    bgnames, titles, colors, systs = map(list, zip(*bginfo))

    f1 = r.TFile("histos.root")
    cards_dir = "../limits/{0}".format(f1.Get("metadata").GetTitle())
    d_postfit, fitratios = get_postfit_dict("{}/mlfit.root".format(cards_dir))
    # d_postfit, fitratios = get_postfit_dict("../limits/v0.10_Jul20/mlfit.root".format(cards_dir))
    # print d_postfit
    # print fitratios
    for proc, h1 in d_postfit.items():
        if not h1: continue
        vals, errs = zip(*[[h1.GetBinContent(ib), h1.GetBinError(ib)] for ib in range(1, h1.GetNbinsX()+1)])
        # print proc, zip(vals,errs)
    # print d_postfit, fitratios

    commonopts = "--darkColorLines --lumi 35.9 --topYaxisTitle Data/Pred. --type Preliminary --poissonErrorsNoZeros --dataName Data --outOfFrame --systInclStat --systFillStyle 3344 "

    d_opts_br = {
# "SR_TOTAL" : [("",), commonopts+" --xAxisLabel Region --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp .03 --legendRight -0.05 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable --xAxisBinLabels SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 --yAxisLabel Events "],
# "SRCR_TOTAL" : [("",), commonopts+" --xAxisLabel Region --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp .03 --legendRight -0.05 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable --xAxisBinLabels CRZ,CRW,SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 --yAxisLabel Events "],
# "ht" : [("ttzcr","ttwcr","sr","br"), commonopts+" --ratioUpperBound 4 --xAxisLabel H_{T} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --yAxisLabel Events "],
# "met" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel p_{T}^{miss} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --yAxisLabel Events "],
# "njets" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel N_{jets} --noXaxisUnit --nDivisions 6 --noDivisionLabel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --yAxisLabel Events / bin "],
# "nbtags" : [("ttzcr","ttwcr","sr","br"), commonopts+" --noDivisionLabel --noXaxisUnit --xAxisLabel N_{b} --nDivisions 4 --noXaxisUnit --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --makeTable --yAxisLabel Events / bin "],
"SR_TOTAL" : [("",), commonopts+" --xAxisLabel Region --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp -.09 --legendRight -0.08 --legendTaller 0.18 --yTitleOffset -0.15 --makeTable --xAxisBinLabels SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 --yAxisLabel Events "],
"SRCR_TOTAL" : [("",), commonopts+" --xAxisLabel Region --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp -.10 --legendRight -0.08 --legendTaller 0.20 --yTitleOffset -0.00 --makeTable --xAxisBinLabels CRZ,CRW,SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 --yAxisLabel Events "],
"ht" : [("ttzcr","ttwcr","sr","br"), commonopts+" --ratioUpperBound 4 --xAxisLabel #it{H}_{T} --isLinear --legendUp -0.09 --legendRight -0.08 --legendTaller 0.18 --yTitleOffset -0.1 --yAxisLabel Events "],
"met" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel #it{p}_{T}^{miss} --isLinear --legendUp -0.09 --legendRight -0.08 --legendTaller 0.18 --yTitleOffset -0.1 --yAxisLabel Events "],
"njets" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel #it{N}_{jets} --noXaxisUnit --nDivisions 6 --noDivisionLabel --isLinear --legendUp -0.09 --legendRight -0.08 --legendTaller 0.18 --yTitleOffset -0.1 --yAxisLabel Events / bin "],
"nbtags" : [("ttzcr","ttwcr","sr","br"), commonopts+" --noDivisionLabel --noXaxisUnit --xAxisLabel #it{N}_{b} --nDivisions 4 --noXaxisUnit --isLinear --legendUp -0.09 --legendRight -0.08 --legendTaller 0.16 --yTitleOffset -0.1 --makeTable --yAxisLabel Events / bin "],
# "SR_TOTAL" : [("",), commonopts+" --xAxisLabel SR --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp -.03 --legendRight -0.05 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable --percentageInBox --xAxisBinLabels SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 "],
# "SRCR_TOTAL" : [("",), commonopts+" --xAxisLabel Region --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp -.03 --legendRight -0.05 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable --percentageInBox --xAxisBinLabels CRZ,CRW,SR1,SR2,SR3,SR4,SR5,SR6,SR7,SR8 "],
# "ht" : [("ttzcr","ttwcr","sr","br"), commonopts+" --ratioUpperBound 4 --xAxisLabel H_{T} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "met" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel E_{T}^{miss} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mvis" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel m^{vis} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.0 "],
# "mtvis" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel m_{T}^{vis} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.0 "],
# "njets" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel N_{jets} --noXaxisUnit --nDivisions 6 --noDivisionLabel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "nbtags" : [("ttzcr","ttwcr","sr","br"), commonopts+" --noDivisionLabel --noXaxisUnit --xAxisLabel N_{b} --nDivisions 4 --noXaxisUnit --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --makeTable "],
# "mtmin" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel m_{T}^{min} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mll" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel m_{ll} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mllos" : [("ttzcr",), commonopts+" --xAxisLabel Z cand m_{ll} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "type" : [("ttzcr","ttwcr","sr","br"), commonopts+" --noDivisionLabel --noXaxisUnit --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 --xAxisBinLabels #mu#mu,#mu e,e#mu,ee "],
# "charge" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel charge --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "nleps" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel Nleps --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l1pt" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel ordered l1pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l2pt" : [("ttzcr","ttwcr","sr","br"), commonopts+" --xAxisLabel ordered l2pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l3pt" : [("ttzcr",), commonopts+" --xAxisLabel ordered l3pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mtop1" : [("sr",), commonopts+" --xAxisLabel m_{top,1} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.0 "],
# "mtop2" : [("sr",), commonopts+" --xAxisLabel m_{top,2} --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.0 "],
# # "mva" : [("sr","br"), commonopts+" --xAxisLabel lep1,2 el MVA --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "sip3d_mu_lep1" : [("sr","br"), commonopts+" --xAxisLabel lep1 mu sip3d --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "sip3d_mu_lep2" : [("sr","br"), commonopts+" --xAxisLabel lep2 mu sip3d --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mu_l1pt" : [("sr","br"), commonopts+" --xAxisLabel lep1 mu pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "mu_l2pt" : [("sr","br"), commonopts+" --xAxisLabel lep2 mu pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# # "mu_l3pt" : [("sr","br"), commonopts+" --xAxisLabel lep3 mu pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l1eta_mu" : [("sr","br"), commonopts+" --xAxisLabel lep1 mu eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l2eta_mu" : [("sr","br"), commonopts+" --xAxisLabel lep2 mu eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# # "l3eta_mu" : [("sr","br"), commonopts+" --xAxisLabel lep3 mu eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep1_mu_miniIso" : [("sr","br"), commonopts+" --xAxisLabel lep1 mu miniIso --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep2_mu_miniIso" : [("sr","br"), commonopts+" --xAxisLabel lep2 mu miniIso --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep1_mu_ptRel" : [("sr","br"), commonopts+" --xAxisLabel lep1 mu ptRel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep2_mu_ptRel" : [("sr","br"), commonopts+" --xAxisLabel lep2 mu ptRel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "sip3d_el_lep1" : [("sr","br"), commonopts+" --xAxisLabel lep1 el sip3d --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "sip3d_el_lep2" : [("sr","br"), commonopts+" --xAxisLabel lep2 el sip3d --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "el_l1pt" : [("sr","br"), commonopts+" --xAxisLabel lep1 el pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "el_l2pt" : [("sr","br"), commonopts+" --xAxisLabel lep2 el pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# # "el_l3pt" : [("sr","br"), commonopts+" --xAxisLabel lep3 el pt --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l1eta_el" : [("sr","br"), commonopts+" --xAxisLabel lep1 el eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "l2eta_el" : [("sr","br"), commonopts+" --xAxisLabel lep2 el eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# # "l3eta_el" : [("sr","br"), commonopts+" --xAxisLabel lep3 el eta --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep1_el_miniIso" : [("sr","br"), commonopts+" --xAxisLabel lep1 el miniIso --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep2_el_miniIso" : [("sr","br"), commonopts+" --xAxisLabel lep2 el miniIso --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep1_el_ptRel" : [("sr","br"), commonopts+" --xAxisLabel lep1 el ptRel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "lep2_el_ptRel" : [("sr","br"), commonopts+" --xAxisLabel lep2 el ptRel --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "bjetpt" : [("sr","br"), commonopts+" --xAxisLabel p_{T}(bjets) --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "jetpt" : [("sr","br"), commonopts+" --xAxisLabel p_{T}(jets) --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.1 "],
# "disc" : [("br",), commonopts+" --isLinear --xAxisLabel disc --legendUp .0 --legendRight -0.08 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable "],
# "disc2" : [("br",), commonopts+" --isLinear --xAxisLabel disc2 --legendUp .0 --legendRight -0.08 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable "],
# "SRDISC_TOTAL" : [("",), commonopts+" --xAxisLabel SR_{disc} --noDivisionLabel --noXaxisUnit --isLinear --noOverflow --legendUp -.03 --legendRight -0.05 --legendTaller 0.05 --yTitleOffset -0.1 --makeTable --percentageInBox "],
# "ntops" : [("sr",), commonopts+" --xAxisLabel N_{tops} --noXaxisUnit --nDivisions 5 --noDivisionLabel --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset 0.1 --makeTable "],
# "ntopness" : [("sr",), commonopts+" --xAxisLabel N_{tops}ness --isLinear --legendUp -.15 --legendRight -0.08 --legendTaller 0.15 --yTitleOffset -0.0 "],
}
do_stats = True
for key in d_opts_br.keys():
types, opts_str = d_opts_br[key]
for typ in types:
if len(typ) == 0:
name = key[:]
else:
name = "{}_{}".format(typ,key)
oname = "plots/%s.pdf" % name.replace("_TOTAL","")
# title = typ.upper()
title = ""
subtitle = ""
d_newopts = {
"outputName": oname,
}
# if key == "njets" and typ == "ttwcr": subtitle = "(a)"
# if key == "nbtags" and typ == "ttwcr": subtitle = "(b)"
# if key == "njets" and typ == "ttzcr": subtitle = "(c)"
# if key == "nbtags" and typ == "ttzcr": subtitle = "(d)"
# if key == "njets" and typ == "sr": subtitle = "(a)"
# if key == "nbtags" and typ == "sr": subtitle = "(b)"
# if key == "ht" and typ == "sr": subtitle = "(c)"
# if key == "met" and typ == "sr": subtitle = "(d)"
# if key == "SRCR_TOTAL": subtitle = "(a)"
# if key == "SR_TOTAL": subtitle = "(b)"
if key == "njets" and typ == "ttwcr": subtitle = "CRW"
if key == "nbtags" and typ == "ttwcr": subtitle = "CRW"
if key == "njets" and typ == "ttzcr": subtitle = "CRZ"
if key == "nbtags" and typ == "ttzcr": subtitle = "CRZ"
if key == "njets" and typ == "sr": subtitle = ""
if key == "nbtags" and typ == "sr": subtitle = ""
if key == "ht" and typ == "sr": subtitle = ""
if key == "met" and typ == "sr": subtitle = ""
if key == "SRCR_TOTAL": subtitle = ""
if key == "SR_TOTAL": subtitle = ""
if typ in ["ttzcr","sr"] and ("njets" in name or "nbtags" in name or "met" in name):
d_newopts["ratioUpperBound"] = 4.0
if key in ["njets","nbtags","ht","met"] and typ == "sr":
d_newopts["ratioUpperBound"] = 5.0
print name, typ
bgs = map(lambda x: f1.Get("{0}_{1}".format(name,x)), ["data", "tttt"]+bgnames)
h_data,h_tttt,bgs = bgs[0], bgs[1], bgs[2:]
h_data_empty = h_data.Clone("empty")
h_data_empty.Reset()
h_tttt.Sumw2()
tttt_sf = 5.0
h_tttt.Scale(tttt_sf)
do_unblind = True
d_newopts["noDataWidth"] = True
# if do_stats and key == "SRCR_TOTAL":
# # if key == "SRCR_TOTAL":
# make_scan(cards_dir, do_blind=not do_unblind)
# os.system("cp scan.pdf plots/scan.pdf")
# if do_stats and key in ["SRCR_TOTAL"]:
# regions="srcr"
# if "DISC" in key: regions="srdisc"
# d_lims = get_lims(card=cards_dir, regions=regions, redocard=True, redolimits=True, domcfakes=False)
# exp, expp1, expm1 = d_lims["exp"], d_lims["sp1"]-d_lims["exp"], d_lims["exp"]-d_lims["sm1"]
# subtitle = "#sigma^{UL}_{exp} = %.2f^{+%.1f}_{-%.1f} fb" % (exp, expp1, expm1)
# do_unblind = typ in ["ttwcr","ttzcr", "sr"]
do_blind = not do_unblind
if do_unblind:
if "l3eta_el" not in name and "el_l3pt" not in name:
d_newopts["noTextBetweenPads"] = True
d_newopts["noGrass"] = True
dataMCplot(h_data, bgs=bgs, sigs=[h_tttt], sigtitles=["t#bar{t}t#bar{t} x 5"], systs=systs, titles=titles, title=title, subtitle=subtitle, colors=colors, opts=d_newopts, opts_str=opts_str)
new_d_newopts = d_newopts.copy()
new_h_tttt = h_tttt.Clone("new_tttt")
new_h_tttt.Scale(1.0/tttt_sf) # undo above scaling
new_bgs = bgs+[new_h_tttt]
new_colors = colors+[r.kPink-1]
new_systs = systs+[0.1]
new_titles = titles+["t#bar{t}t#bar{t}"]
new_d_newopts["poissonErrorsNoZeros"] = False
new_d_newopts["noTextBetweenPads"] = False
new_d_newopts["preserveBackgroundOrder"] = True
def get_name(hist):
return hist.GetName().rsplit("_",1)[-1]
if do_stats and key == "SR_TOTAL":
# new_d_newopts["outputName"] = d_newopts["outputName"].replace(".pdf","_postfit.pdf")
# dataMCplot(h_data_empty, bgs=new_bgs, systs=new_systs, titles=new_titles, title="Prefit", subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=opts_str)
new_d_newopts["outputName"] = d_newopts["outputName"].replace(".pdf","_postfit.pdf")
new_d_newopts["noTextBetweenPads"] = True
del new_d_newopts["noGrass"]
postfit_bgs = [reduce_bins(d_postfit[get_name(bg)],2) for bg in new_bgs]
h_totalsyst = reduce_bins(d_postfit["total"],2) # total_background is tot bg, total is totbg+sig
dataMCplot(h_data, bgs=postfit_bgs, titles=new_titles, title="", subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=opts_str, total_syst=h_totalsyst)
if do_stats and key == "SRCR_TOTAL":
new_d_newopts["outputName"] = d_newopts["outputName"].replace(".pdf","_postfit.pdf")
new_d_newopts["noTextBetweenPads"] = True
del new_d_newopts["noGrass"]
this_opts_str = opts_str.replace("--isLinear","--setMinimum 0.1")
# this_opts_str = this_opts_str.replace("--legendUp -.05","--legendUp .00")
postfit_bgs = [reduce_bins(d_postfit[get_name(bg)],0) for bg in new_bgs]
h_totalsyst = reduce_bins(d_postfit["total"],0) # total_background is tot bg, total is totbg+sig
dataMCplot(h_data, bgs=postfit_bgs, titles=new_titles, title="", subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=this_opts_str, total_syst=h_totalsyst)
if do_stats and key not in ["SR_TOTAL","SRCR_TOTAL"]:
new_d_newopts["outputName"] = d_newopts["outputName"].replace(".pdf","_postfit.pdf")
new_d_newopts["noGrass"] = True
postfit_bgs = [scale_hist(bg,scale=fitratios[get_name(bg)]) for bg in new_bgs]
# dataMCplot(h_data, bgs=postfit_bgs, titles=new_titles, title="Postfit "+title, subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=opts_str, systs=new_systs)
dataMCplot(h_data, bgs=postfit_bgs, titles=new_titles, title=""+title, subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=opts_str, systs=new_systs)
if do_stats and key not in ["SR_TOTAL","SRCR_TOTAL"]:
new_d_newopts["noGrass"] = True
new_d_newopts["outputName"] = oname.replace(".pdf","_stacked.pdf")
dataMCplot(h_data, bgs=new_bgs, titles=new_titles, title=title, subtitle=subtitle, colors=new_colors, opts=new_d_newopts, opts_str=opts_str, systs=new_systs)
# if do_blind:
# d_newopts["outputName"] = d_newopts["outputName"].replace(".pdf","_blind.pdf")
# d_newopts["poissonErrorsNoZeros"] = False
# d_newopts["noTextBetweenPads"] = False
# # For SRCR, "blind" is actually partially blind (first two bins -- CRZ,CRW -- are unblinded)
# # make data with only CR unblinded (first two bins)
# h_data_cronly = h_data.Clone("cronly")
# for i in range(1,h_data.GetNbinsX()+1):
# if i in [1,2]: h_data_cronly.SetBinContent(i, h_data.GetBinContent(i))
# else: h_data_cronly.SetBinContent(i, 0)
# if key == "SRCR_TOTAL":
# dataMCplot(h_data_cronly, bgs=bgs, sigs=[h_tttt], sigtitles=["t#bar{t}t#bar{t} x 5"], systs=systs, titles=titles, title=title, subtitle=subtitle, colors=colors, opts=d_newopts, opts_str=opts_str)
# else:
# dataMCplot(h_data_empty, bgs=bgs, sigs=[h_tttt], sigtitles=["t#bar{t}t#bar{t} x 5"], systs=systs, titles=titles, title=title, subtitle=subtitle, colors=colors, opts=d_newopts, opts_str=opts_str)
# os.system("ic plots/SRCR_postfit.pdf")
# os.system("niceplots plots plots_tttt_Jul20_unblind")
# os.system("niceplots plots plots_tttt_Aug1_sr4")
# os.system("niceplots plots plots_tttt_Aug8")
# os.system("niceplots plots plots_tttt_Sep11")
os.system("niceplots plots plots_tttt_Oct9")
| [
"[email protected]"
] | |
f4a850cce56f0f8cf0a4527768d60ba75d2eb5df | e06ff08424324ac5d6c567ae9cd6954290ff9bd4 | /Yudi TANG/axe/KNN/KNN_dating.py | ba4684b05e29ddc86468b5905cf90baf69208d11 | [
"Apache-2.0"
] | permissive | JKChang2015/Machine_Learning | b1bdfcf9ea43a98fc7efd5c0624bbaf5d9dbf495 | f8b46bf23e4d1972de6bd652dd4286e9322ed62f | refs/heads/master | 2021-06-06T19:18:16.596549 | 2020-05-03T22:28:18 | 2020-05-03T22:28:18 | 119,390,891 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,671 | py | # KNN_dating
# Created by JKChang
# 29/01/2020, 10:20
# Tag:
# Description: dating people recommendation
# Feature: 1. Number of frequent flyer miles earned per year
# 2. Percentage of time spent playing video games
# 3. Liters of ice cream consumed per week
# classifies:1. doesn't like
# 2. small like
# 3. large like
import operator
import matplotlib.pyplot as plt
from mpl_toolkits import mplot3d  # noqa: F401 -- registers the '3d' projection used in view3DMatrix (required on older matplotlib)
import numpy as np
def viewMatrix(matrix, labels, arg1, arg2):
fig = plt.figure()
ax = fig.add_subplot(111)
ax.scatter(matrix[:, arg1 - 1], matrix[:, arg2 - 1], 15.0 * np.array(labels), 15.0 * np.array(labels))
plt.show()
def view3DMatrix(matrix, labels):
fig = plt.figure()
ax = plt.axes(projection='3d')
# Data for a three-dimensional line
zline = np.linspace(0, 1, 1000)
xline = np.sin(zline)
yline = np.cos(zline)
ax.plot3D(xline, yline, zline, 'gray')
# Data for three-dimensional scattered points
zdata = matrix[:, 0]
xdata = matrix[:, 1]
ydata = matrix[:, 2]
ax.scatter3D(xdata, ydata, zdata, c=labels)
fig.show()
def kNNClassify(newInput, dataSet, labels, k):
numSamples = dataSet.shape[0] # shape[0] stands for the number of rows
# Step 1: calculate Euclidean distance
diff = np.tile(newInput, (numSamples, 1)) - dataSet
squareDiff = diff ** 2
squareSum = squareDiff.sum(axis=1)
distance = squareSum ** 0.5
# Step 2: Sort distance
# argsort() returns the indices that would sort an array in a ascending order
sortedDistIndicies = distance.argsort()
classCount = {} # key: label , value: laebl count
for i in range(k):
# Step 3: choose the min k distance
voteLabel = labels[sortedDistIndicies[i]]
# Step 4: count the label frequency
classCount[voteLabel] = classCount.get(voteLabel, 0) + 1
# Step 5: the max voted class label will return
# Sort the dictionary according to the values
sortedClassCount = sorted(classCount.items(), key=operator.itemgetter(1), reverse=True)
return sortedClassCount[0][0]
def file2matrix(filename):
with open(filename, 'r') as f:
resMatrix = np.zeros((1, 3))
labels = []
for line in f.readlines():
content = line.split('\t')
lineVector = np.asfarray([content[:3]])
resMatrix = np.r_[resMatrix, lineVector]
labels.append(int(content[-1]))
DataMatrix = np.delete(resMatrix, (0), axis=0)
return DataMatrix, labels
def autoNorm(dataSet):
# normalization:
# nor_value = (old_Value - minimum_value) / (max - min)
# get list of minimum value for each col
minValue = dataSet.min(0)
# get list of maximum value for each col
maxValue = dataSet.max(0)
normDataSet = np.zeros(np.shape(dataSet))
m = dataSet.shape[0]
# copy the minValue to size(m,1) matrix
normDataSet = dataSet - np.tile(minValue, (m, 1))
normDataSet = normDataSet / np.tile(maxValue - minValue, (m, 1))
return normDataSet, maxValue - minValue, minValue
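# Worked example of the scaling above (pure arithmetic, for illustration):
# a feature column [10., 20., 40.] has min 10 and range 30, so it maps to
# [(10-10)/30, (20-10)/30, (40-10)/30] = [0.0, 0.333..., 1.0]. The ranges
# and minima are returned so that new samples can be scaled the same way
# before being passed to kNNClassify.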
def datingClassTest(filename):
hoRatio = 0.1
dataMatrix, labels = file2matrix(filename)
norm_matrix, ranges, min = autoNorm(dataMatrix)
# row number
m = norm_matrix.shape[0]
# number of test row
numTestVecs = int(m * hoRatio)
errorCount = 0.0
for i in range(numTestVecs):
res = kNNClassify(norm_matrix[i, :], norm_matrix[numTestVecs:m, :], labels[numTestVecs:m], 3)
print('The classifier came back with: %d, the real answer is %d' % (res, labels[i]))
if (res != labels[i]):
errorCount += 1.0
print('the total error rate is: %f' % (errorCount / float(numTestVecs)))
def classify_person(testSetName):
    resultList = ['not at all', 'in small doses', 'in large doses']
    percentTats = float(input('percentage of time spent playing video games? '))
    ffMiles = float(input('frequent flier miles earned per year? '))
    iceCream = float(input('liters of ice cream consumed per week? '))
    datingDataMat, datingLabels = file2matrix(testSetName)
    normMat, ranges, minVals = autoNorm(datingDataMat)
    inArr = np.array([ffMiles, percentTats, iceCream])
    # scale the new sample exactly like the training data: (x - min) / range
    classifierResult = kNNClassify((inArr - minVals) / ranges, normMat, datingLabels, 3)
    print('You will probably like this person : %s' % resultList[int(classifierResult) - 1])
filename = '../resource/dating/datingTestSet2.txt'
# matrix, labels = file2matrix(filename)
# norm_matrix, ranges, min = autoNorm(matrix)
# view3DMatrix(norm_matrix, labels)
# datingClassTest(filename)
classify_person(filename)
| [
"[email protected]"
] | |
f8d2154649e59afa419b79b4777684cdda82eb5c | 56b4d00870af18752b4414495b08e2ec3adf3ae4 | /src/clims/api/endpoints/process_assignments.py | c5fd2f83c03d0928f0637275f0b82856ee822b26 | [
"BSD-2-Clause"
] | permissive | commonlims/commonlims | 26c3f937eaa18e6935c5d3fcec823053ab7fefd9 | 36a02ed244c7b59ee1f2523e64e4749e404ab0f7 | refs/heads/develop | 2021-07-01T17:20:46.586630 | 2021-02-02T08:53:22 | 2021-02-02T08:53:22 | 185,200,241 | 4 | 1 | NOASSERTION | 2021-02-02T08:53:23 | 2019-05-06T13:16:37 | Python | UTF-8 | Python | false | false | 1,028 | py | from __future__ import absolute_import
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from sentry.api.base import Endpoint, SessionAuthentication
class ProcessAssignmentsEndpoint(Endpoint):
authentication_classes = (SessionAuthentication, )
permission_classes = (IsAuthenticated, )
def post(self, request, organization_slug):
"""
        Assign one or more items to a workflow. The items are assigned by global_id.
"""
# TODO-auth: Ensure that the user is only assigning samples that are under the organization
# Entities is a list of global ids (e.g. Substance-100)
entities = request.data["entities"]
definition = request.data["definitionId"]
variables = request.data["variables"]
assignments = list()
assignments += self.app.workflows.batch_assign(
entities, definition, request.user, variables)
return Response({"assignments": len(assignments)}, status=201)
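# Illustrative request body for the endpoint above (the values are made up,
# and the exact route depends on the project's URL configuration):
#
#     {
#         "entities": ["Substance-100", "Substance-101"],
#         "definitionId": "my_workflow_definition",
#         "variables": {"comment": "batch assignment"}
#     }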
| [
"[email protected]"
] | |
1186de1cba914cdcc904a0e0a09520080aa16289 | 46492cc7429c83fe362b0ed566fc54982e52c46e | /pitches/main/forms.py | bb9c5b6a6c3f20f413c47970a696323c03307838 | [
"MIT"
] | permissive | jakhax/pitches | 15c8d87825c879b56cd931d26d398e736636134f | e56358d00089bd46addd54192220bcca0478e0da | refs/heads/master | 2020-03-18T00:36:09.254870 | 2018-05-20T14:48:14 | 2018-05-20T14:48:14 | 134,102,974 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 7,399 | py | from flask import current_app, session
from flask_babel import lazy_gettext
from flask_wtf import FlaskForm
from wtforms import StringField, TextAreaField, BooleanField, SelectField, SubmitField, IntegerField
from wtforms import ValidationError
from wtforms.validators import DataRequired, InputRequired, Length, Email, Regexp
from ..models import Role, User, TopicGroup
class FormHelpersMixIn(object):
@property
def submit_fields(self):
return [getattr(self, field) for field, field_type in self._fields.items()
if isinstance(field_type, SubmitField)]
@staticmethod
def is_has_data(*fields):
return any([field.data for field in fields])
def get_flashed_errors(self):
errors = session.pop('_form_errors') if '_form_errors' in session else {}
self.errors.update(errors)
for field, errors in errors.items():
if hasattr(self, field):
form_field = getattr(self, field)
if form_field.errors:
form_field.errors.extend(errors)
else:
form_field.errors = errors
class EditProfileForm(FlaskForm):
name = StringField(lazy_gettext('Real name'), validators=[Length(0, 64)])
homeland = StringField(lazy_gettext('Homeland'), validators=[Length(0, 64)])
about = TextAreaField(lazy_gettext('About me'))
avatar = StringField(lazy_gettext('Link to avatar'), validators=[Length(0, 256)])
submit = SubmitField(lazy_gettext('Save'))
class EditProfileAdminForm(FlaskForm):
email = StringField(lazy_gettext('Email'), validators=[DataRequired(), Length(1, 64), Email()])
username = StringField(lazy_gettext('Username'), validators=[
DataRequired(), Length(1, 32), Regexp('^[A-Za-z][A-Za-z0-9_.]*$', 0, lazy_gettext(
'Usernames must have only letters, numbers, dots or underscores'))])
confirmed = BooleanField(lazy_gettext('Confirmed'))
role = SelectField(lazy_gettext('Role'), coerce=int)
name = StringField(lazy_gettext('Real name'), validators=[Length(0, 64)])
homeland = StringField(lazy_gettext('Homeland'), validators=[Length(0, 64)])
about = TextAreaField(lazy_gettext('About me'))
avatar = StringField(lazy_gettext('Link to avatar'), validators=[Length(0, 256)])
submit = SubmitField(lazy_gettext('Save'))
def __init__(self, user, *args, **kwargs):
super(EditProfileAdminForm, self).__init__(*args, **kwargs)
self.role.choices = [(role.id, role.name) for role in Role.query.order_by(Role.name).all()]
self.user = user
def validate_email(self, field):
if (field.data.lower() != self.user.email
and User.query.filter_by(email=field.data.lower()).first()):
raise ValidationError(lazy_gettext('Email already registered.'))
def validate_username(self, field):
if (field.data.lower() != self.user.username_normalized
and User.query.filter_by(username_normalized=field.data.lower()).first()):
raise ValidationError(lazy_gettext('Username already in use.'))
class TopicForm(FlaskForm):
title = StringField(lazy_gettext('Title'), validators=[DataRequired(), Length(0, 128)])
group_id = IntegerField(lazy_gettext('Topic group ID'), validators=[InputRequired()])
body = TextAreaField(lazy_gettext('Text'), validators=[DataRequired()], render_kw={'rows': 20})
poll_question="Rank"
poll_answers="Upvote\n Downvote"
submit = SubmitField(lazy_gettext('Save'))
cancel = SubmitField(lazy_gettext('Cancel'))
delete = SubmitField(lazy_gettext('Delete'))
def remove_edit_fields(self):
del self.group_id
del self.delete
def validate_group_id(self, field):
if not TopicGroup.query.filter_by(id=field.data).first():
raise ValidationError(lazy_gettext('Topic group with such ID is not exist.'))
class TopicWithPollForm(FlaskForm):
title = StringField(lazy_gettext('Title'), validators=[DataRequired(), Length(0, 128)])
group_id = IntegerField(lazy_gettext('Topic group ID'), validators=[InputRequired()])
body = TextAreaField(lazy_gettext('Text'), validators=[DataRequired()], render_kw={'rows': 20})
poll_question = StringField(lazy_gettext('Poll question'), validators=[DataRequired(), Length(0, 256)])
poll_answers = TextAreaField(lazy_gettext('Poll answers'), validators=[DataRequired()], render_kw={'rows': 10})
submit = SubmitField(lazy_gettext('Save'))
cancel = SubmitField(lazy_gettext('Cancel'))
delete = SubmitField(lazy_gettext('Delete'))
def remove_edit_fields(self):
del self.group_id
del self.delete
def validate_group_id(self, field):
if not TopicGroup.query.filter_by(id=field.data).first():
raise ValidationError(lazy_gettext('Topic group with such ID is not exist.'))
class TopicGroupForm(FlaskForm):
title = StringField(lazy_gettext('Title'), validators=[DataRequired(), Length(0, 64)])
group_id = IntegerField(lazy_gettext('Parent topic group ID'), validators=[InputRequired()])
priority = SelectField(lazy_gettext('Priority'), coerce=int)
protected = BooleanField(lazy_gettext('Moderators only'))
submit = SubmitField(lazy_gettext('Save'))
cancel = SubmitField(lazy_gettext('Cancel'))
delete = SubmitField(lazy_gettext('Delete'))
def __init__(self, *args, **kwargs):
super(TopicGroupForm, self).__init__(*args, **kwargs)
self.priority.choices = [(p, p) for p in current_app.config['TOPIC_GROUP_PRIORITY']]
def remove_edit_fields(self):
del self.group_id
del self.delete
def validate_group_id(self, field):
if not TopicGroup.query.filter_by(id=field.data).first():
raise ValidationError(lazy_gettext('Topic group with such ID is not exist.'))
class CommentForm(FlaskForm):
body = TextAreaField(lazy_gettext('Leave your comment, {username}:'), validators=[DataRequired()],
render_kw={'rows': 4})
submit = SubmitField(lazy_gettext('Submit'))
def __init__(self, user, *args, **kwargs):
super(CommentForm, self).__init__(*args, **kwargs)
self.body.label.text = self.body.label.text.format(username=user.username)
class CommentEditForm(FlaskForm):
body = TextAreaField(lazy_gettext('Text'), validators=[DataRequired()], render_kw={'rows': 8})
submit = SubmitField(lazy_gettext('Save'))
cancel = SubmitField(lazy_gettext('Cancel'))
delete = SubmitField(lazy_gettext('Delete'))
class MessageReplyForm(FlaskForm):
title = StringField(lazy_gettext('Subject'), validators=[DataRequired(), Length(0, 128)])
body = TextAreaField(lazy_gettext('Text'), validators=[DataRequired()], render_kw={'rows': 4})
send = SubmitField(lazy_gettext('Send'))
close = SubmitField(lazy_gettext('Close'))
delete = SubmitField(lazy_gettext('Delete'))
class MessageSendForm(FlaskForm):
title = StringField(lazy_gettext('Subject'), validators=[DataRequired(), Length(0, 128)])
body = TextAreaField(lazy_gettext('Text'), validators=[DataRequired()], render_kw={'rows': 4})
send = SubmitField(lazy_gettext('Send'))
cancel = SubmitField(lazy_gettext('Cancel'))
class SearchForm(FlaskForm):
text = StringField('', validators=[DataRequired(), Length(1, 64)])
search = SubmitField(lazy_gettext('Search'))
| [
"[email protected]"
] | |
eef750f84f81a27c35f5f451faf9e9a1b93c1cc4 | 4c117ea3617a576ddd07d8ea8aaab1a925fc402f | /bin/individualization/VennPlot.py | 18f444e66a82a4f9f64427b83e18f591f031b0f6 | [] | no_license | 452990729/Rep-seq | 7be6058ba3284bea81282f2db7fd3bd7895173ba | e217b115791e0aba064b2426e4502a5c1b032a94 | refs/heads/master | 2021-12-11T14:27:46.912144 | 2019-06-04T03:49:40 | 2019-06-04T03:49:40 | 190,124,555 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,549 | py | #!/usr/bin/env python
import os
import sys
import re
import matplotlib
matplotlib.use('Agg')
import venn
from matplotlib import pyplot as plt
def HandleFq(file_in):
base = '_'.join(re.split('_', os.path.basename(file_in))[:2])
list_tmp = []
m = 0
with open(file_in, 'r') as in1:
for line in in1:
m += 1
if m%4 == 2:
list_tmp.append(line.strip())
return set(list_tmp), base
def ReadTab(file_in):
list_tmp = []
label = '_'.join(re.split('_', os.path.basename(file_in))[:2])
with open(file_in, 'r') as in1:
for line in in1.readlines()[1:]:
list_tmp.append(re.split('\t', line.strip())[36])
return set(list_tmp), label
def main():
len_arg = len(sys.argv)
if sys.argv[1] == 'fastq':
func = HandleFq
elif sys.argv[1] == 'tab':
func = ReadTab
list_l = []
list_lb = []
for i in range(len_arg-2):
l, lb = func(sys.argv[i+2])
list_l.append(l)
list_lb.append(lb)
labels = venn.get_labels(list_l, fill=['number',])
if len_arg == 4:
fig, ax = venn.venn2(labels, names=list_lb)
elif len_arg == 5:
fig, ax = venn.venn3(labels, names=list_lb)
elif len_arg == 6:
fig, ax = venn.venn4(labels, names=list_lb)
elif len_arg == 7:
fig, ax = venn.venn5(labels, names=list_lb)
elif len_arg == 8:
fig, ax = venn.venn6(labels, names=list_lb)
plt.savefig('{}wayvenn.png'.format(str(len_arg-2)))
if __name__ == '__main__':
main()
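# Usage sketch (hypothetical file names; 2 to 6 input files are supported):
#     python VennPlot.py fastq sampleA_1_R1.fastq sampleB_1_R1.fastq
#     python VennPlot.py tab sampleA_1.tab sampleB_1.tab sampleC_1.tab
# The first argument selects the parser; the diagram is written to
# '<n>wayvenn.png', where <n> is the number of input files.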
| [
"[email protected]"
] | |
23206587aae4835dbc893edeaad63d67170d75c3 | 23e877d2e65cdc49cf9a456845470f97194674bc | /src/main/resources/http/http_request.py | e9a3e1cdc87380b5ff871b18466c069841a84cdd | [
"MIT"
] | permissive | xebialabs-community/xld-github-dynamic-dictionaries-plugin | 77da6a4fea1ca2b96207d77b0396011e088ac850 | 67c3a596f4a7f58f9d0a939bb57091d1f82c51ee | refs/heads/master | 2021-07-13T17:15:15.222551 | 2020-11-02T12:49:14 | 2020-11-02T12:49:14 | 68,606,897 | 2 | 2 | MIT | 2021-03-26T22:14:23 | 2016-09-19T13:09:01 | Python | UTF-8 | Python | false | false | 9,826 | py | #
# Copyright 2020 XEBIALABS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
import re
import urllib
from java.lang import String
from org.apache.commons.codec.binary import Base64
from org.apache.http import HttpHost
from org.apache.http.client.config import RequestConfig
from org.apache.http.client.methods import HttpGet, HttpPost, HttpPut, HttpDelete
from org.apache.http.util import EntityUtils
from org.apache.http.impl.client import HttpClients
from http.http_response import HttpResponse
class HttpRequest:
def __init__(self, params, username = None, password = None):
"""
Builds an HttpRequest
:param params: an HttpConnection
:param username: the username
(optional, it will override the credentials defined on the HttpConnection object)
        :param password: the password
(optional, it will override the credentials defined on the HttpConnection object)
"""
self.params = params
self.username = username
self.password = password
def do_request(self, **options):
"""
Performs an HTTP Request
:param options: A keyword arguments object with the following properties :
method: the HTTP method : 'GET', 'PUT', 'POST', 'DELETE'
(optional: GET will be used if empty)
context: the context url
(optional: the url on HttpConnection will be used if empty)
            entity: the entity (body) of the HTTP request for PUT & POST calls
                (optional: an empty entity will be used if empty)
contentType: the content type to use
(optional, no content type will be used if empty)
headers: a dictionary of headers key/values
(optional, no headers will be used if empty)
:return: an HttpResponse instance
"""
request = self.build_request(
options.get('method', 'GET'),
options.get('context', ''),
options.get('entity', ''),
options.get('contentType', None),
options.get('headers', None))
return self.execute_request(request)
def do_request_without_headers(self, **options):
"""
Performs an HTTP Request
:param options: A keyword arguments object with the following properties :
method: the HTTP method : 'GET', 'PUT', 'POST', 'DELETE'
(optional: GET will be used if empty)
context: the context url
(optional: the url on HttpConnection will be used if empty)
            entity: the entity (body) of the HTTP request for PUT & POST calls
                (optional: an empty entity will be used if empty)
                Note: unlike do_request(), this variant sets no Content-Type,
                Accept or custom headers on the request.
:return: an HttpResponse instance
"""
request = self.build_request_without_headers(
options.get('method', 'GET'),
options.get('context', ''),
options.get('entity', ''))
return self.execute_request(request)
def get(self, context, **options):
"""
Performs an Http GET Request
:param context: the context url
:param options: the options keyword argument described in do_request()
:return: an HttpResponse instance
"""
options['method'] = 'GET'
options['context'] = context
return self.do_request(**options)
def put(self, context, entity, **options):
"""
Performs an Http PUT Request
:param context: the context url
        :param entity: the entity (body) of the HTTP request
:param options: the options keyword argument described in do_request()
:return: an HttpResponse instance
"""
options['method'] = 'PUT'
options['context'] = context
options['entity'] = entity
return self.do_request(**options)
def post(self, context, entity, **options):
"""
Performs an Http POST Request
:param context: the context url
        :param entity: the entity (body) of the HTTP request
:param options: the options keyword argument described in do_request()
:return: an HttpResponse instance
"""
options['method'] = 'POST'
options['context'] = context
options['entity'] = entity
return self.do_request(**options)
def post_without_headers(self, context, entity, **options):
"""
Performs an Http POST Request
:param context: the context url
        :param entity: the entity (body) of the HTTP request
:param options: the options keyword argument described in do_request()
:return: an HttpResponse instance
"""
options['method'] = 'POST'
options['context'] = context
options['entity'] = entity
return self.do_request_without_headers(**options)
def delete(self, context, **options):
"""
Performs an Http DELETE Request
:param context: the context url
:param options: the options keyword argument described in do_request()
:return: an HttpResponse instance
"""
options['method'] = 'DELETE'
options['context'] = context
return self.do_request(**options)
def build_request(self, method, context, entity, contentType, headers):
url = self.quote(self.create_path(self.params.getUrl(), context))
method = method.upper()
if method == 'GET':
request = HttpGet(url)
elif method == 'POST':
request = HttpPost(url)
request.setEntity(entity)
elif method == 'PUT':
request = HttpPut(url)
request.setEntity(entity)
elif method == 'DELETE':
request = HttpDelete(url)
else:
raise Exception('Unsupported method: ' + method)
request.addHeader('Content-Type', contentType)
request.addHeader('Accept', contentType)
self.set_credentials(request)
self.set_proxy(request)
self.setHeaders(request, headers)
return request
def build_request_without_headers(self, method, context, entity):
url = self.quote(self.create_path(self.params.getUrl(), context))
method = method.upper()
if method == 'GET':
request = HttpGet(url)
elif method == 'POST':
request = HttpPost(url)
request.setEntity(entity)
elif method == 'PUT':
request = HttpPut(url)
request.setEntity(entity)
elif method == 'DELETE':
request = HttpDelete(url)
else:
raise Exception('Unsupported method: ' + method)
self.set_credentials(request)
self.set_proxy(request)
return request
def create_path(self, url, context):
url = re.sub('/*$', '', url)
if context is None:
return url
elif context.startswith('/'):
return url + context
else:
return url + '/' + context
def quote(self, url):
return urllib.quote(url, ':/?&=%')
def set_credentials(self, request):
if self.username:
username = self.username
password = self.password
elif self.params.getUsername():
username = self.params.getUsername()
password = self.params.getPassword()
else:
return
encoding = Base64.encodeBase64String(String(username + ':' + password).getBytes())
request.addHeader('Authorization', 'Basic ' + encoding)
def set_proxy(self, request):
if not self.params.getProxyHost():
return
proxy = HttpHost(self.params.getProxyHost(), int(self.params.getProxyPort()))
config = RequestConfig.custom().setProxy(proxy).build()
request.setConfig(config)
def setHeaders(self, request, headers):
if headers:
for key in headers:
request.setHeader(key, headers[key])
def execute_request(self, request):
client = None
response = None
try:
client = HttpClients.createDefault()
response = client.execute(request)
status = response.getStatusLine().getStatusCode()
entity = response.getEntity()
result = EntityUtils.toString(entity, "UTF-8") if entity else None
headers = response.getAllHeaders()
EntityUtils.consume(entity)
return HttpResponse(status, result, headers)
finally:
if response:
response.close()
if client:
client.close()
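# Minimal usage sketch (illustrative only). `params` is expected to be an
# object exposing getUrl()/getUsername()/getPassword()/getProxyHost()/
# getProxyPort(), e.g. an XL Deploy HttpConnection CI; the endpoint and
# credentials below are made-up values, and the response accessors depend
# on the HttpResponse class in http.http_response:
#
#     request = HttpRequest(params, 'admin', 'secret')
#     response = request.get('/api/ping', contentType='application/json')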
| [
"[email protected]"
] | |
c423950c678b966d72c428c4dadd7d1045308bbb | c536c764aab4170c64f3f8b78bd91593dcb161a3 | /vigenereb62.py | 037292215097560084e9451db9c5655b7c2fb996 | [] | no_license | numberly/vigenere-b62 | 63bbc95c1f9390e9623a5882a9c2a14d110851b4 | 3dea3394ee557ba2e589af014cbc4454ebbbc874 | refs/heads/master | 2023-02-16T02:13:31.254670 | 2021-01-11T15:24:58 | 2021-01-11T15:24:58 | 328,698,862 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 535 | py | def iter_reverse_digits(number, base):
while number != 0:
digit = number % base
yield digit
number -= digit
number //= base
def encode(alphabets, seed, size=6):
if len(alphabets) < size:
raise ValueError("There should be an alphabet per character you want")
secret = "".join(
alphabets[i][digit]
for i, digit in enumerate(iter_reverse_digits(seed, len(alphabets[0])))
)
secret += "".join(alphabets[i][0] for i in range(len(secret), size))
return secret
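if __name__ == "__main__":
    # Quick demonstration with identity alphabets. Using the plain base-62
    # alphabet for every position is an assumption made for this demo; a real
    # deployment would shuffle each positional alphabet independently.
    import string

    base62 = string.digits + string.ascii_uppercase + string.ascii_lowercase
    alphabets = [base62] * 6
    print(encode(alphabets, 12345))  # digits 7, 13, 3 -> "7D3", padded to "7D3000"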
| [
"[email protected]"
] | |
705c2db27a5d0906938b557caab4e18133150a24 | 19ac1d0131a14ba218fd2c55d585170222eb9400 | /social_login/wsgi.py | 9523f947cda705e24cea5e1c828e7fb9ee17044c | [] | no_license | oereo/social-login | 4ed27658c585dd0a24484e628e053070fe012518 | 41e67b889354189c986da45bcf03c20c1f1063e3 | refs/heads/master | 2023-01-15T22:38:06.667909 | 2020-11-22T12:12:08 | 2020-11-22T12:12:08 | 303,985,281 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | """
WSGI config for social_login project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'social_login.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
1bff440e67a7189665b42fe0833a0c9b007950e7 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_defenders.py | bb7548df4efbbe4fec4aeb39f3eec118e52a2ba7 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 252 | py |
from xai.brain.wordbase.nouns._defender import _DEFENDER
# class header
class _DEFENDERS(_DEFENDER, ):
def __init__(self,):
_DEFENDER.__init__(self)
self.name = "DEFENDERS"
self.specie = 'nouns'
self.basic = "defender"
self.jsondata = {}
| [
"[email protected]"
] | |
8339c4b6670fe18b61771e18903739838373f58c | 01ce2eec1fbad3fb2d98085ebfa9f27c7efb4152 | /itertools/itertools-combinations.py | b32166fe2a76aece52bb636b0b8705a63f17c3ce | [
"MIT"
] | permissive | anishLearnsToCode/hackerrank-python | 4cfeaf85e33f05342df887896fa60dae5cc600a5 | 7d707c07af051e7b00471ebe547effd7e1d6d9d9 | refs/heads/master | 2023-01-01T23:39:01.143328 | 2020-11-01T07:31:15 | 2020-11-01T07:31:15 | 265,767,347 | 8 | 4 | null | null | null | null | UTF-8 | Python | false | false | 252 | py | # https://www.hackerrank.com/challenges/itertools-combinations/problem
import itertools
line = input().split()
word = sorted(line[0])
k = int(line[1])
for i in range(1, k + 1):
for j in itertools.combinations(word, i):
print(''.join(j))
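# Sample run (from the problem statement): for input "HACK 2" the sorted
# word is "ACHK", so this prints A, C, H, K, then AC, AH, AK, CH, CK, HK,
# one combination per line.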
| [
"[email protected]"
] | |
e4603076015ad9b338c87de21b02807faa509853 | 91948d5be26636f1f2b941cb933701ea626a695b | /amazon_longest_substring_with_no_repeat.py | 30208e55e14fb6ba9b3eabe03ddda30851bc6a3b | [
"MIT"
] | permissive | loghmanb/daily-coding-problem | 4ae7dd201fde5ee1601e0acae9e9fc468dcd75c9 | b2055dded4276611e0e7f1eb088e0027f603aa7b | refs/heads/master | 2023-08-14T05:53:12.678760 | 2023-08-05T18:12:38 | 2023-08-05T18:12:38 | 212,894,228 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,709 | py | '''
Longest Substring Without Repeat
Asked in: Amazon
https://www.interviewbit.com/problems/longest-substring-without-repeat/
Given a string,
find the length of the longest substring without repeating characters.
Example:
The longest substring without repeating letters for "abcabcbb" is "abc", whose length is 3.
For "bbbbb" the longest substring is "b", with the length of 1.
'''
# @param A : string
# @return an integer
def lengthOfLongestSubstring(A):
if not A: return 0
result = 0
letters = set()
N = len(A)
i = j = 0
while i<N and j<N:
if A[j] in letters:
letters.remove(A[i])
i += 1
else:
letters.add(A[j])
j += 1
result = max(result, j-i)
return result
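# Why this runs in O(N): the window A[i:j] never contains a repeated
# character, and each iteration advances either j (admit a new character)
# or i (evict the oldest one), so each pointer moves at most N times.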
if __name__ == "__main__":
data = [
['abcabcbb', 3],
['Wnb9z9dMc7E8v1RTUaZPoDNIAXRlzkqLaa97KMWLzbitaCkRpiE4J4hJWhRcGnC8H6mwasgDfZ76VKdXhvEYmYrZY4Cfmf4HoSlchYWFEb1xllGKyEEmZOLPh1V6RuM7Mxd7xK72aNrWS4MEaUmgEn7L4rW3o14Nq9l2EN4HH6uJWljI8a5irvuODHY7A7ku4PJY2anSWnfJJE1w8p12Ks3oZRxAF3atqGBlzVQ0gltOwYmeynttUmQ4QBDLDrS4zn4VRZLosOITo4JlIqPD6t4NjhHThOjJxpMp9fICkrgJeGiDAwsb8a3I7Txz5BBKV9bEfMsKNhCuY3W0ZHqY0MhBfz1CbYCzwZZdM4p65ppP9s5QJcfjadmMMi26JKz0TVVwvNA8LP5Vi1QsxId4SI19jfcUH97wmZu0pbw1zFtyJ8GAp5yjjQTzFIboC1iRzklnOJzJld9TMaxqvBNBJKIyDjWrdfLOY8FGMOcPhfJ97Dph35zfxYyUf4DIqFi94lm9J0skYqGz9JT0kiAABQZDazZcNi80dSSdveSl6h3dJjHmlK8qHIlDsqFd5FMhlEirax8WA0v3NDPT8vPhwKpxcnVeu14Gcxr3h1wAXXV0y7Xy9qqB2NQ5HQLJ7cyXAckEYHsLCPSy28xcdNJatx1KLWohOQado4WywJbGvsFR17rKmvOPABweXnFD3odrbSMD4Na4nuBBswvMmFRTUOcf7jZi4z5JnJqXz6hitaPnaEtjoSEBq82a52nvqYy7hhldBoxen2et2OMadVEHeTYLL7GLsIhTP6UizHIuzcJMljo4lFgW5AyrfUlIBPAlhwaSiJtTvcbVZynDSM6RO1PqFKWKg2MHIgNhjuzENg2oFCfW7z5KJvEL9qWqKzZNc0o3BMRjS04NCHFvhtsteQoQRgz84XZBHBJRdekCdcVVXu9c01gYRAz7oIAxN3zKZb64EFKssfQ4HW971jv3H7x5E9dAszA0HrKTONyZDGYtHWt4QLhNsIs8mo4AIN7ecFKewyvGECAnaJpDn1MTTS4yTgZnm6N6qnmfjVt6ZU51F9BxH0jVG0kovTGSjTUkmb1mRTLQE5mTlVHcEz3yBOh4WiFFJjKJdi1HBIBaDL4r45HzaBvmYJPlWIomkqKEmQ4rLAbYG7C5rFfpMu8rHvjU7hP0JVvteGtaGn7mqeKsn7CgrJX1tb8t0ldaS3iUy8SEKAo5IZHNKOfEaij3nI4oRVzeVOZsH91pMsA4jRYgEohubPW8ciXwVrFi1qEWjvB8gfalyP60n1fHyjsiLW0T5uY1JzQWHKCbLVh7QFoJFAEV0L516XmzIo556yRH1vhPnceOCjebqgsmO78AQ8Ir2d4pHFFHAGB9lESn3OtJye1Lcyq9D6X93UakA3JKVKEt6JZDLVBMp4msOefkPKSw59Uix9d9kOQm8WCepJTangdNSOKaxblZDNJ5eHvEroYacBhd9UdafEitdF3nfStF7AhkSfQVC61YWWkKTNdx96OoJGTnxuqt4oFZNFtO7aMuN3IJAkw3m3kgZFRGyd3D3wweagNL9XlYtvZwejbjpkDOZz33C0jbEWaMEaUPw6BG49XqyQoUwtriguO0yvWyaJqD4ye3o0E46huKYAsdKAq6MLWMxF6tfyPVaoqOGd0eOBHbAF89XXmDd4AIkoFPXkAOW8hln5nXnIWP6RBbfEkPPbxoToMbV', 27]
]
for d in data:
print('input', d[0], lengthOfLongestSubstring(d[0])) | [
"[email protected]"
] | |
5722c5bd79ba59802f5e4174de590823f9b31f54 | 6b2a8dd202fdce77c971c412717e305e1caaac51 | /solutions_5631989306621952_1/Python/Hotshot8325/Q2.py | c61b1a46284a8ff8a0e7daff7477923bbd7b7f0f | [] | no_license | alexandraback/datacollection | 0bc67a9ace00abbc843f4912562f3a064992e0e9 | 076a7bc7693f3abf07bfdbdac838cb4ef65ccfcf | refs/heads/master | 2021-01-24T18:27:24.417992 | 2017-05-23T09:23:38 | 2017-05-23T09:23:38 | 84,313,442 | 2 | 4 | null | null | null | null | UTF-8 | Python | false | false | 968 | py | # Code Jam: greedily build the lexicographically largest word (prepend a letter when it is >= the current first letter, otherwise append)
import csv
import string
#import data from test file in the form [[[],[]],[[],[]].... with [[],[]] being one test case
with open('a-large.in') as csvfile:
testCase = csv.reader(csvfile, delimiter = ' ', quotechar='|')
rowNum = 0
inputText = []
#swapCount = []
for row in testCase:
#row = [str(i) for i in row]
if rowNum == 0:
numTestCases = int(row[0])
else:
inputText.append(row)
rowNum = rowNum + 1
for i in range(0,numTestCases):
letterInput = inputText[i][0]
lastWord = letterInput[0]
for j in range(1,len(letterInput)):
if string.uppercase.index(letterInput[j])>=string.uppercase.index(lastWord[0]):
lastWord = letterInput[j]+lastWord
else:
lastWord = lastWord +letterInput[j]
print "Case #"+str(i+1)+": "+lastWord
| [
"[email protected]"
] | |
566302b568f0103bd3c6c2d54e6988ac6dd06f4b | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /JD9vSKZGrxQhLbA9r_11.py | 8153c6d8cc99992256ea1d82f8771cd6328f44f3 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | py |
def pile_of_cubes(m):
if m >= 10252519345963644753026: return None
x = m**0.5
if (x%1==0):
c = 1
while (x != c and x > 0):
x = x - c
c = c + 1
if (x == c):
return c
return None
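# Why this works: 1^3 + 2^3 + ... + c^3 = (c*(c+1)/2)^2, so m must be a
# perfect square whose root x is a triangular number; the loop peels off
# 1, 2, 3, ... from x until it lands exactly on c.
if __name__ == "__main__":
    print(pile_of_cubes(1071225))  # 1071225 = 1035**2 and 1035 = 45*46/2 -> 45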
| [
"[email protected]"
] | |
05f1c23936d977e70fdef1e44fc27ab9f069cadf | 55647a80c8b412af9df0ba3f50595cc2f29c25e6 | /res/scripts/common/Lib/encodings/gbk.py | 4b4a46dcbfdea9c2f98724c76a52405e54febf9c | [] | no_license | cnsuhao/WOT-0.9.17-CT | 0035eb6070fb4fab8d8ee9f8bbc676c10d511cfb | d1f932d8cabaf8aa21708622e87f83c8d24d6451 | refs/heads/master | 2021-06-08T18:11:07.039293 | 2016-11-19T19:12:37 | 2016-11-19T19:12:37 | null | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 1,136 | py | # 2016.11.19 19:58:56 Střední Evropa (běžný čas)
# Embedded file name: scripts/common/Lib/encodings/gbk.py
import _codecs_cn, codecs
import _multibytecodec as mbc
codec = _codecs_cn.getcodec('gbk')
class Codec(codecs.Codec):
encode = codec.encode
decode = codec.decode
class IncrementalEncoder(mbc.MultibyteIncrementalEncoder, codecs.IncrementalEncoder):
codec = codec
class IncrementalDecoder(mbc.MultibyteIncrementalDecoder, codecs.IncrementalDecoder):
codec = codec
class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
codec = codec
class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
codec = codec
def getregentry():
return codecs.CodecInfo(name='gbk', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter)
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\common\Lib\encodings\gbk.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.11.19 19:58:56 Střední Evropa (běžný čas)
| [
"[email protected]"
] | |
cd75f26df497e0e47746786f0197f8dc9b218f06 | 930c207e245c320b108e9699bbbb036260a36d6a | /BRICK-RDFAlchemy/generatedCode/brick/brickschema/org/schema/_1_0_2/Brick/FCU_Return_Air_Temperature_Sensor.py | d4ac39c9698a57051d03037b2f79dc41b5511c4b | [] | no_license | InnovationSE/BRICK-Generated-By-OLGA | 24d278f543471e1ce622f5f45d9e305790181fff | 7874dfa450a8a2b6a6f9927c0f91f9c7d2abd4d2 | refs/heads/master | 2021-07-01T14:13:11.302860 | 2017-09-21T12:44:17 | 2017-09-21T12:44:17 | 104,251,784 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | from rdflib import Namespace, Graph, Literal, RDF, URIRef
from rdfalchemy.rdfSubject import rdfSubject
from rdfalchemy import rdfSingle, rdfMultiple, rdfList
from brick.brickschema.org.schema._1_0_2.Brick.Return_Air_Temperature_Sensor import Return_Air_Temperature_Sensor
class FCU_Return_Air_Temperature_Sensor(Return_Air_Temperature_Sensor):
rdf_type = Namespace('https://brickschema.org/schema/1.0.2/Brick#').FCU_Return_Air_Temperature_Sensor
| [
"[email protected]"
] | |
1158acb79cf822c0ded1ea29f10b77727305c073 | cd142a4e15d3576546fcb44841417039f0b8fb00 | /build/double/catkin_generated/pkg.installspace.context.pc.py | 9b014836f2e3e476722b6c40aa901294660dad37 | [] | no_license | mgou123/rplidar | 4389819eb1998d404d1066c7b4a983972d236ce7 | 608c1f6da2d3e5a8bac06e8d55d8569af828a40b | refs/heads/master | 2022-11-10T05:51:56.403293 | 2020-06-29T04:16:14 | 2020-06-29T04:16:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 393 | py | # generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "roscpp;std_msgs;sensor_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "double"
PROJECT_SPACE_DIR = "/home/xu/dogkin_ws/install"
PROJECT_VERSION = "0.0.0"
| [
"[email protected]"
] | |
237743cb29e83580cbade37977253888764a05b4 | f4f54015298eedfbbdfcaaf5e2a9603112f803a5 | /sachin/gocept.filestore-0.3/gocept.filestore-0.3/src/gocept/filestore/tests.py | 39487c46c2cf44f18a2df60610d46b4e1e9848c4 | [] | no_license | raviramawat8/Old_Python_Codes | f61e19bff46856fda230a096aa789c7e54bd97ca | f940aed0611b0636e1a1b6826fa009ceb2473c2b | refs/heads/master | 2020-03-22T22:54:50.964816 | 2018-06-16T01:39:43 | 2018-06-16T01:39:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 335 | py | # Copyright (c) 2007 gocept gmbh & co. kg
# See also LICENSE.txt
# $Id: tests.py 5111 2007-08-30 11:27:23Z zagy $
import unittest
from zope.testing import doctest
def test_suite():
suite = unittest.TestSuite()
suite.addTest(doctest.DocFileSuite(
'README.txt',
optionflags=doctest.ELLIPSIS))
return suite
| [
"[email protected]"
] | |
1eb7d4b356ecdfbafd7359821f946512d7724998 | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/googlecloudsdk/generated_clients/apis/artifactregistry/v1beta2/resources.py | 1c5440583e39b379a1c8a68cde0b2d6841f35146 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 3,295 | py | # -*- coding: utf-8 -*- #
# Copyright 2023 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Resource definitions for Cloud Platform Apis generated from apitools."""
import enum
BASE_URL = 'https://artifactregistry.googleapis.com/v1beta2/'
DOCS_URL = 'https://cloud.google.com/artifacts/docs/'
class Collections(enum.Enum):
"""Collections for all supported apis."""
PROJECTS = (
'projects',
'projects/{projectsId}',
{},
['projectsId'],
True
)
PROJECTS_LOCATIONS = (
'projects.locations',
'{+name}',
{
'':
'projects/{projectsId}/locations/{locationsId}',
},
['name'],
True
)
PROJECTS_LOCATIONS_OPERATIONS = (
'projects.locations.operations',
'{+name}',
{
'':
'projects/{projectsId}/locations/{locationsId}/operations/'
'{operationsId}',
},
['name'],
True
)
PROJECTS_LOCATIONS_REPOSITORIES = (
'projects.locations.repositories',
'{+name}',
{
'':
'projects/{projectsId}/locations/{locationsId}/repositories/'
'{repositoriesId}',
},
['name'],
True
)
PROJECTS_LOCATIONS_REPOSITORIES_FILES = (
'projects.locations.repositories.files',
'{+name}',
{
'':
'projects/{projectsId}/locations/{locationsId}/repositories/'
'{repositoriesId}/files/{filesId}',
},
['name'],
True
)
PROJECTS_LOCATIONS_REPOSITORIES_PACKAGES = (
'projects.locations.repositories.packages',
'{+name}',
{
'':
'projects/{projectsId}/locations/{locationsId}/repositories/'
'{repositoriesId}/packages/{packagesId}',
},
['name'],
True
)
PROJECTS_LOCATIONS_REPOSITORIES_PACKAGES_TAGS = (
'projects.locations.repositories.packages.tags',
'{+name}',
{
'':
'projects/{projectsId}/locations/{locationsId}/repositories/'
'{repositoriesId}/packages/{packagesId}/tags/{tagsId}',
},
['name'],
True
)
PROJECTS_LOCATIONS_REPOSITORIES_PACKAGES_VERSIONS = (
'projects.locations.repositories.packages.versions',
'{+name}',
{
'':
'projects/{projectsId}/locations/{locationsId}/repositories/'
'{repositoriesId}/packages/{packagesId}/versions/{versionsId}',
},
['name'],
True
)
def __init__(self, collection_name, path, flat_paths, params,
enable_uri_parsing):
self.collection_name = collection_name
self.path = path
self.flat_paths = flat_paths
self.params = params
self.enable_uri_parsing = enable_uri_parsing
| [
"[email protected]"
] | |
9805ffe4daef50c8bdfe737999913fe9357c8479 | e4da82e4beb9b1af7694fd5b49824a1c53ee59ff | /AutoWorkup/SEMTools/registration/averagebraingenerator.py | b206faa7d7b842adead8675771f35338e6d91db4 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | ipekoguz/BRAINSTools | c8732a9206525adb5779eb0c2ed97f448e2df47f | dc32fa0820a0d0b3bd882fa744e79194c9c137bc | refs/heads/master | 2021-01-18T08:37:03.883250 | 2013-05-14T21:08:33 | 2013-05-14T21:08:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,709 | py | # -*- coding: utf8 -*-
"""Autogenerated file - DO NOT EDIT
If you spot a bug, please report it on the mailing list and/or change the generator."""
from nipype.interfaces.base import CommandLine, CommandLineInputSpec, SEMLikeCommandLine, TraitedSpec, File, Directory, traits, isdefined, InputMultiPath, OutputMultiPath
import os
class AverageBrainGeneratorInputSpec(CommandLineInputSpec):
inputDirectory = File(desc="Image To Warp", exists=True, argstr="--inputDirectory %s")
templateVolume = File(desc="Reference image defining the output space", exists=True, argstr="--templateVolume %s")
    resolusion = traits.Str(desc="The resolution.", argstr="--resolusion %s")
iteration = traits.Str(desc="The iteration.", argstr="--iteration %s")
pixelType = traits.Enum("uchar", "short", "ushort", "int", "uint", "float", desc="Specifies the pixel type for the input/output images", argstr="--pixelType %s")
outputVolume = traits.Either(traits.Bool, File(), hash_files=False, desc="Resulting deformed image", argstr="--outputVolume %s")
class AverageBrainGeneratorOutputSpec(TraitedSpec):
outputVolume = File(desc="Resulting deformed image", exists=True)
class AverageBrainGenerator(SEMLikeCommandLine):
"""title: Average Brain Generator
category: Registration
description:
This program creates a synthesized average brain.
version: 0.1
documentation-url: http://mri.radiology.uiowa.edu/mriwiki
license: NEED TO ADD
contributor: This tool was developed by Yongqiang Zhao.
"""
input_spec = AverageBrainGeneratorInputSpec
output_spec = AverageBrainGeneratorOutputSpec
_cmd = " AverageBrainGenerator "
_outputs_filenames = {'outputVolume':'outputVolume'}
| [
"[email protected]"
] | |
4ff8a625e52e7a2fc0f40fd40fdb70a36086c6e2 | ad13583673551857615498b9605d9dcab63bb2c3 | /output/instances/sunData/SType/ST_facets/ST_facets00201m/ST_facets00201m9_p.py | 6b09bb1b8dd9512268b76bbd79e2c658e0d3fc7d | [
"MIT"
] | permissive | tefra/xsdata-w3c-tests | 397180205a735b06170aa188f1f39451d2089815 | 081d0908382a0e0b29c8ee9caca6f1c0e36dd6db | refs/heads/main | 2023-08-03T04:25:37.841917 | 2023-07-29T17:10:13 | 2023-07-30T12:11:13 | 239,622,251 | 2 | 0 | MIT | 2023-07-25T14:19:04 | 2020-02-10T21:59:47 | Python | UTF-8 | Python | false | false | 139 | py | from output.models.sun_data.stype.st_facets.st_facets00201m.st_facets00201m9_xsd.st_facets00201m9 import Test
obj = Test(
value=10
)
| [
"[email protected]"
] | |
8927c9bfdeb3e5161e03c5bbfb20291758317781 | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2367/60791/254956.py | 3891197249694bfc95edf61b7fdb4f59e0c7209d | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 127 | py | k = int(input())
n = '1'
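# A number written only with 1s (a repunit) always ends in 1, so it can never
# be divisible by 2 or 5. For any other k, the repunits cycle through residues
# modulo k, so by the pigeonhole principle some repunit is divisible by k and
# the loop below terminates.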
if(k%2==0 or k%5==0):
print(-1)
else:
while(int(n)%k != 0):
n += '1'
print(len(n)) | [
"[email protected]"
] | |
e6dfd9cb391b1dc09795b1911c78d7980a0ff1ee | b7f45072d056b80ed49e6bcde91877d8576e970d | /ImageJ/py/Wayne-blob-example.py | 610a35e6e5ddb80455ce608015ed6b1efdfc7ff2 | [] | no_license | jrminter/tips | 128a18ee55655a13085c174d532c77bcea412754 | f48f8b202f8bf9e36cb6d487a23208371c79718e | refs/heads/master | 2022-06-14T08:46:28.972743 | 2022-05-30T19:29:28 | 2022-05-30T19:29:28 | 11,463,325 | 5 | 8 | null | 2019-12-18T16:24:02 | 2013-07-17T00:16:43 | Jupyter Notebook | UTF-8 | Python | false | false | 321 | py | from org.python.core import codecs
codecs.setDefaultEncoding('utf-8')
import os
from ij import IJ, WindowManager
IJ.run("Close All")
img = IJ.openImage("http://wsr.imagej.net/images/blobs.gif")
IJ.setAutoThreshold(img, "Default")
IJ.run(img, "Analyze Particles...", " show=[Bare Outlines] include in_situ")
img.show()
| [
"[email protected]"
] | |
014cbf61158fb280b11d2f149b026f48d5234c0e | 2e2a54e30f8c8018fe0d163a5fd4b0d854ef165d | /src/gluonts/torch/model/deep_npts/_network.py | c29d1935c3d32e884ec124b33fde866e0b55aa92 | [
"Apache-2.0"
] | permissive | kashif/gluon-ts | b742021ca0292ca2885b3b079150f24cdf3e6dec | a818f69dc049c1c1d57e09d2ccb8b5f7a0cff656 | refs/heads/master | 2023-09-05T00:00:22.861992 | 2023-08-09T15:47:28 | 2023-08-09T15:47:28 | 222,552,468 | 5 | 0 | null | 2019-11-18T21:56:52 | 2019-11-18T21:56:52 | null | UTF-8 | Python | false | false | 14,377 | py | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License.
# A copy of the License is located at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# or in the "license" file accompanying this file. This file is distributed
# on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
# express or implied. See the License for the specific language governing
# permissions and limitations under the License.
from functools import partial
from typing import Optional, Callable, List, Union
import torch
from torch import nn
from torch.distributions import (
Categorical,
MixtureSameFamily,
Normal,
)
from gluonts.core.component import validated
from gluonts.torch.distributions import DiscreteDistribution
from .scaling import (
min_max_scaling,
standard_normal_scaling,
)
INPUT_SCALING_MAP = {
"min_max_scaling": partial(min_max_scaling, dim=1, keepdim=True),
"standard_normal_scaling": partial(
standard_normal_scaling, dim=1, keepdim=True
),
}
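# The scaling callables above are assumed (based on how `self.input_scaling`
# is used in DeepNPTSNetwork.forward below) to take a tensor of shape
# (batch, context_length) and return a (loc, scale) pair with keepdim-ed
# shape (batch, 1), so the target can be normalized as:
#
#     loc, scale = INPUT_SCALING_MAP["min_max_scaling"](past_target)
#     x_scaled = (past_target - loc) / scale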
def init_weights(module: nn.Module, scale: float = 1.0):
if type(module) == nn.Linear:
nn.init.uniform_(module.weight, -scale, scale)
nn.init.zeros_(module.bias)
class FeatureEmbedder(nn.Module):
"""Creates a feature embedding for the static categorical features."""
@validated()
def __init__(
self,
cardinalities: List[int],
embedding_dimensions: List[int],
):
super().__init__()
assert (
len(cardinalities) > 0
), "Length of `cardinalities` list must be greater than zero"
        assert len(cardinalities) == len(
            embedding_dimensions
        ), "Length of `cardinalities` and `embedding_dimensions` should match"
assert all(
[c > 0 for c in cardinalities]
), "Elements of `cardinalities` should be > 0"
assert all(
[d > 0 for d in embedding_dimensions]
), "Elements of `embedding_dims` should be > 0"
        # Keep the embeddings in an nn.ModuleList so their parameters are
        # registered with the module and visible to the optimizer.
        self.embedders = nn.ModuleList(
            [
                torch.nn.Embedding(num_embeddings=card, embedding_dim=dim)
                for card, dim in zip(cardinalities, embedding_dimensions)
            ]
        )
for embedder in self.embedders:
embedder.apply(init_weights)
def forward(self, features: torch.Tensor):
"""
Parameters
----------
features
Input features to the model, shape: (-1, num_features).
Returns
-------
torch.Tensor
Embedding, shape: (-1, sum(self.embedding_dimensions)).
"""
embedded_features = torch.cat(
[
embedder(features[:, i].long())
for i, embedder in enumerate(self.embedders)
],
dim=-1,
)
return embedded_features
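# Illustrative usage sketch for FeatureEmbedder (hypothetical values): two
# categorical features with cardinalities [10, 4], embedded into [3, 2]
# dimensions, yield a concatenated embedding of width 5:
#
#     embedder = FeatureEmbedder(cardinalities=[10, 4],
#                                embedding_dimensions=[3, 2])
#     feats = torch.tensor([[1, 0], [9, 3]])  # shape (batch=2, num_features=2)
#     embedder(feats).shape                   # torch.Size([2, 5])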
class DeepNPTSNetwork(nn.Module):
"""Base class implementing a simple feed-forward neural network that takes
    in static and dynamic features and produces `num_hidden_nodes[-1]` independent
outputs. These outputs are then used by derived classes to construct the
forecast distribution for a single time step.
Note that the dynamic features are just treated as independent features
without considering their temporal nature.
"""
@validated()
def __init__(
self,
context_length: int,
num_hidden_nodes: List[int],
cardinality: List[int],
embedding_dimension: List[int],
num_time_features: int,
batch_norm: bool = False,
input_scaling: Optional[Union[Callable, str]] = None,
dropout_rate: float = 0.0,
):
super().__init__()
self.context_length = context_length
self.num_hidden_nodes = num_hidden_nodes
self.batch_norm = batch_norm
self.input_scaling = (
INPUT_SCALING_MAP[input_scaling]
if isinstance(input_scaling, str)
else input_scaling
)
self.dropout_rate = dropout_rate
# Embedding for categorical features
self.embedder = FeatureEmbedder(
cardinalities=cardinality, embedding_dimensions=embedding_dimension
)
total_embedding_dim = sum(embedding_dimension)
# We have two target related features: past_target and observed value
# indicator each of length `context_length`.
# Also, +1 for the static real feature.
dimensions = [
context_length * (num_time_features + 2) + total_embedding_dim + 1
] + num_hidden_nodes
modules: List[nn.Module] = []
for in_features, out_features in zip(dimensions[:-1], dimensions[1:]):
modules += [nn.Linear(in_features, out_features), nn.ReLU()]
if self.batch_norm:
modules.append(nn.BatchNorm1d(out_features))
if self.dropout_rate > 0:
modules.append(nn.Dropout(self.dropout_rate))
self.model = nn.Sequential(*modules)
self.model.apply(partial(init_weights, scale=0.07))
# TODO: Handle missing values using the observed value indicator.
def forward(
self,
feat_static_cat: torch.Tensor,
feat_static_real: torch.Tensor,
past_target: torch.Tensor,
past_observed_values: torch.Tensor,
past_time_feat: torch.Tensor,
):
"""
Parameters
----------
feat_static_cat
Shape (-1, num_features).
feat_static_real
Shape (-1, num_features).
past_target
Shape (-1, context_length).
past_observed_values
Shape (-1, context_length).
past_time_feat
Shape (-1, context_length, self.num_time_features).
"""
x = past_target
if self.input_scaling:
loc, scale = self.input_scaling(x)
x_scaled = (x - loc) / scale
else:
x_scaled = x
embedded_cat = self.embedder(feat_static_cat)
static_feat = torch.cat(
(embedded_cat, torch.tensor(feat_static_real)),
dim=1,
)
time_features = torch.cat(
[
x_scaled.unsqueeze(dim=-1),
past_observed_values.unsqueeze(dim=-1),
past_time_feat,
],
dim=-1,
)
features = torch.cat(
[
time_features.reshape(time_features.shape[0], -1),
static_feat,
],
dim=-1,
)
return self.model(features)
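    # With the shapes documented above, each row of `features` has width
    # context_length * (num_time_features + 2) + sum(embedding_dimension) + 1,
    # matching dimensions[0] computed in __init__: the "+2" counts the scaled
    # target and the observed-values indicator, the "+1" the static real
    # feature.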
class DeepNPTSNetworkDiscrete(DeepNPTSNetwork):
"""
    Extends `DeepNPTSNetwork` by implementing the output layer which
converts the outputs from the base network into probabilities of length
`context_length`. These probabilities together with the past values in the
context window constitute the one-step-ahead forecast distribution.
Specifically, the forecast is always one of the values observed in the
context window with the corresponding predicted probability.
Parameters
----------
*args
Arguments to ``DeepNPTSNetwork``.
use_softmax
Flag indicating whether to use softmax or normalization for
converting the outputs of the base network to probabilities.
kwargs
Keyword arguments to ``DeepNPTSNetwork``.
"""
@validated()
def __init__(self, *args, use_softmax: bool = False, **kwargs):
super().__init__(*args, **kwargs)
self.use_softmax = use_softmax
        # Apply dropout ahead of the output layer only when a positive rate
        # is configured.
        modules: List[nn.Module] = (
            [nn.Dropout(self.dropout_rate)] if self.dropout_rate > 0 else []
        )
modules.append(
nn.Linear(self.num_hidden_nodes[-1], self.context_length)
)
self.output_layer = nn.Sequential(*modules)
self.output_layer.apply(init_weights)
def forward(
self,
feat_static_cat: torch.Tensor,
feat_static_real: torch.Tensor,
past_target: torch.Tensor,
past_observed_values: torch.Tensor,
past_time_feat: torch.Tensor,
) -> DiscreteDistribution:
h = super().forward(
feat_static_cat=feat_static_cat,
feat_static_real=feat_static_real,
past_target=past_target,
past_observed_values=past_observed_values,
past_time_feat=past_time_feat,
)
outputs = self.output_layer(h)
probs = (
nn.functional.softmax(outputs, dim=1)
if self.use_softmax
else nn.functional.normalize(
nn.functional.softplus(outputs), p=1, dim=1
)
)
return DiscreteDistribution(values=past_target, probs=probs)
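# The two normalizations above behave differently; for raw outputs
# o = [0.0, 1.0, 2.0] (illustrative numbers):
#
#     softmax:             exp(o) / sum(exp(o))           -> [0.09, 0.24, 0.67]
#     normalized softplus: softplus(o) / sum(softplus(o)) -> [0.17, 0.32, 0.51]
#
# softmax sharpens differences exponentially, while the softplus variant is
# roughly linear for large outputs and so yields flatter probabilities.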
class DeepNPTSNetworkSmooth(DeepNPTSNetwork):
"""
    Extends `DeepNPTSNetwork` by implementing the output layer which
converts the outputs from the base network into a smoothed mixture
distribution. The components of the mixture are Gaussians centered around
the observations in the context window. The mixing probabilities as well as
the width of the Gaussians are predicted by the network.
This mixture distribution represents the one-step-ahead forecast
distribution. Note that the forecast can contain values not observed in the
context window.
"""
@validated()
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
        # Apply dropout ahead of the output layer only when a positive rate
        # is configured.
        modules = (
            [nn.Dropout(self.dropout_rate)] if self.dropout_rate > 0 else []
        )
modules += [
nn.Linear(self.num_hidden_nodes[-1], self.context_length + 1),
nn.Softplus(),
]
self.output_layer = nn.Sequential(*modules)
self.output_layer.apply(init_weights)
def forward(
self,
feat_static_cat: torch.Tensor,
feat_static_real: torch.Tensor,
past_target: torch.Tensor,
past_observed_values: torch.Tensor,
past_time_feat: torch.Tensor,
) -> MixtureSameFamily:
h = super().forward(
feat_static_cat=feat_static_cat,
feat_static_real=feat_static_real,
past_target=past_target,
past_observed_values=past_observed_values,
past_time_feat=past_time_feat,
)
outputs = self.output_layer(h)
probs = outputs[:, :-1]
kernel_width = outputs[:, -1:]
mix = Categorical(probs)
components = Normal(loc=past_target, scale=kernel_width)
return MixtureSameFamily(
mixture_distribution=mix, component_distribution=components
)
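# Concretely: with past_target row [3.0, 5.0] and network outputs
# [0.2, 0.8, 0.1] (illustrative numbers), the mixture weights are proportional
# to [0.2, 0.8] and both components share the predicted kernel width, i.e.
# 0.2 * N(3.0, 0.1) + 0.8 * N(5.0, 0.1) after Categorical normalization.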
class DeepNPTSMultiStepNetwork(nn.Module):
"""
Implements multi-step prediction given a trained `DeepNPTSNetwork` model
that outputs one-step-ahead forecast distribution.
"""
@validated()
def __init__(
self,
net: DeepNPTSNetwork,
prediction_length: int,
num_parallel_samples: int = 100,
):
super().__init__()
self.net = net
self.prediction_length = prediction_length
self.num_parallel_samples = num_parallel_samples
def forward(
self,
feat_static_cat: torch.Tensor,
feat_static_real: torch.Tensor,
past_target: torch.Tensor,
past_observed_values: torch.Tensor,
past_time_feat: torch.Tensor,
future_time_feat: torch.Tensor,
):
"""Generates samples from the forecast distribution.
Parameters
----------
feat_static_cat
Shape (-1, num_features).
feat_static_real
Shape (-1, num_features).
past_target
Shape (-1, context_length).
past_observed_values
Shape (-1, context_length).
past_time_feat
Shape (-1, context_length, self.num_time_features).
future_time_feat
Shape (-1, prediction_length, self.num_time_features).
Returns
-------
torch.Tensor
Tensor containing samples from the predicted distribution.
Shape is (-1, self.num_parallel_samples, self.prediction_length).
"""
# Blow up the initial `x` by the number of parallel samples required.
# (batch_size * num_parallel_samples, context_length)
past_target = past_target.repeat_interleave(
self.num_parallel_samples, dim=0
)
# Note that gluonts returns empty future_observed_values.
future_observed_values = torch.ones(
(past_observed_values.shape[0], self.prediction_length)
)
observed_values = torch.cat(
[past_observed_values, future_observed_values], dim=1
)
observed_values = observed_values.repeat_interleave(
self.num_parallel_samples, dim=0
)
time_feat = torch.cat([past_time_feat, future_time_feat], dim=1)
time_feat = time_feat.repeat_interleave(
self.num_parallel_samples, dim=0
)
feat_static_cat = feat_static_cat.repeat_interleave(
self.num_parallel_samples, dim=0
)
feat_static_real = feat_static_real.repeat_interleave(
self.num_parallel_samples, dim=0
)
future_samples = []
for t in range(self.prediction_length):
distr = self.net(
feat_static_cat=feat_static_cat,
feat_static_real=feat_static_real,
past_target=past_target,
past_observed_values=observed_values[
:, t : -self.prediction_length + t
],
past_time_feat=time_feat[
:, t : -self.prediction_length + t, :
],
)
samples = distr.sample()
if past_target.dim() != samples.dim():
samples = samples.unsqueeze(dim=-1)
future_samples.append(samples)
past_target = torch.cat([past_target[:, 1:], samples], dim=1)
# (batch_size * num_parallel_samples, prediction_length)
samples_out = torch.stack(future_samples, dim=1)
# (batch_size, num_parallel_samples, prediction_length)
return samples_out.reshape(
-1, self.num_parallel_samples, self.prediction_length
)
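if __name__ == "__main__":
    # Minimal smoke test (a sketch; all sizes are arbitrary). Because of the
    # relative imports, run it from the package context, e.g.:
    #   python -m gluonts.torch.model.deep_npts._network
    torch.manual_seed(0)
    context_length, prediction_length, num_time_features = 8, 3, 2
    net = DeepNPTSNetworkDiscrete(
        context_length=context_length,
        num_hidden_nodes=[16],
        cardinality=[5],
        embedding_dimension=[2],
        num_time_features=num_time_features,
    )
    multi_step = DeepNPTSMultiStepNetwork(
        net=net, prediction_length=prediction_length, num_parallel_samples=7
    )
    batch_size = 4
    samples = multi_step(
        feat_static_cat=torch.zeros(batch_size, 1),
        feat_static_real=torch.zeros(batch_size, 1),
        past_target=torch.rand(batch_size, context_length),
        past_observed_values=torch.ones(batch_size, context_length),
        past_time_feat=torch.rand(batch_size, context_length, num_time_features),
        future_time_feat=torch.rand(batch_size, prediction_length, num_time_features),
    )
    # Expected shape: (batch_size, num_parallel_samples, prediction_length).
    print(samples.shape)  # torch.Size([4, 7, 3])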
| [
"[email protected]"
] | |
fd8cfb47b2d8e17dae6ea7bb6a37a38a95978a58 | ef5f8a1d7b098391b5e5fce57edc83870204fe69 | /albert_model/clue_classifier_utils_char_no_space.py | b1755d70cbfbb75c08b321f41ecb2ab40f4d9ea6 | [
"Apache-2.0"
] | permissive | guome/subchar-transformers | 9829ded6c312adabf481c11ea25a2eaa069a1aaa | 54c3bfb5c197946fa5a8b6ed5524b81284259613 | refs/heads/master | 2022-07-04T16:21:12.589815 | 2020-05-13T12:49:54 | 2020-05-13T12:49:54 | 263,630,138 | 1 | 0 | null | 2020-05-13T12:57:25 | 2020-05-13T12:57:24 | null | UTF-8 | Python | false | false | 21,002 | py | # -*- coding: utf-8 -*-
# @Author: bo.shi
# @Date: 2019-12-01 22:28:41
# @Last Modified by: bo.shi
# @Last Modified time: 2019-12-02 18:36:50
# coding=utf-8
# Copyright 2019 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility functions for GLUE classification tasks."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import json
import csv
import os
import six
import tensorflow as tf
def convert_to_unicode(text):
"""Converts `text` to Unicode (if it's not already), assuming utf-8 input."""
if six.PY3:
if isinstance(text, str):
return text
elif isinstance(text, bytes):
return text.decode("utf-8", "ignore")
else:
raise ValueError("Unsupported string type: %s" % (type(text)))
elif six.PY2:
if isinstance(text, str):
return text.decode("utf-8", "ignore")
elif isinstance(text, unicode):
return text
else:
raise ValueError("Unsupported string type: %s" % (type(text)))
else:
raise ValueError("Not running on Python2 or Python 3?")
class InputExample(object):
"""A single training/test example for simple sequence classification."""
def __init__(self, guid, text_a, text_b=None, label=None):
"""Constructs a InputExample.
Args:
guid: Unique id for the example.
text_a: string. The untokenized text of the first sequence. For single
sequence tasks, only this sequence must be specified.
text_b: (Optional) string. The untokenized text of the second sequence.
Only must be specified for sequence pair tasks.
label: (Optional) string. The label of the example. This should be
specified for train and dev examples, but not for test examples.
"""
self.guid = guid
self.text_a = text_a
self.text_b = text_b
self.label = label
class PaddingInputExample(object):
"""Fake example so the num input examples is a multiple of the batch size.
When running eval/predict on the TPU, we need to pad the number of examples
to be a multiple of the batch size, because the TPU requires a fixed batch
size. The alternative is to drop the last batch, which is bad because it means
the entire output data won't be generated.
We use this class instead of `None` because treating `None` as padding
    batches could cause silent errors.
"""
class DataProcessor(object):
"""Base class for data converters for sequence classification data sets."""
def __init__(self, args):
self.args = args
def get_train_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the train set."""
raise NotImplementedError()
def get_dev_examples(self, data_dir):
"""Gets a collection of `InputExample`s for the dev set."""
raise NotImplementedError()
def get_test_examples(self, data_dir):
"""Gets a collection of `InputExample`s for prediction."""
raise NotImplementedError()
def get_labels(self):
"""Gets the list of labels for this data set."""
raise NotImplementedError()
@classmethod
def _read_tsv(cls, input_file, delimiter="\t", quotechar=None):
"""Reads a tab separated value file."""
with tf.gfile.Open(input_file, "r") as f:
reader = csv.reader(f, delimiter=delimiter, quotechar=quotechar)
lines = []
for line in reader:
lines.append(line)
return lines
@classmethod
def _read_txt(cls, input_file):
"""Reads a tab separated value file."""
with tf.gfile.Open(input_file, "r") as f:
reader = f.readlines()
lines = []
for line in reader:
lines.append(line.strip().split("_!_"))
return lines
@classmethod
def _read_json(cls, input_file):
"""Reads a tab separated value file."""
with tf.gfile.Open(input_file, "r") as f:
reader = f.readlines()
lines = []
for line in reader:
lines.append(json.loads(line.strip()))
return lines
class XnliProcessor(DataProcessor):
"""Processor for the XNLI data set."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "train.json")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "dev.json")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "test.json")), "test")
def _create_examples(self, lines, set_type):
"""See base class."""
examples = []
for (i, line) in enumerate(lines):
guid = "%s-%s" % (set_type, i)
text_a = convert_to_unicode(line['premise'])
text_b = convert_to_unicode(line['hypo'])
label = convert_to_unicode(line['label']) if set_type != 'test' else 'contradiction'
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
def get_labels(self):
"""See base class."""
return ["contradiction", "entailment", "neutral"]
class TnewsProcessor(DataProcessor):
"""Processor for the MRPC data set (GLUE version)."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "train.json")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "dev.json")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "test.json")), "test")
def get_labels(self):
"""See base class."""
labels = []
for i in range(17):
if i == 5 or i == 11:
continue
labels.append(str(100 + i))
return labels
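        # This yields the 15 TNEWS label ids "100".."116"; "105" and "111" do
        # not occur in the data set, hence the two skipped indices above.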
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
guid = "%s-%s" % (set_type, i)
text_a = line['sentence'].strip()
if hasattr(self.args, "max_sent_length"):
text_a = text_a[: self.args.max_sent_length]
if self.args.do_lower_case:
text_a = text_a.lower()
text_a = convert_to_unicode(text_a)
text_b = None
label = convert_to_unicode(line['label']) if set_type != 'test' else "100"
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class iFLYTEKDataProcessor(DataProcessor):
"""Processor for the iFLYTEKData data set (GLUE version)."""
def __init__(self, args):
super(iFLYTEKDataProcessor, self).__init__(args)
self.args = args
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "train.json")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "dev.json")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "test.json")), "test")
def get_labels(self):
"""See base class."""
labels = []
for i in range(119):
labels.append(str(i))
return labels
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
# dict_char2comp = json.load(open("./resources/char2comp.json", "r"))
examples = []
for (i, line) in enumerate(lines):
guid = "%s-%s" % (set_type, i)
text_a = line['sentence'].strip()
if hasattr(self.args, "max_sent_length"):
text_a = text_a[: self.args.max_sent_length]
if self.args.do_lower_case:
text_a = text_a.lower()
# print(text_a)
text_a = convert_to_unicode(text_a)
text_b = None
label = convert_to_unicode(line['label']) if set_type != 'test' else "0"
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
if i < 5:
print(text_a)
print(text_b)
return examples
class ChnSentiCorpDataProcessor(DataProcessor):
"""Processor for the iFLYTEKData data set (GLUE version)."""
def __init__(self, args):
super(ChnSentiCorpDataProcessor, self).__init__(args)
self.args = args
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "train.json")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "dev.json")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "test.json")), "test")
def get_labels(self):
"""See base class."""
labels = []
for i in range(2):
labels.append(str(i))
return labels
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
# dict_char2comp = json.load(open("./resources/char2comp.json", "r"))
examples = []
for (i, line) in enumerate(lines):
guid = "%s-%s" % (set_type, i)
text_a = line['sentence'].strip()
if hasattr(self.args, "max_sent_length"):
text_a = text_a[: self.args.max_sent_length]
if self.args.do_lower_case:
text_a = text_a.lower()
# print(text_a)
text_a = convert_to_unicode(text_a)
text_b = None
label = convert_to_unicode(line['label']) if set_type != 'test' else "0"
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
if i < 5:
print(text_a)
print(text_b)
return examples
class LCQMCProcessor(DataProcessor):
"""Processor for the internal data set. sentence pair classification"""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "train.json")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "dev.json")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "test.json")), "test")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
guid = "%s-%s" % (set_type, i)
text_a = line['sentence1'].strip()
if hasattr(self.args, "max_sent_length"):
text_a = text_a[: self.args.max_sent_length]
if self.args.do_lower_case:
text_a = text_a.lower()
text_a = convert_to_unicode(text_a)
text_b = line['sentence2'].strip()
if hasattr(self.args, "max_sent_length"):
text_b = text_b[: self.args.max_sent_length]
if self.args.do_lower_case:
text_b = text_b.lower()
text_b = convert_to_unicode(text_b)
label = convert_to_unicode(line['label']) if set_type != 'test' else '0'
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
if i < 5:
print(text_a)
print(text_b)
return examples
class AFQMCProcessor(DataProcessor):
"""Processor for the internal data set. sentence pair classification"""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "train.json")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "dev.json")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "test.json")), "test")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
guid = "%s-%s" % (set_type, i)
text_a = convert_to_unicode(line['sentence1'])
text_b = convert_to_unicode(line['sentence2'])
label = convert_to_unicode(line['label']) if set_type != 'test' else '0'
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class CMNLIProcessor(DataProcessor):
"""Processor for the CMNLI data set."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples_json(os.path.join(data_dir, "train.json"), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples_json(os.path.join(data_dir, "dev.json"), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples_json(os.path.join(data_dir, "test.json"), "test")
def get_labels(self):
"""See base class."""
return ["contradiction", "entailment", "neutral"]
def _create_examples_json(self, file_name, set_type):
"""Creates examples for the training and dev sets."""
examples = []
lines = tf.gfile.Open(file_name, "r")
index = 0
for line in lines:
line_obj = json.loads(line)
index = index + 1
guid = "%s-%s" % (set_type, index)
text_a = convert_to_unicode(line_obj["sentence1"])
text_b = convert_to_unicode(line_obj["sentence2"])
label = convert_to_unicode(line_obj["label"]) if set_type != 'test' else 'neutral'
if label != "-":
examples.append(InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class CslProcessor(DataProcessor):
"""Processor for the CSL data set."""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "train.json")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "dev.json")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "test.json")), "test")
def get_labels(self):
"""See base class."""
return ["0", "1"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
guid = "%s-%s" % (set_type, i)
text_a = convert_to_unicode(" ".join(line['keyword']))
text_b = convert_to_unicode(line['abst'])
label = convert_to_unicode(line['label']) if set_type != 'test' else '0'
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=text_b, label=label))
return examples
class WSCProcessor(DataProcessor):
"""Processor for the internal data set. sentence pair classification"""
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "train.json")), "train")
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "dev.json")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "test.json")), "test")
def get_labels(self):
"""See base class."""
return ["true", "false"]
def _create_examples(self, lines, set_type):
"""Creates examples for the training and dev sets."""
examples = []
for (i, line) in enumerate(lines):
guid = "%s-%s" % (set_type, i)
text_a = convert_to_unicode(line['text'])
text_a_list = list(text_a)
target = line['target']
query = target['span1_text']
query_idx = target['span1_index']
pronoun = target['span2_text']
pronoun_idx = target['span2_index']
assert text_a[pronoun_idx: (pronoun_idx + len(pronoun))
] == pronoun, "pronoun: {}".format(pronoun)
assert text_a[query_idx: (query_idx + len(query))] == query, "query: {}".format(query)
if pronoun_idx > query_idx:
text_a_list.insert(query_idx, "_")
text_a_list.insert(query_idx + len(query) + 1, "_")
text_a_list.insert(pronoun_idx + 2, "[")
text_a_list.insert(pronoun_idx + len(pronoun) + 2 + 1, "]")
else:
text_a_list.insert(pronoun_idx, "[")
text_a_list.insert(pronoun_idx + len(pronoun) + 1, "]")
text_a_list.insert(query_idx + 2, "_")
text_a_list.insert(query_idx + len(query) + 2 + 1, "_")
text_a = "".join(text_a_list)
if set_type == "test":
label = "true"
else:
label = line['label']
examples.append(
InputExample(guid=guid, text_a=text_a, text_b=None, label=label))
return examples
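    # Worked illustration of the markup above with a hypothetical sentence
    # (not from the data set): for text "李明回家了,他很开心" with query span
    # "李明" at index 0 and pronoun span "他" at index 6, the pronoun follows
    # the query, so the query is wrapped in underscores first and then the
    # pronoun in brackets:
    #     "_李明_回家了,[他]很开心"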
class COPAProcessor(DataProcessor):
"""Processor for the internal data set. sentence pair classification"""
def __init__(self):
self.language = "zh"
def get_train_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "train.json")), "train")
# dev_0827.tsv
def get_dev_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "dev.json")), "dev")
def get_test_examples(self, data_dir):
"""See base class."""
return self._create_examples(
self._read_json(os.path.join(data_dir, "test.json")), "test")
def get_labels(self):
"""See base class."""
return ["0", "1"]
@classmethod
def _create_examples_one(self, lines, set_type):
examples = []
for (i, line) in enumerate(lines):
guid1 = "%s-%s" % (set_type, i)
# try:
if line['question'] == 'cause':
text_a = convert_to_unicode(line['premise'] + '原因是什么呢?' + line['choice0'])
text_b = convert_to_unicode(line['premise'] + '原因是什么呢?' + line['choice1'])
else:
text_a = convert_to_unicode(line['premise'] + '造成了什么影响呢?' + line['choice0'])
text_b = convert_to_unicode(line['premise'] + '造成了什么影响呢?' + line['choice1'])
label = convert_to_unicode(str(1 if line['label'] == 0 else 0)) if set_type != 'test' else '0'
examples.append(
InputExample(guid=guid1, text_a=text_a, text_b=text_b, label=label))
# except Exception as e:
# print('###error.i:',e, i, line)
return examples
@classmethod
def _create_examples(self, lines, set_type):
examples = []
for (i, line) in enumerate(lines):
i = 2 * i
guid1 = "%s-%s" % (set_type, i)
guid2 = "%s-%s" % (set_type, i + 1)
# try:
premise = convert_to_unicode(line['premise'])
choice0 = convert_to_unicode(line['choice0'])
label = convert_to_unicode(str(1 if line['label'] == 0 else 0)) if set_type != 'test' else '0'
#text_a2 = convert_to_unicode(line['premise'])
choice1 = convert_to_unicode(line['choice1'])
label2 = convert_to_unicode(
str(0 if line['label'] == 0 else 1)) if set_type != 'test' else '0'
if line['question'] == 'effect':
text_a = premise
text_b = choice0
text_a2 = premise
text_b2 = choice1
elif line['question'] == 'cause':
text_a = choice0
text_b = premise
text_a2 = choice1
text_b2 = premise
else:
print('wrong format!!')
return None
examples.append(
InputExample(guid=guid1, text_a=text_a, text_b=text_b, label=label))
examples.append(
InputExample(guid=guid2, text_a=text_a2, text_b=text_b2, label=label2))
# except Exception as e:
# print('###error.i:',e, i, line)
return examples
| [
"[email protected]"
] | |
59accba5a656d5b413c7c3ad528bee9b9a83ad95 | 9025c27655e2f150d01e64ce0826df8166ac6813 | /core/urls.py | a1c84250501f6e331d1daaab5d0a66f5b2db6bbf | [] | no_license | kairat3/bella-plain | 02dd219f6bf087c99772490a32d61cd242a18f28 | 1950fd46dc53b800461f6077af3044bdfcf8300c | refs/heads/master | 2023-07-13T05:06:17.575811 | 2021-08-19T14:05:29 | 2021-08-19T14:05:29 | 393,064,884 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,452 | py | from django.conf import settings
from django.conf.urls.static import static
from django.contrib import admin
from django.urls import path, include
from rest_framework.routers import DefaultRouter
from product.views import ProductApiView
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
schema_view = get_schema_view(
openapi.Info(
title="Bella API",
default_version='v1',
description="Test description",
terms_of_service="https://www.google.com/policies/terms/",
contact=openapi.Contact(email="[email protected]"),
license=openapi.License(name="BSD License"),
),
public=True,
permission_classes=(permissions.AllowAny,),
)
router = DefaultRouter()
router.register('products', ProductApiView)
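# Registering the viewset on the DefaultRouter generates the standard DRF
# routes, e.g. /products/ (list) and /products/<pk>/ (detail), which are
# mounted below via include(router.urls).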
urlpatterns = [
path('', schema_view.with_ui('swagger', cache_timeout=0), name='schema-swagger-ui'),
path('docs/', schema_view.with_ui('redoc', cache_timeout=0), name='schema-redoc'),
path('', include('account.urls')),
path('', include('product.urls')),
path('admin/', admin.site.urls),
path('', include(router.urls)),
path('', include('info.urls')),
path('', include('news.urls')),
path('', include('cart.urls')),
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"[email protected]"
] | |
50363bbf710a2b67812e488531ed086fe0b32138 | d40fbefbd5db39f1c3fb97f17ed54cb7b6f230e0 | /datadog_checks_dev/datadog_checks/dev/tooling/config.py | 7d63ecb7890e8d4df068f1419c36389ea8bb11bc | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | slightilusion/integrations-core | 47a170d791e809f3a69c34e2426436a6c944c322 | 8f89e7ba35e6d27c9c1b36b9784b7454d845ba01 | refs/heads/master | 2020-05-20T18:34:41.716618 | 2019-05-08T21:51:17 | 2019-05-08T21:51:17 | 185,708,851 | 2 | 0 | BSD-3-Clause | 2019-05-09T02:05:19 | 2019-05-09T02:05:18 | null | UTF-8 | Python | false | false | 3,143 | py | # (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
from collections import OrderedDict, deque
from copy import deepcopy
import toml
from appdirs import user_data_dir
from atomicwrites import atomic_write
from six import string_types
from ..compat import FileNotFoundError
from ..utils import ensure_parent_dir_exists, file_exists, read_file
APP_DIR = user_data_dir('dd-checks-dev', '')
CONFIG_FILE = os.path.join(APP_DIR, 'config.toml')
SECRET_KEYS = {'dd_api_key', 'github.token', 'pypi.pass', 'trello.key', 'trello.token'}
DEFAULT_CONFIG = OrderedDict(
[
('core', os.path.join('~', 'dd', 'integrations-core')),
('extras', os.path.join('~', 'dd', 'integrations-extras')),
('agent', os.path.join('~', 'dd', 'datadog-agent')),
('repo', 'core'),
('agent6', OrderedDict((('docker', 'datadog/agent-dev:master'), ('local', 'latest')))),
('agent5', OrderedDict((('docker', 'datadog/dev-dd-agent:master'), ('local', 'latest')))),
('dd_api_key', os.getenv('DD_API_KEY')),
('github', OrderedDict((('user', ''), ('token', '')))),
('pypi', OrderedDict((('user', ''), ('pass', '')))),
('trello', OrderedDict((('key', ''), ('token', '')))),
]
)
def config_file_exists():
return file_exists(CONFIG_FILE)
def copy_default_config():
return deepcopy(DEFAULT_CONFIG)
def save_config(config):
ensure_parent_dir_exists(CONFIG_FILE)
with atomic_write(CONFIG_FILE, mode='wb', overwrite=True) as f:
f.write(toml.dumps(config).encode('utf-8'))
def load_config():
config = copy_default_config()
try:
config.update(toml.loads(read_config_file(), OrderedDict))
except FileNotFoundError:
pass
return config
def read_config_file():
return read_file(CONFIG_FILE)
def read_config_file_scrubbed():
return toml.dumps(scrub_secrets(load_config()))
def restore_config():
config = copy_default_config()
save_config(config)
return config
def update_config():
config = copy_default_config()
config.update(load_config())
# Support legacy config where agent5 and agent6 were strings
if isinstance(config['agent6'], string_types):
config['agent6'] = OrderedDict((('docker', config['agent6']), ('local', 'latest')))
if isinstance(config['agent5'], string_types):
config['agent5'] = OrderedDict((('docker', config['agent5']), ('local', 'latest')))
save_config(config)
return config
def scrub_secrets(config):
for secret_key in SECRET_KEYS:
branch = config
paths = deque(secret_key.split('.'))
while paths:
path = paths.popleft()
if not hasattr(branch, 'get'):
break
if path in branch:
if not paths:
old_value = branch[path]
if isinstance(old_value, string_types):
branch[path] = '*' * len(old_value)
else:
branch = branch[path]
else:
break
return config
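# Illustrative sketch of the traversal above (not part of the original
# module): a dotted entry such as "github.token" is walked one segment at a
# time, and a matched string leaf is masked with one '*' per character:
#
#     cfg = copy_default_config()
#     cfg['github']['token'] = 'tok_secret'
#     scrub_secrets(cfg)['github']['token']   # -> '**********'
#
# Note that scrub_secrets mutates the passed-in config and also returns it.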
| [
"[email protected]"
] | |
beb223699fadcff443ec1b36fb64cecf67b2359c | b5d0a6254b54c0a778181a67bcda14cc6663e871 | /0-notes/job-search/Cracking the Coding Interview/C10SortingSearching/questions/10.5-question.py | 5ec618baaa19cdb2c7b27b33ac1bfb9f081b82c6 | [
"MIT",
"LicenseRef-scancode-public-domain"
] | permissive | Web-Dev-Collaborative/Lambda-Final-Backup | 113e719a76a144b86d06f3a412afe4b02689cad7 | e9ab84928faa8364bacd863009ae9aec01ff9d1e | refs/heads/master | 2023-06-07T15:34:00.682815 | 2021-04-17T01:53:14 | 2021-04-17T01:53:14 | 358,899,122 | 0 | 0 | MIT | 2023-05-30T04:03:16 | 2021-04-17T14:24:53 | JavaScript | UTF-8 | Python | false | false | 318 | py | # Sparse Search
# Given a sorted array of strings that is interspersed with empty strings,
# write a method to find the location of a given string.
# EXAMPLE: INPUT: ball, {"at", "", "", "", "ball", "", "", "car", "", "", "dad", "", ""}
# OUTPUT: 4
# time complexity: O(log n) typical, O(n) worst case (when empty strings dominate)
# space complexity: O(1)
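# A minimal solution sketch (one conventional approach, added here): binary
# search that, when the probed middle element is empty, scans outward to the
# nearest non-empty string before comparing.
def sparse_search(strings, target):
    lo, hi = 0, len(strings) - 1
    while lo <= hi:
        mid = (lo + hi) // 2
        if strings[mid] == "":
            # Walk outward from mid to the closest non-empty entry in range.
            left, right = mid - 1, mid + 1
            while True:
                if left < lo and right > hi:
                    return -1
                if right <= hi and strings[right] != "":
                    mid = right
                    break
                if left >= lo and strings[left] != "":
                    mid = left
                    break
                left -= 1
                right += 1
        if strings[mid] == target:
            return mid
        if strings[mid] < target:
            lo = mid + 1
        else:
            hi = mid - 1
    return -1

if __name__ == "__main__":
    arr = ["at", "", "", "", "ball", "", "", "car", "", "", "dad", "", ""]
    print(sparse_search(arr, "ball"))  # 4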
| [
"[email protected]"
] | |
a2dd70fc69879a4648eb45dac4bea8dae1233790 | d83118503614bb83ad8edb72dda7f449a1226f8b | /src/dprj/platinumegg/app/cabaret/views/application/effect.py | 40b158532e97911174a83a5334610da7b7a1310a | [] | no_license | hitandaway100/caba | 686fe4390e182e158cd9714c90024a082deb8c69 | 492bf477ac00c380f2b2758c86b46aa7e58bbad9 | refs/heads/master | 2021-08-23T05:59:28.910129 | 2017-12-03T19:03:15 | 2017-12-03T19:03:15 | 112,512,044 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 54,649 | py | # -*- coding: utf-8 -*-
from platinumegg.app.cabaret.views.apphandler import AppHandler
from platinumegg.app.cabaret.util.api import BackendApi
import settings
from platinumegg.app.cabaret.util.url_maker import UrlMaker
from platinumegg.lib.pljson import Json
import settings_sub
from urlparse import urlparse
import urllib
from defines import Defines
from platinumegg.app.cabaret.util.scout import ScoutEventNone
from platinumegg.app.cabaret.util.card import CardUtil
from platinumegg.lib.opensocial.util import OSAUtil
from platinumegg.app.cabaret.util.present import PresentSet
import datetime
from platinumegg.app.cabaret.util.datetime_util import DateTimeUtil
from platinumegg.app.cabaret.util.rediscache import LoginBonusTimeLimitedAnimationSet
from platinumegg.app.cabaret.views.application.loginbonus.base import LoginBonusHandler
class Handler(AppHandler):
"""演出のパラメータを取得.
"""
@classmethod
def get_default_status(cls):
"""デフォルトで返すHttpStatus.
"""
return 500
def processError(self, error_message):
self.response.set_status(500)
self.response.end()
def __sendErrorResponse(self, status):
self.response.set_status(status)
self.response.end()
def checkUser(self):
pass
def check_process_pre(self):
if settings_sub.IS_LOCAL:
return True
elif self.osa_util.is_dbg_user:
pass
elif not settings_sub.IS_DEV and self.osa_util.viewer_id in ('10814964', '11404810', '39121', '12852359', '1412759', '11830507', '11467913', '10128761', '11868885', '434009', '23427632', '10918839', '21655464', '17279084', '24500573', '28774432', '11739356','2588824','28978730','20174324'):
pass
elif not self.checkMaintenance():
return False
return True
def process(self):
args = self.getUrlArgs('/effect/')
ope = args.get(0)
f = getattr(self, 'proc_%s' % ope, None)
if f is None:
self.__sendErrorResponse(404)
return
f(args)
def writeResponseBody(self, params):
if self.isUsePCEffect():
body = Json.encode({
'flashVars' : self.makeFlashVars(params)
})
else:
body = Json.encode(params)
        self.response.set_header('Content-Type', 'text/plain')
self.response.set_status(200)
self.response.send(body)
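    # writeResponseBody branches on the client type: PC Flash effects expect
    # the parameters wrapped as {'flashVars': ...} via makeFlashVars, while
    # smartphone effects consume the JSON object directly.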
def proc_battle(self, args):
"""バトル演出.
"""
model_mgr = self.getModelMgr()
v_player = self.getViewerPlayer(True)
if v_player is None:
            # No result exists.
self.osa_util.logger.error('Player is None. opensocial_viewer_id=%s' % self.osa_util.viewer_id)
self.__sendErrorResponse(404)
return
        # Result data.
battleresult = BackendApi.get_battleresult(model_mgr, v_player.id, using=settings.DB_READONLY)
if battleresult is None or not battleresult.anim:
            # No result exists.
self.osa_util.logger.error('result is None')
self.__sendErrorResponse(404)
return
        # Parameters for the animation.
animationdata = battleresult.anim
params = animationdata.to_animation_data(self)
if BackendApi.get_current_battleevent_master(model_mgr, using=settings.DB_READONLY):
            params['feverFlag'] = 0 # Not displayed during events.
urldata = urlparse(self.url_cgi)
url = '%s://%s%s' % (urldata.scheme, settings_sub.WEB_GLOBAL_HOST, urldata.path)
url = url + UrlMaker.battleresultanim()
url = self.osa_util.makeLinkUrl(self.addTimeStamp(url))
params['backUrl'] = url
self.writeResponseBody(params)
def proc_battleevent(self, args):
"""イベントバトル演出.
"""
model_mgr = self.getModelMgr()
v_player = self.getViewerPlayer(True)
if v_player is None:
            # No result exists.
self.osa_util.logger.error('Player is None. opensocial_viewer_id=%s' % self.osa_util.viewer_id)
self.__sendErrorResponse(404)
return
uid = v_player.id
try:
eventid = int(args.get(1))
except:
            # Invalid arguments.
self.osa_util.logger.error('Invalid arguments')
self.__sendErrorResponse(400)
return
        # Result data.
battleresult = BackendApi.get_battleevent_battleresult(model_mgr, eventid, uid, using=settings.DB_READONLY)
if battleresult is None or not battleresult.anim:
            # No result exists.
self.osa_util.logger.error('result is None')
self.__sendErrorResponse(404)
return
        # Parameters for the animation.
animationdata = battleresult.anim
params = animationdata.to_animation_data(self)
        params['feverFlag'] = 0 # Not displayed during events.
rarity = args.getInt(2)
piecenumber = args.getInt(3)
is_complete = args.getInt(4)
urldata = urlparse(self.url_cgi)
url = '%s://%s%s' % (urldata.scheme, settings_sub.WEB_GLOBAL_HOST, urldata.path)
url = url + UrlMaker.battleevent_battleresultanim(eventid, rarity, piecenumber, is_complete)
url = self.osa_util.makeLinkUrl(self.addTimeStamp(url))
params['backUrl'] = url
self.writeResponseBody(params)
def proc_scout(self, args):
"""スカウト演出.
"""
try:
scoutid = int(args.get(1))
scoutkey = urllib.unquote(args.get(2))
except:
            # Invalid arguments.
self.osa_util.logger.error('Invalid arguments')
self.__sendErrorResponse(400)
return
v_player = self.getViewerPlayer()
model_mgr = self.getModelMgr()
uid = v_player.id
using = settings.DB_READONLY
        # Progress data.
playdata = BackendApi.get_scoutprogress(model_mgr, uid, [scoutid], using=using).get(scoutid, None)
if playdata is None or playdata.alreadykey != scoutkey:
            # Should be re-fetched from the primary DB.
playdata = BackendApi.get_scoutprogress(model_mgr, uid, [scoutid], using=settings.DB_DEFAULT, reflesh=True).get(scoutid, None)
if playdata is None or playdata.alreadykey != scoutkey:
self.osa_util.logger.error('Not Found')
self.__sendErrorResponse(404)
return
eventlist = playdata.result.get('event', [])
if eventlist:
            # Only the first entry is needed here.
event = eventlist[0]
else:
            # Nothing happened.
event = ScoutEventNone.create()
eventKind = event.get_type()
backUrl = None
        # Settings per event type.
if eventKind == Defines.ScoutEventType.NONE:
            # Try again as-is.
backUrl = UrlMaker.scoutdo(scoutid, playdata.confirmkey)
elif eventKind in (Defines.ScoutEventType.LEVELUP, Defines.ScoutEventType.COMPLETE, Defines.ScoutEventType.HAPPENING):
            # Go to the result display.
backUrl = UrlMaker.scoutresultanim(scoutid, scoutkey, 0)
        # Go to the result display.
backUrl = backUrl or UrlMaker.scoutresult(scoutid, scoutkey)
        # Animation parameters.
scoutmaster = BackendApi.get_scouts(model_mgr, [scoutid], using=using)[0]
resultlist = playdata.result.get('result', [])
params = BackendApi.make_scoutanim_params(self, scoutmaster, eventlist, resultlist)
if params is None:
self.osa_util.logger.error('Not Found')
self.__sendErrorResponse(404)
return
urldata = urlparse(self.url_cgi)
url = '%s://%s%s' % (urldata.scheme, settings_sub.WEB_GLOBAL_HOST, urldata.path)
url = self.osa_util.makeLinkUrl(self.addTimeStamp(url + backUrl))
params['backUrl'] = url
self.writeResponseBody(params)
def __make_eventscoutanim_params(self, stagemaster, playdata, backUrl):
"""スカウトイベント演出.
"""
eventlist = playdata.result.get('event', [])
        # Animation parameters.
resultlist = playdata.result.get('result', [])
params = BackendApi.make_scoutanim_params(self, stagemaster, eventlist, resultlist, feveretime=getattr(playdata, 'feveretime', None))
if params is None:
self.osa_util.logger.error('Not Found')
self.__sendErrorResponse(404)
return
urldata = urlparse(self.url_cgi)
url = '%s://%s%s' % (urldata.scheme, settings_sub.WEB_GLOBAL_HOST, urldata.path)
url = self.osa_util.makeLinkUrl(self.addTimeStamp(url + backUrl))
params['backUrl'] = url
return params
def proc_scoutevent(self, args):
"""スカウトイベント演出.
"""
try:
stageid = int(args.get(1))
scoutkey = urllib.unquote(args.get(2))
except:
            # Invalid arguments.
self.osa_util.logger.error('Invalid arguments')
self.__sendErrorResponse(400)
return
v_player = self.getViewerPlayer()
model_mgr = self.getModelMgr()
using = settings.DB_READONLY
eventmaster = BackendApi.get_current_scouteventmaster(model_mgr, using=using)
if eventmaster is None:
            # Event not found.
self.osa_util.logger.error('Event Not Found')
self.__sendErrorResponse(404)
return
mid = eventmaster.id
        # Progress data.
playdata = BackendApi.get_event_playdata(model_mgr, mid, v_player.id, using)
if playdata is None or playdata.alreadykey != scoutkey:
self.osa_util.logger.error('Not Found')
self.__sendErrorResponse(404)
return
eventlist = playdata.result.get('event', [])
if eventlist:
            # Only the first entry is needed here.
event = eventlist[0]
else:
            # Nothing happened.
event = ScoutEventNone.create()
eventKind = event.get_type()
backUrl = None
        # Settings per event type.
if eventKind == Defines.ScoutEventType.NONE:
            # Try again as-is.
backUrl = UrlMaker.scouteventdo(stageid, playdata.confirmkey)
else:
if playdata.result.get('feverstart'):
                # Fever animation
backUrl = UrlMaker.scouteventfever(stageid, scoutkey)
elif playdata.result.get('lovetime_start'):
                # Rendezvous-time animation.
backUrl = UrlMaker.scouteventlovetime(stageid, scoutkey)
elif eventKind in (Defines.ScoutEventType.LEVELUP, Defines.ScoutEventType.COMPLETE, Defines.ScoutEventType.HAPPENING):
                # Go to the result display.
backUrl = UrlMaker.scouteventresultanim(stageid, scoutkey, 0)
        # Go to the result display.
backUrl = backUrl or UrlMaker.scouteventresult(stageid, scoutkey)
stagemaster = BackendApi.get_event_stage(model_mgr, stageid, using=using)
params = self.__make_eventscoutanim_params(stagemaster, playdata, backUrl)
if self.response.isEnd:
return
self.writeResponseBody(params)
def proc_raideventscout(self, args):
"""スカウトイベント演出.
"""
try:
stageid = int(args.get(1))
scoutkey = urllib.unquote(args.get(2))
except:
            # Invalid arguments.
self.osa_util.logger.error('Invalid arguments')
self.__sendErrorResponse(400)
return
v_player = self.getViewerPlayer()
uid = v_player.id
model_mgr = self.getModelMgr()
using = settings.DB_READONLY
eventmaster = BackendApi.get_current_raideventmaster(model_mgr, using=using)
if eventmaster is None:
            # Event not found.
self.osa_util.logger.error('Event Not Found')
self.__sendErrorResponse(404)
return
mid = eventmaster.id
        # Progress data.
playdata = BackendApi.get_raideventstage_playdata(model_mgr, mid, uid, using)
if playdata is None or playdata.alreadykey != scoutkey:
self.osa_util.logger.error('Not Found')
self.__sendErrorResponse(404)
return
eventlist = playdata.result.get('event', [])
if eventlist:
            # Only the first entry is needed here.
event = eventlist[0]
else:
            # Nothing happened.
event = ScoutEventNone.create()
eventKind = event.get_type()
backUrl = None
        # Settings per event type.
if eventKind == Defines.ScoutEventType.NONE:
            # Try again as-is.
backUrl = UrlMaker.raidevent_scoutdo(stageid, playdata.confirmkey)
elif eventKind in (Defines.ScoutEventType.LEVELUP, Defines.ScoutEventType.COMPLETE, Defines.ScoutEventType.HAPPENING):
            # Go to the result display.
backUrl = UrlMaker.raidevent_scoutresultanim(stageid, scoutkey, 0)
        # Go to the result display.
backUrl = backUrl or UrlMaker.raidevent_scoutresult(stageid, scoutkey)
stagemaster = BackendApi.get_raidevent_stagemaster(model_mgr, stageid, using=using)
params = self.__make_eventscoutanim_params(stagemaster, playdata, backUrl)
if self.response.isEnd:
return
self.writeResponseBody(params)
def proc_produceeventscout(self, args):
"""プロデュースイベントのスカウトイベント演出.
"""
try:
stageid = int(args.get(1))
scoutkey = urllib.unquote(args.get(2))
except:
            # Invalid arguments.
self.osa_util.logger.error('Invalid arguments')
self.__sendErrorResponse(400)
return
v_player = self.getViewerPlayer()
uid = v_player.id
model_mgr = self.getModelMgr()
using = settings.DB_READONLY
eventmaster = BackendApi.get_current_produce_event_master(model_mgr, using=using)
if eventmaster is None:
            # Event not found.
self.osa_util.logger.error('Event Not Found')
self.__sendErrorResponse(404)
return
mid = eventmaster.id
        # Progress data.
        playdata = BackendApi.get_produceeventstage_playdata(model_mgr, mid, uid, using)
if playdata is None or playdata.alreadykey != scoutkey:
self.osa_util.logger.error('Not Found')
self.__sendErrorResponse(404)
return
eventlist = playdata.result.get('event', [])
if eventlist:
            # Only the first entry is needed here.
event = eventlist[0]
else:
            # Nothing happened.
event = ScoutEventNone.create()
eventKind = event.get_type()
backUrl = None
        # Settings per event type.
if eventKind == Defines.ScoutEventType.NONE:
            # Try again as-is.
backUrl = UrlMaker.produceevent_scoutdo(stageid, playdata.confirmkey)
elif eventKind in (Defines.ScoutEventType.LEVELUP, Defines.ScoutEventType.COMPLETE, Defines.ScoutEventType.HAPPENING):
            # Go to the result display.
backUrl = UrlMaker.produceevent_scoutresultanim(stageid, scoutkey, 0)
        # Go to the result display.
backUrl = backUrl or UrlMaker.produceevent_scoutresult(stageid, scoutkey)
stagemaster = BackendApi.get_produceevent_stagemaster(model_mgr, stageid, using=using)
params = self.__make_eventscoutanim_params(stagemaster, playdata, backUrl)
if self.response.isEnd:
return
self.writeResponseBody(params)
def proc_gacha(self, args):
"""ガチャ演出.
"""
CONTENT_NUM_PER_PAGE = 10
try:
mid = int(args.get(1))
reqkey = urllib.unquote(args.get(2))
page = int(args.get(3) or 0)
except:
            # Invalid arguments.
self.osa_util.logger.error('Invalid arguments')
self.__sendErrorResponse(400)
return
model_mgr = self.getModelMgr()
using = settings.DB_READONLY
v_player = self.getViewerPlayer()
uid = v_player.id
gachamaster = BackendApi.get_gachamaster(model_mgr, mid, using)
playdata = None
gachamasterstep = None
if gachamaster:
if gachamaster.stepsid > 0:
if gachamaster.stepsid != gachamaster.id:
gachamasterstep = BackendApi.get_gachamaster(model_mgr, gachamaster.stepsid, using=using)
if gachamasterstep is None:
self.osa_util.logger.error('Not Found')
self.__sendErrorResponse(404)
return
else:
gachamasterstep = gachamaster
playdata = BackendApi.get_gachaplaydata(model_mgr, uid, [gachamaster.boxid], using=using).get(gachamaster.boxid)
if playdata is None or not playdata.result:
            # No result.
self.osa_util.logger.error('Not Found')
self.__sendErrorResponse(404)
return
if gachamaster.consumetype == Defines.GachaConsumeType.RANKING:
cardtextformat_getter = lambda master : Defines.EffectTextFormat.RANKINGGACHA_CARDTEXT
else:
cardtextformat_getter = lambda master : Defines.EffectTextFormat.GACHA_CARDTEXT if master.ckind == Defines.CardKind.NORMAL else Defines.EffectTextFormat.GACHA_ITEMTEXT
sep = Defines.ANIMATION_SEPARATE_STRING
urlsep = Defines.ANIMATION_URLSEPARATE_STRING
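        # The effect layer consumes flat delimiter-joined strings rather than
        # JSON arrays, so the per-card lists built below are serialized with
        # sep/urlsep joins before being handed to the animation.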
newFlag = []
rarityFlag = []
cardText = []
image = []
pointlist = []
expectation = []
is_first = page == 0
is_last = True
        # Cards obtained.
resultlist = playdata.result['result'] if isinstance(playdata.result, dict) else playdata.result
if gachamaster.consumetype in (Defines.GachaConsumeType.FUKUBUKURO, Defines.GachaConsumeType.FUKUBUKURO2016, Defines.GachaConsumeType.FUKUBUKURO2017):
page_last = int((len(resultlist) + CONTENT_NUM_PER_PAGE - 1) / CONTENT_NUM_PER_PAGE) - 1
page = min(page, page_last)
offset = page * CONTENT_NUM_PER_PAGE
resultlist = resultlist[offset:(offset+CONTENT_NUM_PER_PAGE)]
is_last = page == page_last
if gachamaster.consumetype == Defines.GachaConsumeType.FIXEDSR:
try:
gachamorecast = int(args.get(5))
except:
self.osa_util.logger.error('Invalid arguments')
self.__sendErrorResponse(400)
return
if gachamorecast == 0:
resultlist = resultlist[gachamaster.rarity_fixed_num:]
cardidlist = [data['id'] for data in resultlist]
cardmasters = BackendApi.get_cardmasters(cardidlist, model_mgr, using=settings.DB_READONLY)
groupidlist = [data['group'] for data in resultlist]
groupmaster_dict = BackendApi.get_gachagroupmaster_dict(model_mgr, groupidlist, using=settings.DB_READONLY)
rarityFlag_getter = None
if gachamaster.consumetype == Defines.GachaConsumeType.CHRISTMAS:
image_getter = lambda idx,master:(CardUtil.makeThumbnailUrlIcon(master) if idx < gachamaster.continuity-1 else CardUtil.makeThumbnailUrlMiddle(master))
cardtext_getter = lambda idx,master:master.name
elif gachamaster.consumetype in (Defines.GachaConsumeType.FUKUBUKURO, Defines.GachaConsumeType.FUKUBUKURO2016, Defines.GachaConsumeType.FUKUBUKURO2017):
image_getter = lambda idx,master:CardUtil.makeThumbnailUrlMiddle(master)
cardtext_getter = lambda idx,master:master.name
elif gachamaster.consumetype == Defines.GachaConsumeType.XMAS_OMAKE:
image_getter = lambda idx,master:CardUtil.makeThumbnailUrlIcon(master)
cardtext_getter = lambda idx,master:master.name
elif gachamaster.consumetype == Defines.GachaConsumeType.SCOUTEVENT and Defines.SCOUTEVENTGACHA_USE_EXCLUSIVE_USE_EFFECT:
image_getter = lambda idx,master:CardUtil.makeThumbnailUrlMiddle(master)
cardtext_getter = lambda idx,master:(cardtextformat_getter(master) % master.name)
else:
image_getter = lambda idx,master:self.makeAppLinkUrlImg(CardUtil.makeThumbnailUrlMiddle(master))
cardtext_getter = lambda idx,master:(cardtextformat_getter(master) % master.name)
rarityFlag_getter = rarityFlag_getter or (lambda master:'1' if Defines.Rarity.SUPERRARE <= master.rare else '0')
max_rare = Defines.Rarity.NORMAL
for idx,data in enumerate(resultlist):
master = cardmasters[data['id']]
groupmaster = groupmaster_dict.get(data['group'])
newFlag.append(str(int(bool(data['is_new']))))
cardText.append(cardtext_getter(idx, master))
image.append(image_getter(idx, master))
pointlist.append(str(data['point']))
expectation.append(str(groupmaster.expectation) if groupmaster else str(Defines.RankingGachaExpect.LOW))
rarityFlag.append(rarityFlag_getter(master))
if max_rare < master.rare:
max_rare = master.rare
v_player = self.getViewerPlayer()
        # Seat gacha data.
seatmodels = BackendApi.get_gachaseatmodels_by_gachamaster(model_mgr, uid, gachamasterstep or gachamaster, do_get_result=False, using=settings.DB_READONLY)
urldata = urlparse(self.url_cgi)
urlhead = '%s://%s%s' % (urldata.scheme, settings_sub.WEB_GLOBAL_HOST, urldata.path)
if seatmodels.get('playdata'):
            # Go to the seat animation.
url = urlhead + UrlMaker.gachaseatanim(gachamaster.id, reqkey)
else:
url = urlhead + UrlMaker.gacharesult(gachamaster.id, reqkey)
backUrl = self.osa_util.makeLinkUrl(self.addTimeStamp(url))
params = {
'newFlag': sep.join(newFlag),
'cardText' : sep.join(cardText),
'image' : urlsep.join(image),
}
if gachamaster.consumetype == Defines.GachaConsumeType.CHRISTMAS:
params['logoPre'] = self.url_static + 'effect/sp/v2/gachaxmas/data/'
params['pre'] = self.url_static_img
params['cardText'] = cardText[-1]
elif gachamaster.consumetype == Defines.GachaConsumeType.RANKING:
params.update({
'point' : sep.join(pointlist),
'expectation' : sep.join(expectation),
                'pre' : self.url_static + 'img/sp/large/gacha/ranking/rank_01/', # TODO: needs fixing to read this from the DB.
'logo_img' : 'event_logo.png',
'logo_w_img' : 'event_logo_w.png',
})
elif gachamaster.consumetype == Defines.GachaConsumeType.SCOUTEVENT and Defines.SCOUTEVENTGACHA_USE_EXCLUSIVE_USE_EFFECT:
eventmaster = BackendApi.get_current_present_scouteventmaster(model_mgr, using=settings.DB_READONLY)
if Defines.SCOUTEVENTGACHA_FOR_VALENTINE:
params.update({
'pre' : self.url_static_img,
'effectPre' : self.url_static + 'effect/sp/v2/gachascev/data/scev_25/',
                    'cardText' : params['cardText'].replace('が入店しました', ''), # Quick hack, since fixing the js/flash would be a lot of work.
})
else:
params.update({
'imagePre' : self.url_static_img,
'rarityFlag' : sep.join(rarityFlag),
'logoPre' : self.makeAppLinkUrlImg('event/scevent/%s/gacha/' % eventmaster.codename),
})
elif gachamaster.consumetype in (Defines.GachaConsumeType.FUKUBUKURO, Defines.GachaConsumeType.FUKUBUKURO2016, Defines.GachaConsumeType.FUKUBUKURO2017):
url = None
if is_last:
if isinstance(playdata.result, dict) and playdata.result.get('omake'):
prizelist = BackendApi.get_prizelist(model_mgr, playdata.result['omake'], using=settings.DB_READONLY)
presentlist = BackendApi.create_present_by_prize(model_mgr, v_player.id, prizelist, 0, using=settings.DB_READONLY, do_set_save=False)
presentsetlist = PresentSet.presentToPresentSet(model_mgr, presentlist, using=settings.DB_READONLY)
thumblist = []
omakeindexes = []
for presentset in presentsetlist:
if presentset.present.itype in (Defines.ItemType.GOLD, Defines.ItemType.GACHA_PT):
num = 1
else:
num = presentset.num
if presentset.itemthumbnail in thumblist:
idx = thumblist.index(presentset.itemthumbnail)
else:
idx = len(thumblist)
thumblist.append(presentset.itemthumbnail)
omakeindexes.extend([str(idx)] * num)
if thumblist:
params.update({
'itemImage' : urlsep.join(thumblist),
'itemImageIdx' : sep.join(omakeindexes),
})
else:
url = urlhead + UrlMaker.gachaanimsub(gachamaster.id)
url = OSAUtil.addQuery(url, Defines.URLQUERY_PAGE, page + 1)
url = self.osa_util.makeLinkUrl(self.addTimeStamp(url))
params.update({
'skipUrl': backUrl,
'pre' : self.url_static_img,
                # April version
#'logoPre' : self.url_static + 'effect/sp/v2/gachahappybag201604/data/',
#'logoPre' : self.url_static + 'effect/sp/v2/gachahappybag201605/data/',
# 'logoPre' : self.url_static + 'effect/sp/v2/gachahappybag201607/data/',
# 'logoPre' : self.url_static + 'effect/sp/v2/gachahappybag201608/data/',
'logoPre' : self.url_static + 'effect/sp/v2/gachahappybag201701/data/',
'isFirst' : is_first,
'isLast' : is_last,
'n' : gachamaster.continuity,
'rarityFlag' : sep.join(rarityFlag),
})
del params['cardText']
backUrl = url or backUrl
elif gachamaster.consumetype == Defines.GachaConsumeType.SR_SSR_PROBABILITY_UP or gachamaster.consumetype == Defines.GachaConsumeType.PTCHANGE:
            # If the trade shop is open
if gachamaster.trade_shop_master_id is not None and 0 < gachamaster.trade_shop_master_id:
try:
lottery_point = int(args.get(4))
url = urlhead + UrlMaker.gacharesult(gachamaster.id, reqkey, lottery_point=lottery_point)
except:
                    # Invalid arguments.
self.osa_util.logger.error('Invalid arguments')
self.__sendErrorResponse(400)
return
else:
url = urlhead + UrlMaker.gacharesult(gachamaster.id, reqkey)
            # Rebuild the URL
backUrl = self.osa_util.makeLinkUrl(self.addTimeStamp(url))
elif gachamaster.consumetype == Defines.GachaConsumeType.FIXEDSR:
try:
gachamorecast = int(args.get(5))
except:
self.osa_util.logger.error('Invalid arguments')
self.__sendErrorResponse(400)
return
if gachamorecast == 0:
url = urlhead + UrlMaker.gachamorecast(gachamaster.id, reqkey)
backUrl = self.osa_util.makeLinkUrl(self.addTimeStamp(url))
else:
if 0 < gachamaster.rarity_fixed_num:
fixed_card_id = cardidlist[0]
card = BackendApi.get_cardmasters([fixed_card_id], model_mgr).get(fixed_card_id)
backUrl = self.makeAppLinkUrl(UrlMaker.gacharesult(gachamaster.id, reqkey))
params = {
'cardText': Defines.EffectTextFormat.GACHA_CARDTEXT % card.name,
'image': self.makeAppLinkUrlImg(CardUtil.makeThumbnailUrlMiddle(card)),
'pre': 'img/',
}
else:
self.osa_util.logger.error('Not set Gachamaster.rarity_fixed_num')
self.__sendErrorResponse(400)
return
elif gachamaster.consumetype == Defines.GachaConsumeType.XMAS_OMAKE:
params = {
'pre' : self.url_static_img,
'logoPre' : self.url_static + 'effect/sp/v2/gachaxmas2015/',
'image' : urlsep.join(image),
'newFlag': sep.join(newFlag)
}
params['backUrl'] = backUrl
self.writeResponseBody(params)
def proc_panelmission(self, args):
"""パネルミッション.
"""
try:
panel = int(args.get(1))
except:
# Invalid arguments.
self.osa_util.logger.error('Invalid arguments')
self.__sendErrorResponse(400)
return
model_mgr = self.getModelMgr()
using = settings.DB_READONLY
# Panel master data.
panelmaster = None
if panel:
panelmaster = BackendApi.get_panelmission_panelmaster(model_mgr, panel, using=using)
if panelmaster is None:
self.osa_util.logger.error('Illegal panel number')
self.__sendErrorResponse(400)
return
v_player = self.getViewerPlayer()
uid = v_player.id
now = OSAUtil.get_now()
# Progress data.
panelplaydata = BackendApi.get_panelmission_data(model_mgr, uid, panel, using=using, get_instance=False)
if panelplaydata is None:
self.osa_util.logger.error('Illegal panel number')
self.__sendErrorResponse(400)
return
# Effect parameters.
params = {
# 'logoPre' : self.url_static + 'effect/sp/v2/panel_mission/data/201412/',
# 'logoPre' : self.url_static + 'effect/sp/v2/panel_mission/data/201505/',
# 'logoPre' : self.url_static + 'effect/sp/v2/panel_mission/data/201508/',
# 'logoPre' : self.url_static + 'effect/sp/v2/panel_mission/data/201512/',
# 'logoPre' : self.url_static + 'effect/sp/v2/panel_mission/data/201602/',
# 'logoPre' : self.url_static + 'effect/sp/v2/panel_mission/data/201604/',
# 'logoPre' : self.url_static + 'effect/sp/v2/panel_mission/data/201606/',
# 'logoPre' : self.url_static + 'effect/sp/v2/panel_mission/data/201607/',
# 'logoPre' : self.url_static + 'effect/sp/v2/panel_mission/data/201610/',
# 'logoPre' : self.url_static + 'effect/sp/v2/panel_mission/data/201612/',
'logoPre' : self.url_static + 'effect/sp/v2/panel_mission/data/201702/',
'pre' : self.url_static_img,
'panel' : panel,
'bg' : panelmaster.image,
}
# Mission master data.
missionmaster_list = BackendApi.get_panelmission_missionmaster_by_panelid(model_mgr, panel, using=using)
# All-clear flag.
is_allend = True
# Missions cleared this time.
max_time = None
clearlist = []
missionmaster_dict = {}
for missionmaster in missionmaster_list:
number = missionmaster.number
missionmaster_dict[number] = missionmaster
idx = number - 1
data = panelplaydata.get_data(number)
rtime = data['rtime']
if now < rtime:
# Image and name of a mission not yet achieved.
params['m%d' % idx] = missionmaster.image_pre
params['mtext%d' % idx] = missionmaster.name
is_allend = False
continue
elif max_time and rtime < max_time:
continue
elif max_time is None or max_time < rtime:
max_time = rtime
clearlist = []
clearlist.append(str(idx))
if not clearlist:
self.osa_util.logger.error('You can not view the effect.')
self.__sendErrorResponse(400)
return
params['clear'] = ','.join(clearlist)
# Image and name of the missions achieved this time.
for idx in clearlist:
missionmaster = missionmaster_dict[int(idx) + 1]
params['m%s' % idx] = missionmaster.image_pre
params['mtext%s' % idx] = missionmaster.name
if is_allend:
# Image and name of the obtained card.
prizelist = BackendApi.get_prizelist(model_mgr, panelmaster.prizes, using=using)
if not prizelist:
self.osa_util.logger.error('prize none.')
self.__sendErrorResponse(400)
return
presentlist = BackendApi.create_present_by_prize(model_mgr, uid, prizelist, 0, using=using, do_set_save=False)
presentset = PresentSet.presentToPresentSet(model_mgr, presentlist[:1], using=using)[0]
params['card'] = presentset.itemthumbnail_middle
params['cname'] = presentset.itemname
# Next panel.
next_panelmaster = BackendApi.get_panelmission_panelmaster(model_mgr, panel + 1, using=using)
if next_panelmaster:
next_panelmissionmaster_list = BackendApi.get_panelmission_missionmaster_by_panelid(model_mgr, next_panelmaster.id, using=using)
for next_panelmissionmaster in next_panelmissionmaster_list:
idx = next_panelmissionmaster.number - 1
params['next%s' % idx] = next_panelmissionmaster.image_pre
urldata = urlparse(self.url_cgi)
url = '%s://%s%s' % (urldata.scheme, settings_sub.WEB_GLOBAL_HOST, urldata.path)
url = url + UrlMaker.panelmissiontop()
url = self.osa_util.makeLinkUrl(self.addTimeStamp(url))
params['backUrl'] = url
self.writeResponseBody(params)
def proc_loginbonustimelimited(self, args):
"""期限付きログインボーナス.
"""
mid = args.getInt(1)
loginbonus = args.getInt(2)
str_midlist = self.request.get(Defines.URLQUERY_ID) or ''
midlist = [int(str_mid) for str_mid in str_midlist.split(',') if str_mid.isdigit()]
model_mgr = self.getModelMgr()
now = OSAUtil.get_now()
master = BackendApi.get_loginbonustimelimitedmaster(model_mgr, mid, using=settings.DB_READONLY)
if master is None:
self.osa_util.logger.error('masterdata is not found.')
self.__sendErrorResponse(400)
return
# Player info.
v_player = self.getViewerPlayer()
if BackendApi.check_lead_loginbonustimelimited(model_mgr, v_player.id, now):
# Not yet received.
self.osa_util.logger.error('not received.')
self.__sendErrorResponse(400)
return
logindata = BackendApi.get_logintimelimited_data(model_mgr, v_player.id, mid, using=settings.DB_READONLY)
if logindata is None:
self.osa_util.logger.error('logindata is None.')
self.__sendErrorResponse(400)
return
# Pick the login bonuses to display (we want to show 4, starting from the bonus just before the current day's).
table = BackendApi.get_loginbonustimelimiteddaysmaster_day_table_by_timelimitedmid(model_mgr, mid, using=settings.DB_READONLY)
params = {
'pre' : self.url_static_img,
}
# Config data.
config = BackendApi.get_current_loginbonustimelimitedconfig(model_mgr, using=settings.DB_READONLY)
config_data = dict(config.getDataList()).get(master.id)
making_functions = {
'monthly_login' : self.__makeMonthlyLoginBonusParams,
}
func = making_functions.get(master.effectname, self.__makeCommonLoginBonusParams)
tmp, cur_bonusmaster, next_bonusmaster = func(master, logindata, table, config_data)
params.update(**tmp)
# Obtained item (name, day).
if cur_bonusmaster:
params['td'] = cur_bonusmaster.day
params['tt'] = self.getBonusItemText(cur_bonusmaster)
else:
# No effect needed.
self.osa_util.logger.error('can not view the effect.')
self.__sendErrorResponse(400)
return
if next_bonusmaster:
params['nt'] = self.getBonusItemText(next_bonusmaster)
# Destination.
url = None
if mid in midlist:
next_idx = midlist.index(mid)+1
if next_idx < len(midlist):
# There is a next one.
url = UrlMaker.loginbonustimelimitedanim(midlist[next_idx], loginbonus)
url = OSAUtil.addQuery(url, Defines.URLQUERY_ID, str_midlist)
if url is None:
if loginbonus:
# Login bonus.
url = UrlMaker.loginbonusanim()
else:
url = LoginBonusHandler.getEffectBackUrl(self)
anniversary_data = {}
if master.effectname == 'countdown_login_2ndanniversary':
anniversary_data = {
'ten_digit': params['day'] / 10,
'one_digit': params['day'] % 10,
}
elif master.effectname == 'countdown_login_3rdanniversary':
anniversary_data = {
'one_digit': params['day'] % 10,
'predata': self.url_static + 'effect/sp/v2/countdown_login_3rdanniversary/data/'
}
params.update(anniversary_data)
urldata = urlparse(self.url_cgi)
urlhead = '%s://%s%s' % (urldata.scheme, settings_sub.WEB_GLOBAL_HOST, urldata.path)
url = urlhead + url
url = self.osa_util.makeLinkUrl(self.addTimeStamp(url))
params['backUrl'] = url
self.writeResponseBody(params)
def __makeCommonLoginBonusParams(self, master, logindata, day_table, config_data):
"""共通のログインボーナス演出パラメータ.
"""
VIEW_ITEM_NUM_MAX_TABLE = {
Defines.LoginBonusTimeLimitedType.TOTAL : 4,
Defines.LoginBonusTimeLimitedType.FIXATION : 6,
Defines.LoginBonusTimeLimitedType.MONTHLY : 3,
}
VIEW_ITEM_NUM_MAX_TABLE_BY_EFFECTNAME = {
'hinamatsuri_login' : 4,
'countdown_login_2ndanniversary' : 4,
'countdown_login_3rdanniversary' : 4,
'2nd_anniversary_login' : 4,
'3rd_anniversary_login' : 4,
'valentine2016' : 6,
'end_of_year_countdown' : 3,
'newyear_login' : 7,
'newbie_login' : 7,
}
item_num_max = VIEW_ITEM_NUM_MAX_TABLE_BY_EFFECTNAME.get(master.effectname, VIEW_ITEM_NUM_MAX_TABLE[master.lbtype])
model_mgr = self.getModelMgr()
cur_day = logindata.days
params = {}
cur_bonusmaster = None
next_bonusmaster = None
mid = master.id
days = day_table.keys()
days.sort()
tmp_days = list(set(days + [cur_day]))
tmp_days.sort()
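# Window the bonus schedule so the entry just before the current day leads the list, clamped so the window never runs past the end of the table.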
start = max(0, min(tmp_days.index(cur_day) - 1, len(days) - item_num_max))
bonusmidlist = []
has_next = False
for day in days[start:]:
if not day_table.has_key(day):
continue
elif len(bonusmidlist) == item_num_max:
has_next = True
break
bonusmidlist.append(day_table[day])
bonusmaster_list = BackendApi.get_loginbonustimelimiteddaysmaster_by_idlist(model_mgr, bonusmidlist, using=settings.DB_READONLY)
params.update(has_next=has_next)
if master.lbtype == Defines.LoginBonusTimeLimitedType.FIXATION:
min_time = DateTimeUtil.strToDateTime(logindata.lbtltime.strftime("%Y%m01"), "%Y%m%d") - datetime.timedelta(seconds=1)
min_time = DateTimeUtil.toLoginTime(min_time)
receive_flags = BackendApi.get_loginbonustimelimited_fixation_received_dates(logindata.uid, mid, min_time).keys()
params['logoPre'] = self.url_static + 'effect/sp/v2/%s/data/' % master.effectname
else:
params['logoPre'] = self.url_static + 'effect/sp/v2/%s/data/' % master.effectname
receive_flags = None
make_date_string = {
Defines.LoginBonusTimeLimitedType.FIXATION : lambda x:u'%s月%s日' % (logindata.lbtltime.month, x),
Defines.LoginBonusTimeLimitedType.MONTHLY : lambda x:u'%s日' % x, # monthly bonuses show the day only; a single '%s' cannot take a (month, day) tuple
}.get(master.lbtype, lambda x:'%d日目' % x)
# Item list (day and image URL).
bonusmaster_list.sort(key=lambda x:x.day)
for idx, bonusmaster in enumerate(bonusmaster_list):
params['i%d' % idx] = bonusmaster.thumb
params['d%d' % idx] = bonusmaster.day
params['date%d' % idx] = make_date_string(bonusmaster.day)
if cur_day == bonusmaster.day:
cur_bonusmaster = bonusmaster
params['idx'] = idx
elif cur_bonusmaster and not next_bonusmaster:
next_bonusmaster = bonusmaster
if receive_flags is not None:
params['f%d' % idx] = 1 if bonusmaster.day in receive_flags else 0
# Days remaining until the final day.
td = config_data['etime'] - logindata.lbtltime
params['day'] = td.days
if next_bonusmaster and 0 < td.days:
params['idxnext'] = params['idx'] + 1
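# For cumulative (TOTAL) login bonuses every slot before the current one has already been received, so flag them all.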
if master.lbtype == Defines.LoginBonusTimeLimitedType.TOTAL:
for i in xrange(params['idx']):
params['f%d' % i] = 1
def getEffectDBValue(attname, default):
v = getattr(cur_bonusmaster, attname, '') if cur_bonusmaster else ''
return v or default
# Effect wording.
params['logo'] = master.logo
params['preEffect'] = self.url_static_img + master.img_effect
params['bg'] = getEffectDBValue(u'bg', u'bg.png')
params['tlogo'] = getEffectDBValue(u'text_logo', master.text_logo)
params['t0'] = getEffectDBValue(u'text_start', master.text_start)
params['t1'] = getEffectDBValue(u'text_itemlist', master.text_itemlist)
params['t2'] = getEffectDBValue(u'text_itemget', master.text_itemget)
params['t3'] = getEffectDBValue(u'text_itemnext', master.text_itemnext)
params['t4'] = getEffectDBValue(u'text_end', master.text_end)
if cur_bonusmaster:
params['ix'] = cur_bonusmaster.item_x
params['iy'] = cur_bonusmaster.item_y
params['gx'] = cur_bonusmaster.item_x
params['gy'] = cur_bonusmaster.item_y
return params, cur_bonusmaster, next_bonusmaster
def __makeMonthlyLoginBonusParams(self, master, logindata, day_table, config_data):
"""月末ログインボーナス演出用パラメータ.
"""
LOOP_CNT = 3
ITEM_NUM_MAX = 3
model_mgr = self.getModelMgr()
mid = master.id
cur_day = logindata.days
params = {}
params['logoPre'] = self.url_static + 'effect/sp/v2/monthly_login/data/default/' # TODO: this should be configured in master data.
# The next day.
tomorrow = logindata.lbtltime + datetime.timedelta(days=1)
# Month-end is handled a bit specially.
bonusmaster_list = BackendApi.get_loginbonustimelimiteddaysmaster_by_idlist(model_mgr, day_table.values(), using=settings.DB_READONLY)
bonusmaster_list.sort(key=lambda x:x.id)
cur_bonusmaster = BackendApi.get_loginbonustimelimiteddaysmaster(model_mgr, mid, cur_day, using=settings.DB_READONLY)
next_bonusmaster = None
if config_data['stime'] <= tomorrow < config_data['etime']:
# The next day is within the event period.
next_bonusmaster = BackendApi.get_loginbonustimelimiteddaysmaster(model_mgr, mid, tomorrow.day, using=settings.DB_READONLY)
cur_prizeid = cur_bonusmaster.prizes[0] if cur_bonusmaster and cur_bonusmaster.prizes else 0
next_prizeid = next_bonusmaster.prizes[0] if next_bonusmaster and next_bonusmaster.prizes else 0
prizeidlist = []
for bonusmaster in bonusmaster_list:
if not bonusmaster.prizes:
continue
prizeid = bonusmaster.prizes[0]
if prizeid in prizeidlist:
continue
idx = len(prizeidlist)
params['i%d' % idx] = bonusmaster.thumb
prizeidlist.append(prizeid)
if ITEM_NUM_MAX <= len(prizeidlist):
break
idx = prizeidlist.index(cur_prizeid)
params['idx'] = idx
if next_prizeid:
params['idxnext'] = prizeidlist.index(next_prizeid)
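# Spin LOOP_CNT full laps of the item roulette, then stop on the slot for today's prize.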
params['rouletteCnt'] = LOOP_CNT * ITEM_NUM_MAX + idx
return params, cur_bonusmaster, next_bonusmaster
def getBonusItemText(self, master):
"""ログインボーナスのテキストを作成
"""
if LoginBonusTimeLimitedAnimationSet.exists(master.mid, master.day):
items = LoginBonusTimeLimitedAnimationSet.get(master.mid, master.day)
else:
model_mgr = self.getModelMgr()
prizelist = BackendApi.get_prizelist(model_mgr, master.prizes, using=settings.DB_READONLY)
prizeinfo = BackendApi.make_prizeinfo(self, prizelist, using=settings.DB_READONLY)
items = [listitem['text'] for listitem in prizeinfo['listitem_list']]
LoginBonusTimeLimitedAnimationSet.save(master.mid, master.day, items)
return Defines.STR_AND.join(items)
#==============================================================
# Event scenario.
def proc_eventscenario(self, args):
"""Event scenario.
"""
number = args.getInt(1)
edt = args.get(2) or ''
backUrl = '/'.join(args.args[3:])
model_mgr = self.getModelMgr()
data = BackendApi.get_eventscenario_by_number(model_mgr, number, using=settings.DB_READONLY)
if not data:
self.osa_util.logger.error('the scenario is not found...%s' % number)
self.__sendErrorResponse(404)
return
urldata = urlparse(self.url_cgi)
urlhead = '%s://%s%s' % (urldata.scheme, settings_sub.WEB_GLOBAL_HOST, urldata.path)
url = '%s/%s' % (urlhead, backUrl)
url = self.osa_util.makeLinkUrl(self.addTimeStamp(url))
img_pre = self.url_static_img + (data.get('thumb') or 'event/scenario/%d/' % number)
params = {
'backUrl' : url,
'pre' : img_pre,
'edt' : edt,
}
params.update(data)
self.writeResponseBody(params)
#==============================================================
# Sugoroku (dice board game).
def proc_sugoroku(self, args):
"""Sugoroku login.
"""
mid = args.getInt(1)
if mid is None:
self.__sendErrorResponse(404)
return
page = args.getInt(2) or 0
model_mgr = self.getModelMgr()
# Player info.
v_player = self.getViewerPlayer()
viewer_id = v_player.id
# Fetch the result data.
logindata = BackendApi.get_loginbonus_sugoroku_playerdata(model_mgr, viewer_id, mid, using=settings.DB_DEFAULT)
if logindata is None:
self.__sendErrorResponse(404)
return
# Squares landed on.
squares_id_list = logindata.result.get('square_id_list')
squares_master_list = BackendApi.get_loginbonus_sugoroku_map_squares_master_list_by_id(model_mgr, squares_id_list, using=settings.DB_READONLY)
squares_master_dict = dict([(squares_master.id, squares_master) for squares_master in squares_master_list])
page_cnt = 0
arr = []
mapid = None
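# Walk the landed squares in order; each map change starts a new page. Keep only the requested page's squares, plus the first square of the next map so the crossing can be shown.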
for squares_id in squares_id_list:
squares_master = squares_master_dict[squares_id]
if mapid is None:
mapid = squares_master.mid
elif mapid != squares_master.mid:
page_cnt += 1
if page < page_cnt:
# Also include the next map's first square.
arr.append(squares_master)
break
mapid = squares_master.mid
if page_cnt == page:
arr.append(squares_master)
squares_master_list = arr
# Map.
mapmaster = BackendApi.get_loginbonus_sugoroku_map_master(model_mgr, mapid, using=settings.DB_READONLY)
# Effect parameters.
params = dict(
backUrl = self.request.get('backUrl'),
logoPre = self.url_static_img + 'sugo6/{}/'.format(mapmaster.effectname),
pre = self.url_static_img,
lt = 0,
)
# Prizes.
prizeidlist_list = []
message_items = []
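# prizeidlist_list deduplicates prize id lists; message_items holds the indexes of prizes whose item names must be rendered as text.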
def get_prize_number(prizeidlist):
if prizeidlist in prizeidlist_list:
return prizeidlist_list.index(prizeidlist)
else:
prizeidlist_list.append(prizeidlist)
return len(prizeidlist_list) - 1
# Current position.
if 0 < page:
params['continue'] = '1'
params['cp'] = 0
else:
squares_master = squares_master_list.pop(0)
params['cp'] = squares_master.number
if len(squares_id_list) == 1:
# Did not move.
if squares_master.last:
# Final square.
params['completeitem'] = get_prize_number(mapmaster.prize)
message_items.append(params['completeitem'])
else:
# Rest square (lose a turn).
params['lt'] = logindata.lose_turns + 1
# Map data.
map_squares_master_list = BackendApi.get_loginbonus_sugoroku_map_squares_master_by_mapid(model_mgr, mapid, using=settings.DB_READONLY)
for squares_master in map_squares_master_list:
number = squares_master.number
params['et{}'.format(number)] = squares_master.event_type
params['ev{}'.format(number)] = squares_master.event_value
if squares_master.prize:
params['ei{}'.format(number)] = get_prize_number(squares_master.prize)
# Squares landed on.
params['pn'] = len(squares_master_list)
pre_event_type = Defines.SugorokuMapEventType.NONE
for i,squares_master in enumerate(squares_master_list):
if squares_master.mid == mapid:
params['p{}'.format(i)] = squares_master.number
if squares_master.prize:
message_items.append(get_prize_number(squares_master.prize))
elif pre_event_type == Defines.SugorokuMapEventType.BACK:
# Went back onto the previous map.
pre_map_squares_master_list = BackendApi.get_loginbonus_sugoroku_map_squares_master_by_mapid(model_mgr, squares_master.mid, using=settings.DB_READONLY)
params['p{}'.format(i)] = squares_master.number - len(pre_map_squares_master_list)
else:
# Advanced onto the next map.
params['p{}'.format(i)] = len(map_squares_master_list) + squares_master.number
pre_event_type = squares_master.event_type
# Items.
params['in'] = len(prizeidlist_list)
for i,prizeidlist in enumerate(prizeidlist_list):
# Item image.
if i in message_items:
prizelist = BackendApi.get_prizelist(model_mgr, prizeidlist, using=settings.DB_READONLY)
prizeinfo = BackendApi.make_prizeinfo(self, prizelist, using=settings.DB_READONLY)
# Item name.
params['in{}'.format(i)] = Defines.STR_AND.join([listitem['text'] for listitem in prizeinfo['listitem_list']])
else:
prizelist = BackendApi.get_prizelist(model_mgr, [prizeidlist[0]], using=settings.DB_READONLY)
prizeinfo = BackendApi.make_prizeinfo(self, prizelist, using=settings.DB_READONLY)
# Item image.
params['i{}'.format(i)] = prizeinfo['listitem_list'][0]['thumbUrl'].replace(params['pre'], '')
self.writeResponseBody(params)
def main(request):
return Handler.run(request)
| [
"[email protected]"
] | |
a8a1af44b4ff29b22520121f30295c8ebe1d693f | 554ec84f23825452f7692f91f742bdc81fa50e84 | /chatbot_27549/urls.py | 7d1264887b9b6eb6dad7fc662d8571cc66eddd66 | [] | no_license | crowdbotics-apps/chatbot-27549 | a7806af210b6e7ccdfb3db3dbaaac9e9dcb5a5af | 0e615cbb191a8d91e2874e7329b059193a8ad625 | refs/heads/master | 2023-05-26T13:30:53.116812 | 2021-05-29T07:24:50 | 2021-05-29T07:24:50 | 371,908,087 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,022 | py | """chatbot_27549 URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic.base import TemplateView
from allauth.account.views import confirm_email
from rest_framework import permissions
from drf_yasg.views import get_schema_view
from drf_yasg import openapi
urlpatterns = [
path("", include("home.urls")),
path("accounts/", include("allauth.urls")),
path("modules/", include("modules.urls")),
path("api/v1/", include("home.api.v1.urls")),
path("admin/", admin.site.urls),
path("users/", include("users.urls", namespace="users")),
path("rest-auth/", include("rest_auth.urls")),
# Override email confirm to use allauth's HTML view instead of rest_auth's API view
path("rest-auth/registration/account-confirm-email/<str:key>/", confirm_email),
path("rest-auth/registration/", include("rest_auth.registration.urls")),
]
admin.site.site_header = "Chatbot"
admin.site.site_title = "Chatbot Admin Portal"
admin.site.index_title = "Chatbot Admin"
# swagger
api_info = openapi.Info(
title="Chatbot API",
default_version="v1",
description="API documentation for Chatbot App",
)
schema_view = get_schema_view(
api_info,
public=True,
permission_classes=(permissions.IsAuthenticated,),
)
urlpatterns += [
path("api-docs/", schema_view.with_ui("swagger", cache_timeout=0), name="api_docs")
]
| [
"[email protected]"
] | |
f8b918dbc080c727941fe32353727591500f3f2d | 5c61851a03dd1ac98d03c2e98f27487f188ff00f | /{{cookiecutter.repo_name}}/manage.py | 13bffdcfd10dc0e98343059f47512923a6698335 | [
"BSD-3-Clause"
] | permissive | tony/cookiecutter-flask-pythonic | e7208a8fc9ccbde10e541f8e657dbf4da7b388b3 | d1274ec5d5b72cab128e593ed78de88c29bd54b5 | refs/heads/master | 2023-05-29T20:49:21.927268 | 2021-10-05T12:39:04 | 2021-10-05T12:39:04 | 35,064,692 | 39 | 4 | null | 2023-05-01T21:06:54 | 2015-05-04T22:52:20 | Python | UTF-8 | Python | false | false | 1,203 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
from flask_script import Manager
from {{ cookiecutter.repo_name }} import {{ cookiecutter.repo_name | capitalize }}
"""If not using Flask-Script::
app = {{ cookiecutter.repo_name | capitalize }}.from_cli(sys.argv[1:])
Does the trick for retrieving an application object using
pure argparse. But let's hook into Flask-Script's CLI argparse
instance.
"""
def app_wrapper(*args, **kwargs):
"""App factory returns the :class:`flask.Flask` via ``__call__``,
but because of the way :class:`flask_script.Manager` handles
accepting app objects, this wrapper returns the flask object directly.
:returns: Flask object build from CLI
:rtype: :class:`flask.Flask`
"""
return {{ cookiecutter.repo_name | capitalize }}.from_file(*args, **kwargs).app
manager = Manager(app_wrapper)
manager.add_option('-c', '--config', dest='config', required=False)
@manager.command
def run_server(*args, **kwargs):
{{ cookiecutter.repo_name | capitalize }}.from_file().run()
@manager.command
def testing(*args, **kwargs):
print('Run "./run-tests.py" or "python setup.py test".')
if __name__ == "__main__":
run_server()
| [
"[email protected]"
] | |
e6acc1a14b714638e4d8eb6b3210b8ad4b35a3c2 | 37069009dd428ce59819ffea2fcffc07dda6e712 | /django_analyze/migrations/0068_auto__add_field_genotype_max_memory_usage.py | 550ac7b81c79b27de932d2c0ecb1788805c93c03 | [] | no_license | chrisspen/django-analyze | 829f560d7c5f2fb1c19fc07bc77cb1a83238e696 | 421ee35235f76ff8657f7befe5212acd7ccf3989 | refs/heads/master | 2020-04-28T15:42:51.773823 | 2015-04-18T14:50:02 | 2015-04-18T14:50:02 | 14,995,029 | 2 | 2 | null | 2014-07-07T12:39:22 | 2013-12-06T22:26:29 | Python | UTF-8 | Python | false | false | 18,209 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Genotype.max_memory_usage'
db.add_column(u'django_analyze_genotype', 'max_memory_usage',
self.gf('django.db.models.fields.PositiveIntegerField')(null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Genotype.max_memory_usage'
db.delete_column(u'django_analyze_genotype', 'max_memory_usage')
models = {
'django_analyze.epoche': {
'Meta': {'ordering': "('genome', '-index')", 'unique_together': "(('genome', 'index'),)", 'object_name': 'Epoche'},
'genome': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'epoches'", 'to': "orm['django_analyze.Genome']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'max_fitness': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'mean_fitness': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'min_fitness': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'oldest_epoche_of_creation': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'})
},
'django_analyze.gene': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('genome', 'name'),)", 'object_name': 'Gene'},
'coverage_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'default': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'exploration_priority': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1', 'db_index': 'True'}),
'genome': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'genes'", 'to': "orm['django_analyze.Genome']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_increment': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'max_value': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'max_value_observed': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'min_value': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'min_value_observed': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'mutation_weight': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'values': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'django_analyze.genedependency': {
'Meta': {'unique_together': "(('gene', 'dependee_gene', 'dependee_value'),)", 'object_name': 'GeneDependency'},
'dependee_gene': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependents'", 'to': "orm['django_analyze.Gene']"}),
'dependee_value': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'dependencies'", 'to': "orm['django_analyze.Gene']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'positive': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'django_analyze.genestatistics': {
'Meta': {'ordering': "('genome', 'gene', '-mean_fitness')", 'object_name': 'GeneStatistics', 'managed': 'False'},
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['django_analyze.Gene']", 'on_delete': 'models.DO_NOTHING', 'db_column': "'gene_id'"}),
'genome': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'gene_statistics'", 'on_delete': 'models.DO_NOTHING', 'db_column': "'genome_id'", 'to': "orm['django_analyze.Genome']"}),
'genotype_count': ('django.db.models.fields.PositiveIntegerField', [], {}),
'id': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'primary_key': 'True'}),
'max_fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'mean_fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'min_fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'value': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
'django_analyze.genome': {
'Meta': {'object_name': 'Genome'},
'_epoche': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'current_genome'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['django_analyze.Epoche']"}),
'delete_inferiors': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'elite_ratio': ('django.db.models.fields.FloatField', [], {'default': '0.1'}),
'epoche': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'epoche_stall': ('django.db.models.fields.PositiveIntegerField', [], {'default': '10'}),
'epoches_since_improvement': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'error_report': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'evaluating_part': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'evaluation_timeout': ('django.db.models.fields.PositiveIntegerField', [], {'default': '300'}),
'evaluator': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'null': 'True', 'blank': 'True'}),
'evolution_start_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'evolving': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'max_fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'max_species': ('django.db.models.fields.PositiveIntegerField', [], {'default': '10'}),
'maximum_evaluated_population': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1000'}),
'maximum_population': ('django.db.models.fields.PositiveIntegerField', [], {'default': '10'}),
'min_fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'mutation_rate': ('django.db.models.fields.FloatField', [], {'default': '0.1'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'production_at_best': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'production_evaluation_timeout': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'null': 'True', 'blank': 'True'}),
'production_genotype': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'production_genomes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['django_analyze.Genotype']"}),
'production_genotype_auto': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'ratio_evaluated': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'version': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'})
},
'django_analyze.genotype': {
'Meta': {'ordering': "('-fitness',)", 'unique_together': "(('genome', 'fingerprint'),)", 'object_name': 'Genotype', 'index_together': "(('valid', 'fresh', 'fitness'), ('genome', 'fresh'))"},
'accuracy': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'complete_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'complete_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'max_length': '500', 'null': 'True', 'blank': 'True'}),
'epoche': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'genotypes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['django_analyze.Epoche']"}),
'epoche_of_creation': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'epoche_of_evaluation': ('django.db.models.fields.PositiveIntegerField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'evaluating': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'evaluating_pid': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'fingerprint': ('django.db.models.fields.CharField', [], {'db_index': 'True', 'max_length': '700', 'null': 'True', 'db_column': "'fingerprint'", 'blank': 'True'}),
'fingerprint_fresh': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'fitness': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'fitness_evaluation_datetime': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'fitness_evaluation_datetime_start': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'fresh': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'gene_count': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'generation': ('django.db.models.fields.PositiveIntegerField', [], {'default': '1'}),
'genome': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'genotypes'", 'to': "orm['django_analyze.Genome']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immortal': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'max_memory_usage': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'mean_absolute_error': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'mean_evaluation_seconds': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'mean_memory_usage': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'memory_usage_samples': ('picklefield.fields.PickledObjectField', [], {'null': 'True', 'blank': 'True'}),
'ontime_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'ontime_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_complete_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'production_complete_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_error': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'production_evaluating': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'production_evaluating_pid': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'production_evaluation_end_datetime': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_evaluation_start_datetime': ('django.db.models.fields.DateTimeField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_fresh': ('django.db.models.fields.BooleanField', [], {'default': 'False', 'db_index': 'True'}),
'production_ontime_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'production_ontime_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_success_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'production_success_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'production_total_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'production_valid': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'}),
'species': ('django.db.models.fields.related.ForeignKey', [], {'blank': 'True', 'related_name': "'genotypes'", 'null': 'True', 'on_delete': 'models.SET_NULL', 'to': "orm['django_analyze.Species']"}),
'success_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'success_ratio': ('django.db.models.fields.FloatField', [], {'db_index': 'True', 'null': 'True', 'blank': 'True'}),
'total_evaluation_seconds': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'total_parts': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'valid': ('django.db.models.fields.BooleanField', [], {'default': 'True', 'db_index': 'True'})
},
'django_analyze.genotypegene': {
'Meta': {'ordering': "('gene__name',)", 'unique_together': "(('genotype', 'gene'),)", 'object_name': 'GenotypeGene'},
'_value': ('django.db.models.fields.CharField', [], {'max_length': '1000', 'db_column': "'value'"}),
'_value_genome': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['django_analyze.Genome']", 'null': 'True', 'on_delete': 'models.SET_NULL', 'blank': 'True'}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'gene_values'", 'to': "orm['django_analyze.Gene']"}),
'genotype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'genes'", 'to': "orm['django_analyze.Genotype']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'})
},
u'django_analyze.genotypegeneillegal': {
'Meta': {'object_name': 'GenotypeGeneIllegal', 'managed': 'False'},
'gene_value': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['django_analyze.GenotypeGene']", 'on_delete': 'models.DO_NOTHING', 'primary_key': 'True', 'db_column': "'illegal_genotypegene_id'"}),
'genotype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'illegal_gene_values'", 'on_delete': 'models.DO_NOTHING', 'db_column': "'illegal_genotype_id'", 'to': "orm['django_analyze.Genotype']"}),
'illegal_gene_name': ('django.db.models.fields.CharField', [], {'max_length': '1000'})
},
u'django_analyze.genotypegenemissing': {
'Meta': {'object_name': 'GenotypeGeneMissing', 'managed': 'False'},
'default': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'gene': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['django_analyze.Gene']", 'on_delete': 'models.DO_NOTHING', 'primary_key': 'True', 'db_column': "'gene_id'"}),
'gene_name': ('django.db.models.fields.CharField', [], {'max_length': '1000'}),
'genotype': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'missing_gene_values'", 'on_delete': 'models.DO_NOTHING', 'db_column': "'genotype_id'", 'to': "orm['django_analyze.Genotype']"})
},
'django_analyze.species': {
'Meta': {'ordering': "('genome', 'index')", 'unique_together': "(('genome', 'index'),)", 'object_name': 'Species', 'index_together': "(('genome', 'index'),)"},
'centroid': ('picklefield.fields.PickledObjectField', [], {'null': 'True', 'blank': 'True'}),
'genome': ('django.db.models.fields.related.ForeignKey', [], {'related_name': "'species'", 'to': "orm['django_analyze.Genome']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'index': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0', 'db_index': 'True'}),
'population': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'})
}
}
complete_apps = ['django_analyze'] | [
"chris@coronis"
] | chris@coronis |
af5d3531a0c3b27b202c1ef66223d898bd77ec13 | 008aada8c0e718e0220eabc5b54732a1e1b07f97 | /sergeant/connector/_connector.py | ee1985d5cf05a1683d5b4b588c6a582648b9599b | [
"MIT"
] | permissive | gabriel-yahav/sergeant | 59259a92c4c072e317d82022f19b440b21d2c294 | 0de9bfb4fdca62f061d6588c6839c4491c5d4f9b | refs/heads/master | 2022-09-30T04:38:48.414842 | 2020-05-26T10:28:50 | 2020-05-26T10:28:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,931 | py | import typing
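# Abstract connector interface; concrete backends (e.g. Redis-based ones) are expected to override every method below.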
class Lock:
def acquire(
self,
timeout: typing.Optional[float] = None,
check_interval: float = 1.0,
ttl: int = 60,
) -> bool:
raise NotImplementedError()
def release(
self,
) -> bool:
raise NotImplementedError()
def is_locked(
self,
) -> bool:
raise NotImplementedError()
def set_ttl(
self,
ttl: int,
) -> bool:
raise NotImplementedError()
def get_ttl(
self,
) -> typing.Optional[int]:
raise NotImplementedError()
class Connector:
name: str
def key_set(
self,
key: str,
value: bytes,
) -> bool:
raise NotImplementedError()
def key_get(
self,
key: str,
) -> typing.Optional[bytes]:
raise NotImplementedError()
def key_delete(
self,
key: str,
) -> bool:
raise NotImplementedError()
def queue_pop(
self,
queue_name: str,
) -> typing.Optional[bytes]:
raise NotImplementedError()
def queue_pop_bulk(
self,
queue_name: str,
number_of_items: int,
) -> typing.List[bytes]:
raise NotImplementedError()
def queue_push(
self,
queue_name: str,
item: bytes,
priority: str = 'NORMAL',
) -> bool:
raise NotImplementedError()
def queue_push_bulk(
self,
queue_name: str,
items: typing.Iterable[bytes],
priority: str = 'NORMAL',
) -> bool:
raise NotImplementedError()
def queue_length(
self,
queue_name: str,
) -> int:
raise NotImplementedError()
def queue_delete(
self,
queue_name: str,
) -> bool:
raise NotImplementedError()
def lock(
self,
name: str,
) -> Lock:
raise NotImplementedError()
| [
"[email protected]"
] | |
b12892a96f4b48796a35f6700c11b1ce1875c2cf | 94c8dd4126da6e9fe9acb2d1769e1c24abe195d3 | /test/python/circuit/library/test_phase_estimation.py | 8bf3d15d9ea0a395cd1d2ede7c122fdb666605b4 | [
"Apache-2.0"
] | permissive | levbishop/qiskit-terra | a75c2f96586768c12b51a117f9ccb7398b52843d | 98130dd6158d1f1474e44dd5aeacbc619174ad63 | refs/heads/master | 2023-07-19T19:00:53.483204 | 2021-04-20T16:30:16 | 2021-04-20T16:30:16 | 181,052,828 | 1 | 0 | Apache-2.0 | 2019-06-05T15:32:13 | 2019-04-12T17:20:54 | Python | UTF-8 | Python | false | false | 5,238 | py | # This code is part of Qiskit.
#
# (C) Copyright IBM 2017, 2020.
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
"""Test library of phase estimation circuits."""
import unittest
import numpy as np
from qiskit.test.base import QiskitTestCase
from qiskit import BasicAer, execute
from qiskit.circuit import QuantumCircuit
from qiskit.circuit.library import PhaseEstimation, QFT
from qiskit.quantum_info import Statevector
class TestPhaseEstimation(QiskitTestCase):
"""Test the phase estimation circuit."""
def assertPhaseEstimationIsCorrect(self, pec: QuantumCircuit, eigenstate: QuantumCircuit,
phase_as_binary: str):
r"""Assert that the phase estimation circuit implements the correct transformation.
Applying the phase estimation circuit on a target register which holds the eigenstate
:math:`|u\rangle` (say the last register), the final state should be
.. math::
|\phi_1\rangle \cdots |\phi_t\rangle |u\rangle
where the eigenvalue is written as :math:`e^{2\pi i \phi}` and the angle is represented
in binary fraction, i.e. :math:`\phi = 0.\phi_1 \ldots \phi_t`.
Args:
pec: The circuit implementing the phase estimation circuit.
eigenstate: The eigenstate as circuit.
phase_as_binary: The phase of the eigenvalue in a binary fraction. E.g. if the
phase is 0.25, the binary fraction is '01' as 0.01 = 0 * 0.5 + 1 * 0.25 = 0.25.
"""
# the target state
eigenstate_as_vector = Statevector.from_instruction(eigenstate).data
reference = eigenstate_as_vector
zero, one = [1, 0], [0, 1]
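# Build the expected state by tensoring the phase-register bits onto the eigenstate; the reversed bit order accounts for Qiskit's little-endian qubit convention.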
for qubit in phase_as_binary[::-1]:
reference = np.kron(reference, zero if qubit == '0' else one)
# the simulated state
circuit = QuantumCircuit(pec.num_qubits)
circuit.compose(eigenstate,
list(range(pec.num_qubits - eigenstate.num_qubits, pec.num_qubits)),
inplace=True)
circuit.compose(pec, inplace=True)
# TODO use Statevector for simulation once Qiskit/qiskit-terra#4681 is resolved
# actual = Statevector.from_instruction(circuit).data
backend = BasicAer.get_backend('statevector_simulator')
actual = execute(circuit, backend).result().get_statevector()
np.testing.assert_almost_equal(reference, actual)
def test_phase_estimation(self):
"""Test the standard phase estimation circuit."""
with self.subTest('U=S, psi=|1>'):
unitary = QuantumCircuit(1)
unitary.s(0)
eigenstate = QuantumCircuit(1)
eigenstate.x(0)
# eigenvalue is 1j = exp(2j pi 0.25) thus phi = 0.25 = 0.0100 = '0100'
# using four digits as 4 evaluation qubits are used
phase_as_binary = '0100'
pec = PhaseEstimation(4, unitary)
self.assertPhaseEstimationIsCorrect(pec, eigenstate, phase_as_binary)
with self.subTest('U=SZ, psi=|11>'):
unitary = QuantumCircuit(2)
unitary.z(0)
unitary.s(1)
eigenstate = QuantumCircuit(2)
eigenstate.x([0, 1])
# eigenvalue is -1j = exp(2j pi 0.75) thus phi = 0.75 = 0.110 = '110'
# using three digits as 3 evaluation qubits are used
phase_as_binary = '110'
pec = PhaseEstimation(3, unitary)
self.assertPhaseEstimationIsCorrect(pec, eigenstate, phase_as_binary)
with self.subTest('a 3-q unitary'):
unitary = QuantumCircuit(3)
unitary.x([0, 1, 2])
unitary.cz(0, 1)
unitary.h(2)
unitary.ccx(0, 1, 2)
unitary.h(2)
eigenstate = QuantumCircuit(3)
eigenstate.h(0)
eigenstate.cx(0, 1)
eigenstate.cx(0, 2)
# the unitary acts as identity on the eigenstate, thus the phase is 0
phase_as_binary = '00'
pec = PhaseEstimation(2, unitary)
self.assertPhaseEstimationIsCorrect(pec, eigenstate, phase_as_binary)
def test_phase_estimation_iqft_setting(self):
"""Test default and custom setting of the QFT circuit."""
unitary = QuantumCircuit(1)
unitary.s(0)
with self.subTest('default QFT'):
pec = PhaseEstimation(3, unitary)
expected_qft = QFT(3, inverse=True, do_swaps=False).reverse_bits()
self.assertEqual(pec.data[-1][0].definition, expected_qft)
with self.subTest('custom QFT'):
iqft = QFT(3, approximation_degree=2).inverse()
pec = PhaseEstimation(3, unitary, iqft=iqft)
self.assertEqual(pec.data[-1][0].definition, iqft)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
aa3069e85491124d364115e57d1a97e1ff6dbda7 | e2589896ad0e629d933f1e9e03f9963eb922664a | /backend/cool_dust_27675/wsgi.py | 297564f38beadc76f1ea37eeabd22b393dcbc0c4 | [] | no_license | crowdbotics-apps/cool-dust-27675 | 89b947ddd6c87d70febeb2af15ffab3706b6cc13 | f2fa1d6f4206955173a2ebf1b0f824ee5d184d1a | refs/heads/master | 2023-05-08T10:46:07.382608 | 2021-06-02T06:44:22 | 2021-06-02T06:44:22 | 373,066,266 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 407 | py | """
WSGI config for cool_dust_27675 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'cool_dust_27675.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
b94eb3cd9714f1550d11a2faa1808f08db720be0 | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/surface/storage/delete.py | b0dd92d45fc1d77f4de21763de0131975f546827 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 5,897 | py | # -*- coding: utf-8 -*- #
# Copyright 2013 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command to list Cloud Storage objects."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.storage import storage_api
from googlecloudsdk.api_lib.storage import storage_util
from googlecloudsdk.calliope import base
from googlecloudsdk.calliope import exceptions
from googlecloudsdk.command_lib.storage import expansion
from googlecloudsdk.command_lib.storage import flags
from googlecloudsdk.command_lib.storage import storage_parallel
from googlecloudsdk.core import log
from googlecloudsdk.core.console import console_io
@base.Hidden
@base.Deprecate(is_removed=False, warning='This command is deprecated. '
'Use `gcloud alpha storage rm` instead.')
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class Delete(base.Command):
"""Delete Cloud Storage objects and buckets."""
detailed_help = {
'DESCRIPTION': """\
*{command}* lets you delete Cloud Storage objects and buckets. You can
specify one or more paths (including wildcards) and all matching objects
and buckets will be deleted.
""",
'EXAMPLES': """\
To delete an object, run:
$ *{command}* gs://mybucket/a.txt
To delete all objects in a directory, run:
$ *{command}* gs://mybucket/remote-dir/*
The above command will delete all objects under remote-dir/ but not its sub-directories.
To delete a directory and all its objects and subdirectories, run:
$ *{command}* --recursive gs://mybucket/remote-dir
$ *{command}* gs://mybucket/remote-dir/**
To delete all objects and subdirectories of a directory, without deleting the directory
itself, run:
$ *{command}* --recursive gs://mybucket/remote-dir/*
or
$ *{command}* gs://mybucket/remote-dir/**
To delete all objects and directories in a bucket without deleting the bucket itself, run:
$ *{command}* gs://mybucket/**
To delete all text files in a bucket or a directory, run:
$ *{command}* gs://mybucket/*.txt
$ *{command}* gs://mybucket/remote-dir/*.txt
To go beyond directory boundary and delete all text files in a bucket or a directory, run:
$ *{command}* gs://mybucket/**/*.txt
$ *{command}* gs://mybucket/remote-dir/**/*.txt
To delete a bucket, run:
$ *{command}* gs://mybucket
You can use wildcards in bucket names. To delete all buckets with prefix of `my`, run:
$ *{command}* --recursive gs://my*
""",
}
@staticmethod
def Args(parser):
parser.add_argument(
'path',
nargs='+',
help='The path of objects and directories to delete. The path must '
'begin with gs:// and may or may not contain wildcard characters.')
parser.add_argument(
'--recursive',
action='store_true',
help='Recursively delete the contents of any directories that match '
'the path expression.')
parser.add_argument(
'--num-threads',
type=int,
hidden=True,
default=16,
help='The number of threads to use for the delete.')
flags.add_additional_headers_flag(parser)
def Run(self, args):
paths = args.path or ['gs://']
expander = expansion.GCSPathExpander()
objects, dirs = expander.ExpandPaths(paths)
if dirs and not args.recursive:
raise exceptions.RequiredArgumentException(
'--recursive',
'Source path matches directories but --recursive was not specified.')
buckets = []
dir_paths = []
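# A directory URL with an empty object name is a bucket root (the bucket itself gets deleted); every matched directory is also expanded recursively via the '**' wildcard.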
for d in dirs:
obj_ref = storage_util.ObjectReference.FromUrl(d, allow_empty_object=True)
if not obj_ref.name:
buckets.append(obj_ref.bucket_ref)
dir_paths.append(d + '**')
sub_objects, _ = expander.ExpandPaths(dir_paths)
objects.update(sub_objects)
tasks = []
for o in sorted(objects):
tasks.append(storage_parallel.ObjectDeleteTask(
storage_util.ObjectReference.FromUrl(o)))
if buckets:
# Extra warnings and confirmation if any buckets will be deleted.
log.warning('Deleting a bucket is irreversible and makes that bucket '
'name available for others to claim.')
message = 'This command will delete the following buckets:\n '
message += '\n '.join([b.bucket for b in buckets])
console_io.PromptContinue(
message=message, throw_if_unattended=True, cancel_on_no=True)
# TODO(b/120033753): Handle long lists of items.
message = 'You are about to delete the following:'
message += ''.join(['\n ' + b.ToUrl() for b in buckets])
message += ''.join(['\n ' + t.obj_ref.ToUrl() for t in tasks])
console_io.PromptContinue(
message=message, throw_if_unattended=True, cancel_on_no=True)
storage_parallel.ExecuteTasks(tasks, num_threads=args.num_threads,
progress_bar_label='Deleting Files')
log.status.write(
'Deleted [{}] file{}.\n'.format(
len(tasks), 's' if len(tasks) > 1 else ''))
storage_client = storage_api.StorageClient()
for b in buckets:
storage_client.DeleteBucket(b)
log.DeletedResource(b.ToUrl(), kind='bucket')
| [
"[email protected]"
] | |
74d689c8c85d5d2561a6abc2a06ba077a7496e0e | 0fa82ccc0b93944c4cbb8255834b019cf16d128d | /Az/temp.py | caf3bc211fbf8fccda75e10e1fee9d32caddc4ec | [] | no_license | Akashdeepsingh1/project | 6ad477088a3cae2d7eea818a7bd50a2495ce3ba8 | bdebc6271b39d7260f6ab5bca37ab4036400258f | refs/heads/master | 2022-12-13T23:09:35.782820 | 2020-08-27T14:22:37 | 2020-08-27T14:22:37 | 279,722,741 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 563 | py | def mincostTickets (days, costs):
dp = [0] * 366 # dp[i] = cheapest cost covering all travel days up to day i (LeetCode 983)
for i in range (1,max (days)+1):
if i in days:
# Travel day: buy a 1-, 7- or 30-day pass ending here; max(0, ...) guards the start of the calendar.
dp[i] = min (dp[i - 1] + costs[0], dp[max (0, i - 7)] + costs[1], dp[max (0, i - 30)] + costs[2])
else:
# No travel on day i: the cost simply carries over.
dp[i] = dp[i-1]
return dp[max (days)]
def mincostTickets2( days, costs):
dp = [0]*366
for i in range(1,max(days)+1):
if i not in days:
# Skip non-travel days so their cost carries over; otherwise every calendar day gets paid for.
dp[i] = dp[i-1]
continue
dp[i] = min(dp[i-1] + costs[0] , dp[max(0,i-7)] + costs[1], dp[max(0,i-30)] + costs[2])
return dp[max(days)]
days = [1,4,6,7,8,20]
costs= [2,7,15]
print (mincostTickets2 (days, costs)) # expected output: 11
| [
"[email protected]"
] | |
114910137765ee9246494ef8b775990951da0d1f | b321ca6310cd84bd8603fa9685365bb2a4acc945 | /公司真题/拼多多/phone_number.py | 144534cc23631ee5da9b7f732598e83ae9e6c492 | [] | no_license | baixiaoyanvision/python-algorithm | 71b2fdf7d6b57be8a2960c44160f2a7459e153ae | 6cbb61213af8264e083af1994522929fb7711616 | refs/heads/master | 2020-08-27T03:41:08.332322 | 2019-10-02T13:28:49 | 2019-10-02T13:28:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,096 | py |
# line1 = input()
# line2 = input()
line1 = '6 5'
line2 = '787585'
N, K = [int(i) for i in line1.split()]
line2 = [int(i) for i in line2]
result = []
line2_set = set(line2)
min_money = 99999999
for val in line2_set:
sub_vals = [abs(val - number) for number in line2]
sort_sub_vals = sorted( list(range(len(sub_vals))), key=lambda x: sub_vals[x] )
pay_money = sum([sub_vals[i] for i in sort_sub_vals[:K]])
equal_val = sub_vals[sort_sub_vals[K-1]]
copy_line2 = line2[:]
for i in sort_sub_vals[:K-1]:
copy_line2[i] = val
last_change = None
for i in range(len(copy_line2)):
if abs(copy_line2[i]-val) == equal_val:
last_change = i
copy_line2[last_change] = val
copy_line2 = [str(i) for i in copy_line2]
copy_line2 = ''.join(copy_line2)
if pay_money > min_money:
continue
elif pay_money < min_money:
result = []
result.append(copy_line2)
min_money = pay_money
else:
result.append(copy_line2)
result = sorted(result)
print(min_money)
print(result[0])
| [
"[email protected]"
] | |
8b97fb6b8a7718a7b273586c5c11230785335bf5 | 51d348426c6e5fa79f2e77baf59bdbf8357d9f12 | /week10/Инфоматрикс/d.массивы/1.py | 39e914161d08fae03f9bd90984ada04bfe926359 | [] | no_license | Zhansayaas/webdev | c01325b13abf92cef13138d7ffc123cf9bc4f81a | dd054d0bcafc498eccc5f4626ab45fd8b46b3a3f | refs/heads/main | 2023-04-10T23:33:30.469465 | 2021-04-17T10:21:53 | 2021-04-17T10:21:53 | 322,049,225 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 82 | py | n=int(input())
a=input().split()
for i in range(0,n,2):
print(a[i],end=' ') | [
"[email protected]"
] | |
c0d29ea3e56d0a9a1129476105c243a8a2566772 | 8d2a124753905fb0455f624b7c76792c32fac070 | /pytnon-month01/周六练习-practice on saturday/独立完成/OOP-fanb-1_student_manager_system.py | 370a4186757ac84e2f949eca27cb01e393c5348c | [] | no_license | Jeremy277/exercise | f38e4f19aae074c804d265f6a1c49709fd2cae15 | a72dd82eb2424e4ae18e2f3e9cc66fc4762ec8fa | refs/heads/master | 2020-07-27T09:14:00.286145 | 2019-09-17T11:31:44 | 2019-09-17T11:31:44 | 209,041,629 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,533 | py | #学生信息管理系统:
# 数据模型类:StudentModel
# 数据:编号 id,姓名 name,年龄 age,成绩 score
class StudentModel:
def __init__(self,name,age,score,id = 0):
self.name = name
self.age = age
self.score = score
self.id = id
# Controller class: StudentManagerController
#   data: student list __stu_list
#         (private attribute, exposed read-only)
#   behaviour: stu_list getter, add_student, remove_student,
#   update_student, order_by_score (sort by score).
class StudentManagerController:
__stu_id = 1000
    def __init__(self):  # no extra parameters needed here
        self.__stu_list = []  # start from an empty list
@property
def stu_list(self):
return self.__stu_list
def add_student(self,stu):
StudentManagerController.__stu_id += 1
stu.id = StudentManagerController.__stu_id
self.__stu_list.append(stu)
def remove_student(self,id):
for item in self.__stu_list:
if item.id == id:
self.__stu_list.remove(item)
return True
def update_student(self,stu):
for item in self.__stu_list:
if item.id == stu.id:
item.name = stu.name
item.age = stu.age
item.score = stu.score
return True
    def order_by_score(self):
        # simple in-place bubble sort, ascending by score
        for i in range(len(self.__stu_list)-1):
for j in range(i+1,len(self.__stu_list)):
if self.__stu_list[i].score > self.__stu_list[j].score:
self.__stu_list[i],self.__stu_list[j] = self.__stu_list[j],self.__stu_list[i]
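        # An equivalent one-liner (an alternative sketch, not the original
        # author's code): self.__stu_list.sort(key=lambda s: s.score)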
# View class: StudentManagerView
#   data: controller object __manager
#   behaviour: show menu __display_menu, main entry loop main,
#   input students __input_students, output students __output_students,
#   delete student __delete_student, modify student __modify_student
class StudentManagerView:
def __init__(self):
self.__manager = StudentManagerController()
def __display_menu(self):
        print('''
        Student Information Management System 1.0
        +-----------------------+
        | 0) Exit               |
        | 1) Add a student      |
        | 2) List all students  |
        | 3) Delete a student   |
        | 4) Update a student   |
        | 5) Sort by score      |
        +-----------------------+
        ''')
def main(self):
choice = None
while choice != 0:
self.__display_menu()
            choice = input('Please choose an option: ')
if choice == '0':
                print('Thanks for using the system. Bye!')
break
elif choice == '1':
self.__input_students()
elif choice == '2':
self.__output_students()
elif choice == '3':
self.__delete_student()
elif choice == '4':
self.__modify_student()
elif choice == '5':
self.__sort_by_score()
else:
                print('Please enter a valid option!')
    def __input_students(self):
        name = input('Student name: ')
        age = int(input('Student age: '))
        score = int(input('Student score: '))
        stu = StudentModel(name, age, score)
        self.__manager.add_student(stu)
        print('Student added successfully!')
    def __output_students(self):
        print('Students:')
        for item in self.__manager.stu_list:
            print(item.id, item.name, item.age, item.score)
    def __delete_student(self):
        stu_id = int(input('Id of the student to delete: '))
        if self.__manager.remove_student(stu_id):
            print('Student removed successfully!')
        else:
            print('Failed to remove the student!')
    def __modify_student(self):
        id = int(input('Id of the student to update: '))
        name = input('New name: ')
        age = int(input('New age: '))
        score = int(input('New score: '))
        stu = StudentModel(name, age, score, id)
        if self.__manager.update_student(stu):
            print('Student updated successfully!')
        else:
            print('Failed to update the student!')
    def __sort_by_score(self):
        self.__manager.order_by_score()
        print('Sorted by score!')
view = StudentManagerView()
view.main()
# 1. Tests for the controller logic
# test adding students
# manger = StudentManagerController()
# s01 = StudentModel('许瑶',18,98)
# s02 = StudentModel('许仙',16,99)
# s03 = StudentModel('小青',15,79)
# s04 = StudentModel('姐夫',15,79)
# manger.add_student(s01)
# manger.add_student(s02)
# manger.add_student(s03)
# manger.add_student(s04)
# for item in manger.stu_list:
#     print(item.id,item.name,item.age,item.score)
# # manger.stu_list keeps the student objects
# # print(manger.stu_list[1].name)
# # test removing a student
# manger.remove_student(1004)
# for item in manger.stu_list:
#     print('after removal:',item.id,item.name)
# # test updating a student
# manger.update_student(StudentModel('娘子',19,80,1001))
# for item in manger.stu_list:
#     print('after update:',item.id,item.name,item.age,item.score)
# # test sorting by score
# manger.order_by_score()
# for item in manger.stu_list:
#     print('ascending by score:',item.id,item.name,item.age,item.score)
"[email protected]"
] | |
edbc5843172b296c275bf4d38092d8dabd6213fe | bd3b1eaedfd0aab45880c100b86bc4714149f5cd | /student/dyp1/11.py | c6e63aa6b223b8b5cdbb13353fe5872beeeea0a7 | [] | no_license | ophwsjtu18/ohw19f | a008cd7b171cd89fa116718e2a5a5eabc9f7a93e | 96dedf53a056fbb4d07c2e2d37d502171a6554a6 | refs/heads/master | 2020-08-08T12:59:38.875197 | 2020-04-01T10:38:14 | 2020-04-01T10:38:14 | 213,835,959 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,178 | py | import numpy as np
import cv2

# NOTE: these cascade paths are machine-specific; adjust them for your
# own installation.
face_cascade = cv2.CascadeClassifier('C:\\Users\\DING-DING\\AppData\\Local\\Programs\\Python\\Python36\\Lib\\site-packages\\cv2\\data\\haarcascade_frontalface_default.xml')
eye_cascade = cv2.CascadeClassifier('C:\\Users\\DING-DING\\AppData\\Local\\Programs\\Python\\Python36\\Lib\\site-packages\\cv2\\data\\haarcascade_eye.xml')  # loaded but unused below

def hhh(image, lists):
    # split each detected face rectangle into a 3x3 grid of boxes
    for (x, y, w, h) in lists:
        a = x
        for _row in range(3):
            for _col in range(3):
                cv2.rectangle(image, (x, y), (x + w // 3, y + h // 3), (255, 0, 0), 2)
                x += w // 3
            x = a
            y += h // 3

capture = cv2.VideoCapture(0)
while True:
    ret, frame = capture.read()
    if not ret:  # no frame grabbed (camera unavailable)
        break
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    faces = face_cascade.detectMultiScale(gray, 1.3, 5)
    for (x, y, w, h) in faces:
        img = cv2.rectangle(gray, (x, y), (x + w, y + h), (255, 0, 0), 2)
        roi_gray = gray[y:y + h, x:x + w]   # regions of interest,
        roi_color = img[y:y + h, x:x + w]   # currently unused
    hhh(gray, faces)
    cv2.imshow('frame', gray)
    # waitKey must run inside the loop so the window refreshes; press q to quit
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
capture.release()
cv2.destroyAllWindows()
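# A more portable way to load the cascades (a sketch; assumes a standard
# opencv-python install where cv2.data is available):
# face_cascade = cv2.CascadeClassifier(
#     cv2.data.haarcascades + 'haarcascade_frontalface_default.xml')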
| [
"[email protected]"
] | |
34fc9717d6ba5477e1aa8e8cc9c71b46b8ee7fd2 | 2f2feae3dee5847edbf95c1eeb14e656490dae35 | /2022/day_13_distress_signal_1.py | e89f9fb5f20ecbd78b7b38f8d58eca40028031af | [] | no_license | olga3n/adventofcode | 32597e9044e11384452410b7a7dda339faf75f32 | 490a385fb8f1c45d22deb27bf21891e193fe58a2 | refs/heads/master | 2023-01-07T09:19:04.090030 | 2022-12-25T13:31:22 | 2022-12-25T13:31:22 | 163,669,598 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,209 | py | #!/usr/bin/env python3
import sys
import json
from typing import Iterable, List, Any, Tuple, Optional
def is_right_order(left: List[Any], right: List[Any]) -> Optional[bool]:
iter_left = iter(left)
iter_right = iter(right)
while True:
item_left = next(iter_left, None)
item_right = next(iter_right, None)
if item_left is None and item_right is None:
return None
if item_left is None:
return True
if item_right is None:
return False
if isinstance(item_left, int) and isinstance(item_right, int):
if item_left < item_right:
return True
if item_left > item_right:
return False
if item_right == item_left:
continue
if isinstance(item_left, int):
item_left = [item_left]
if isinstance(item_right, int):
item_right = [item_right]
value = is_right_order(item_left, item_right)
if value is not None:
return value
def build_pairs(data: Iterable[str]) -> Iterable[Tuple[List[Any], List[Any]]]:
buf = []
for line in data:
if not line.strip():
continue
buf.append(line)
if len(buf) == 2:
yield json.loads(buf[0]), json.loads(buf[1])
buf = []
def right_order_pairs(data: Iterable[str]) -> int:
return sum(
index + 1 for index, pair in enumerate(build_pairs(data))
if is_right_order(pair[0], pair[1])
)
def test_right_order_pairs():
data = [
'[1,1,3,1,1]',
'[1,1,5,1,1]',
'',
'[[1],[2,3,4]]',
'[[1],4]',
'',
'[9]',
'[[8,7,6]]',
'',
'[[4,4],4,4]',
'[[4,4],4,4,4]',
'',
'[7,7,7,7]',
'[7,7,7]',
'',
'[]',
'[3]',
'',
'[[[]]]',
'[[]]',
'',
'[1,[2,[3,[4,[5,6,7]]]],8,9]',
'[1,[2,[3,[4,[5,6,0]]]],8,9]'
]
assert right_order_pairs(data) == 13
def main():
data = sys.stdin
result = right_order_pairs(data)
print(result)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
9c7357576d312b577fde01d5955822e944b46c7b | d0f11aa36b8c594a09aa06ff15080d508e2f294c | /leecode/1-500/401-500/472-连接词.py | 4edb1540db15225aeb711ca0bd0954fa23641a7b | [] | no_license | saycmily/vtk-and-python | 153c1fe9953fce685903f938e174d3719eada0f5 | 5045d7c44a5af5c16df5a3b72c157e9a2928a563 | refs/heads/master | 2023-01-28T14:02:59.970115 | 2021-04-28T09:03:32 | 2021-04-28T09:03:32 | 161,468,316 | 1 | 1 | null | 2023-01-12T05:59:39 | 2018-12-12T10:00:08 | Python | UTF-8 | Python | false | false | 1,016 | py | class Solution:
    def findAllConcatenatedWordsInADict(self, words):
        # LeetCode 472: a concatenated word is made of at least two shorter
        # words from the list. Words are processed from shortest to longest;
        # already-seen words live in a trie whose end-of-word marker is '#'.
        def search(word, pre_dict):
            # can `word` be fully segmented into words stored in the trie?
            if len(word) == 0:
                return True
            cur_dict = pre_dict
            for i, c in enumerate(word):
                cur_dict = cur_dict.get(c, None)
                if not cur_dict:
                    return False
                if '#' in cur_dict:
                    # a stored word ends here: try to segment the remainder
                    if search(word[i + 1:], pre_dict):
                        return True
            return False
        def insert(word, cur_dict):
            for c in word:
                if c not in cur_dict:
                    cur_dict[c] = {}
                cur_dict = cur_dict[c]
            cur_dict['#'] = {}  # end-of-word marker
        words.sort(key=len)
        ret = []
        pre_dict = {}
        for word in words:
            if len(word) == 0:
                continue
            # `word` is not in the trie yet, so search succeeds only when it
            # splits into two or more previously inserted (shorter) words
            if search(word, pre_dict):
                ret.append(word)
            else:
                insert(word, pre_dict)
        return ret
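# Quick check against the LeetCode 472 sample (output order follows this
# implementation's shortest-first processing, so treat it as an assumption):
# Solution().findAllConcatenatedWordsInADict(
#     ["cat", "cats", "catsdogcats", "dog", "dogcatsdog",
#      "hippopotamuses", "rat", "ratcatdogcat"])
# -> ['dogcatsdog', 'catsdogcats', 'ratcatdogcat']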
"[email protected]"
] | |
713b479653ed7764eabad8e061233c7fc1086f24 | 0c2ca3b3c7f307c29f45957e87ed940c23571fae | /fhirclient/models/bodysite_tests.py | a3aaa3593967b5390640ec04095fcc47317b4e4a | [
"LicenseRef-scancode-warranty-disclaimer",
"Apache-2.0"
] | permissive | myungchoi/client-py-1.0.3 | 49c3d15b8dfb845e7cbc933084ed5fcc37e7c4ed | 08e4e5828fb461c105907fd454b19dfc8463aad8 | refs/heads/master | 2021-06-25T04:36:26.952685 | 2021-02-11T16:27:26 | 2021-02-11T16:27:26 | 209,669,881 | 0 | 0 | NOASSERTION | 2021-03-20T01:45:42 | 2019-09-20T00:11:10 | Python | UTF-8 | Python | false | false | 2,663 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Generated from FHIR 1.0.2.7202 on 2016-03-23.
# 2016, SMART Health IT.
import os
import io
import unittest
import json
from . import bodysite
from .fhirdate import FHIRDate
class BodySiteTests(unittest.TestCase):
def instantiate_from(self, filename):
datadir = os.environ.get('FHIR_UNITTEST_DATADIR') or ''
with io.open(os.path.join(datadir, filename), 'r', encoding='utf-8') as handle:
js = json.load(handle)
self.assertEqual("BodySite", js["resourceType"])
return bodysite.BodySite(js)
def testBodySite1(self):
inst = self.instantiate_from("bodysite-example.json")
self.assertIsNotNone(inst, "Must have instantiated a BodySite instance")
self.implBodySite1(inst)
js = inst.as_json()
self.assertEqual("BodySite", js["resourceType"])
inst2 = bodysite.BodySite(js)
self.implBodySite1(inst2)
def implBodySite1(self, inst):
self.assertEqual(inst.code.coding[0].code, "53120007")
self.assertEqual(inst.code.coding[0].display, "Arm")
self.assertEqual(inst.code.coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.code.text, "Arm")
self.assertEqual(inst.description, "front of upper left arm directly below the tattoo")
self.assertEqual(inst.id, "example")
self.assertEqual(inst.identifier[0].system, "http://www.acmehosp.com/bodysites")
self.assertEqual(inst.identifier[0].use, "official")
self.assertEqual(inst.identifier[0].value, "12345")
self.assertEqual(inst.image[0].contentType, "image/png;base64")
self.assertEqual(inst.image[0].title, "ARM")
self.assertEqual(inst.modifier[0].coding[0].code, "419161000")
self.assertEqual(inst.modifier[0].coding[0].display, "Unilateral left")
self.assertEqual(inst.modifier[0].coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.modifier[0].text, "Left")
self.assertEqual(inst.modifier[1].coding[0].code, "261183002")
self.assertEqual(inst.modifier[1].coding[0].display, "Upper")
self.assertEqual(inst.modifier[1].coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.modifier[1].text, "Upper")
self.assertEqual(inst.modifier[2].coding[0].code, "255549009")
self.assertEqual(inst.modifier[2].coding[0].display, "Anterior")
self.assertEqual(inst.modifier[2].coding[0].system, "http://snomed.info/sct")
self.assertEqual(inst.modifier[2].text, "Anterior")
self.assertEqual(inst.text.status, "generated")
| [
"[email protected]"
] |