# Source file: KongBOy/kong_model2 /Exps_7_v3/doc3d/Ablation4_ch016_ep003_7_10/Gather3_W_fix3blk_C_change/train/pyr_3s/L7/step10_a.py
#############################################################################################################################################################################################################
#############################################################################################################################################################################################################
### Add kong_model2 to sys.path
import os
code_exe_path = os.path.realpath(__file__)                                   ### path of the script currently being executed
code_exe_path_element = code_exe_path.split("\\")                            ### split the path so we can find which level kong_model2 sits at
code_dir = "\\".join(code_exe_path_element[:-1])
kong_layer = code_exe_path_element.index("kong_model2")                      ### find which level kong_model2 sits at
kong_model2_dir = "\\".join(code_exe_path_element[:kong_layer + 1])          ### locate the kong_model2 dir
import sys                                                                   ### add kong_model2 to sys.path
sys.path.append(kong_model2_dir)
sys.path.append(code_dir)
# print(__file__.split("\\")[-1])
# print(" code_exe_path:", code_exe_path)
# print(" code_exe_path_element:", code_exe_path_element)
# print(" code_dir:", code_dir)
# print(" kong_layer:", kong_layer)
# print(" kong_model2_dir:", kong_model2_dir)
#############################################################################################################################################################################################################
kong_to_py_layer = len(code_exe_path_element) - 1 - kong_layer  ### the -1 in the middle converts a length into an index
# print(" kong_to_py_layer:", kong_to_py_layer)
if (kong_to_py_layer == 0): template_dir = ""
elif(kong_to_py_layer == 2): template_dir = code_exe_path_element[kong_layer + 1][0:]  ### [7:] used to strip the "step1x_" prefix; later decided meaningful names are fine unstripped, so changed to 0
elif(kong_to_py_layer == 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:]  ### [5:] used to strip the "mask_" prefix, which was only added because a Python module name cannot start with a digit; later decided the automatic ordering is acceptable, so changed to 0
elif(kong_to_py_layer > 3): template_dir = code_exe_path_element[kong_layer + 1][0:] + "/" + code_exe_path_element[kong_layer + 2][0:] + "/" + "/".join(code_exe_path_element[kong_layer + 3: -1])
# print(" template_dir:", template_dir) ### 舉例: template_dir: 7_mask_unet/5_os_book_and_paper_have_dtd_hdr_mix_bg_tv_s04_mae
#############################################################################################################################################################################################################
exp_dir = template_dir
#############################################################################################################################################################################################################
from step06_a_datas_obj import *
from step09_3side_L7 import *
from step10_a2_loss_info_obj import *
from step10_b2_exp_builder import Exp_builder
### Drop this experiment's dir from sys.path and purge the cached step09 modules, so that the
### cross-experiment imports below pick up their own step09 settings instead of the cached ones.
rm_paths = [path for path in sys.path if code_dir in path]
for rm_path in rm_paths: sys.path.remove(rm_path)
rm_modules = [module for module in sys.modules if "step09" in module]
for rm_module in rm_modules: del sys.modules[rm_module]
import Exps_7_v3.doc3d.Ablation4_ch016_ep003_7_10.W_w_M_to_C_pyr.pyr_3s.L7.step10_a as W_w_M_to_C_p20_pyr
from Exps_7_v3.doc3d.Ablation4_ch016_ep003_7_10.I_w_M_to_W_pyr.pyr_3s.L5.step10_a import ch032_1side_6__2side_6__3side_6__ep010 as I_w_M_to_W_p20_3s_L5_Good
#############################################################################################################################################################################################################
'''
exp_dir is the name of the folder one level above result_dir! A nested exp_dir is fine too~
For example, if exp_dir = "6_mask_unet/some_name_you_pick", then every result_dir lives under:
6_mask_unet/some_name_you_pick/result_a
6_mask_unet/some_name_you_pick/result_b
6_mask_unet/some_name_you_pick/...
'''
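### Following the docstring above (a sketch of the layout, not something this file creates directly): since
### exp_dir = template_dir here, this script's result folders would land under
### Exps_7_v3/doc3d/Ablation4_ch016_ep003_7_10/Gather3_W_fix3blk_C_change/train/pyr_3s/L7/result_...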
use_db_obj = type8_blender_kong_doc3d_v2
use_loss_obj = [mae_s001_sobel_k9_s001_loss_info_builder.set_loss_target("UNet_Wz").copy(), mae_s001_sobel_k9_s001_loss_info_builder.set_loss_target("UNet_Wy").copy(), mae_s001_sobel_k9_s001_loss_info_builder.set_loss_target("UNet_Wx").copy(), mae_s001_sobel_k9_s001_loss_info_builder.set_loss_target("UNet_Cx").copy(), mae_s001_sobel_k9_s001_loss_info_builder.set_loss_target("UNet_Cy").copy()]  ### the z, y, x order follows the correspondence in step07_b_0b_Multi_UNet
#############################################################
### Build an empty Exp_builder so result_analyze can draw blank figures
empty = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end=ch032_pyramid_1side_1__2side_1__3side_1_and_1s6_2s6.kong_model.model_describe) .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_result_name(result_name="為了resul_analyze畫空白的圖,建一個empty的 Exp_builder")
#############################################################
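### Readability sketch (comments only, not executed) of the one-line builder pattern used below; the setter
### names and arguments are exactly the ones chained on each line, assumed to return the builder itself:
# ch032_1side_1__2side_1__3side_1 = (
#     Exp_builder()
#     .set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_and_1s6_2s6, use_loss_obj,
#                exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s1__2s1__3s1")
#     .set_train_args(epochs=1)
#     .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900)
#     .set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1))
#     .set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_1__2side_1__3side_1,
#                                               I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good)
#     .set_result_name(result_name="")
# )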
ch032_1side_1__2side_1__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_1__2side_1__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s1__2s1__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_1__2side_1__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_2__2side_1__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_1__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s2__2s1__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_2__2side_1__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s2__2s2__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_2__2side_2__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_2__2side_2__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_2__2side_2__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s2__2s2__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_2__2side_2__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_1__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_1__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s1__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_1__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s2__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_2__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_2__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_2__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s2__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_2__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s3__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_3__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s3__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_3__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_3__2side_3__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_3__2side_3__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s3__2s3__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_3__2side_3__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_1__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_1__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s1__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_1__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s2__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_2__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_2__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_2__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s2__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_2__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s3__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_3__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s3__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_3__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_3__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_3__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s3__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_3__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_4__2side_4__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_4__2side_4__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s4__2s4__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_4__2side_4__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_1__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_1__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s1__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_1__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s2__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_2__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_2__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_2__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s2__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_2__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s3__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_3__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s3__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_3__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_3__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_3__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s3__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_3__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_4__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_4__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s4__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_4__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_5__2side_5__3side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_5__2side_5__3side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s5__2s5__3s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_5__2side_5__3side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_1__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_1__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s1__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_1__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s2__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_2__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_2__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_2__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s2__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_2__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s3__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_3__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s3__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_3__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_3__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_3__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s3__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_3__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_4__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_4__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s4__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_4__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_5__3side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_5__3side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s5__3s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_5__3side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_6__2side_6__3side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_6__2side_6__3side_6_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s6__2s6__3s6") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_6__2side_6__3side_6, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_1__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_1__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s1__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_1__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_2__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_2__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s2__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_2__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_2__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_2__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s2__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_2__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_3__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_3__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s3__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_3__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_3__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_3__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s3__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_3__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_3__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_3__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s3__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_3__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_4__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_4__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s4__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_4__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_5__3side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_5__3side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s5__3s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_5__3side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_6__3side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_6__3side_6_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s6__3s6") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_6__3side_6, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_6_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s6") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_6, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_7__2side_7__3side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_7__2side_7__3side_7_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s7__2s7__3s7") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_7__2side_7__3side_7, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_1__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_1__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s1__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_1__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_2__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_2__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s2__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_2__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_2__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_2__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s2__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_2__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_3__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_3__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s3__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_3__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_3__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_3__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s3__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_3__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_3__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_3__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s3__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_3__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_4__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_4__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s4__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_4__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_4__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_4__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s4__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_4__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_4__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_4__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s4__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_4__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_4__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_4__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s4__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_4__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_5__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_5__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s5__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_5__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_5__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_5__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s5__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_5__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_5__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_5__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s5__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_5__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_5__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_5__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s5__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_5__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_5__3side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_5__3side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s5__3s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_5__3side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_6__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_6__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s6__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_6__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_6__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_6__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s6__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_6__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_6__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_6__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s6__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_6__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_6__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_6__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s6__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_6__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_6__3side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_6__3side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s6__3s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_6__3side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_6__3side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_6__3side_6_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s6__3s6") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_6__3side_6, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_7__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_7__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s7__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_7__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_7__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_7__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s7__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_7__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_7__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_7__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s7__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_7__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_7__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_7__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s7__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_7__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_7__3side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_7__3side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s7__3s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_7__3side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_7__3side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_7__3side_6_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s7__3s6") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_7__3side_6, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_7__3side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_7__3side_7_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s7__3s7") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_7__3side_7, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_8__3side_1 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_8__3side_1_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s8__3s1") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_8__3side_1, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_8__3side_2 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_8__3side_2_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s8__3s2") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_8__3side_2, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_8__3side_3 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_8__3side_3_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s8__3s3") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_8__3side_3, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_8__3side_4 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_8__3side_4_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s8__3s4") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_8__3side_4, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_8__3side_5 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_8__3side_5_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s8__3s5") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_8__3side_5, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_8__3side_6 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_8__3side_6_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s8__3s6") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_8__3side_6, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_8__3side_7 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_8__3side_7_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s8__3s7") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_8__3side_7, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
ch032_1side_8__2side_8__3side_8 = Exp_builder().set_basic("train", use_db_obj, ch032_pyramid_1side_8__2side_8__3side_8_and_1s6_2s6, use_loss_obj, exp_dir=exp_dir, code_exe_path=code_exe_path, describe_end="ch032_1s8__2s8__3s8") .set_train_args(epochs= 1) .set_train_iter_args(it_see_fq=900 * 5, it_save_fq=900 * 5, it_down_step="half", it_down_fq=900).set_train_in_gt_use_range(use_in_range=Range(0, 1), use_gt_range=Range(0, 1)).set_multi_model_reload_exp_builders_dict(W_to_Cx_Cy=W_w_M_to_C_p20_pyr.ch032_1side_8__2side_8__3side_8, I_to_Wx_Wy_Wz=I_w_M_to_W_p20_3s_L5_Good).set_result_name(result_name="")
#############################################################
if(__name__ == "__main__"):
print("build exps cost time:", time.time() - start_time)
if len(sys.argv) < 2:
############################################################################################################
        ### Press F5 directly, or run "python step10_b1_exp_obj_load_and_train_and_test.py" with nothing appended, so execution does not fall through to the code below that is meant for step10_b_subprocess.py ~~~
ch032_1side_4__2side_3__3side_2.build().run()
# print('no argument')
sys.exit()
    ### The part below is for step10_b_subprocess.py; it is equivalent to running "python step10_b1_exp_obj_load_and_train_and_test.py some_exp.build().run()" from the command line
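    # e.g. (illustrative invocation only): python step10_b1_exp_obj_load_and_train_and_test.py "ch032_1side_8__2side_8__3side_8.build().run()"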
eval(sys.argv[1])
| [
"[email protected]"
] | |
256a7ddfba37eb808339ceb2846b338beba828fe | 30e8e9365725fbdd7b0ee6660595eb8fa97b4a16 | /Semi-Supervised Learning_GAN/code.py | a17a4879c9e6758d1716dbf6fe64f475233c9117 | [] | no_license | moileehyeji/Discussion | edf0945c75a45998b13f4a4fa214587ed9bc5a75 | d502f45edadb178f14a21201707a6b1651932499 | refs/heads/main | 2023-05-06T15:15:00.567930 | 2021-06-04T05:59:20 | 2021-06-04T05:59:20 | 373,735,724 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,125 | py |
# https://github.com/eriklindernoren/PyTorch-GAN/blob/master/implementations/sgan/sgan.py
import argparse
import os
import numpy as np
import math
import torchvision.transforms as transforms
from torchvision.utils import save_image
from torch.utils.data import DataLoader
from torchvision import datasets
from torch.autograd import Variable
import torch.nn as nn
import torch.nn.functional as F
import torch
os.makedirs("images", exist_ok=True)
parser = argparse.ArgumentParser()
parser.add_argument("--n_epochs", type=int, default=5, help="number of epochs of training")
parser.add_argument("--batch_size", type=int, default=64, help="size of the batches")
parser.add_argument("--lr", type=float, default=0.0002, help="adam: learning rate")
parser.add_argument("--b1", type=float, default=0.5, help="adam: decay of first order momentum of gradient")
parser.add_argument("--b2", type=float, default=0.999, help="adam: decay of first order momentum of gradient")
parser.add_argument("--n_cpu", type=int, default=8, help="number of cpu threads to use during batch generation")
parser.add_argument("--latent_dim", type=int, default=100, help="dimensionality of the latent space")
parser.add_argument("--num_classes", type=int, default=10, help="number of classes for dataset")
parser.add_argument("--img_size", type=int, default=32, help="size of each image dimension")
parser.add_argument("--channels", type=int, default=1, help="number of image channels")
parser.add_argument("--sample_interval", type=int, default=400, help="interval between image sampling")
opt = parser.parse_args()
print(opt)
cuda = True if torch.cuda.is_available() else False
def weights_init_normal(m):
classname = m.__class__.__name__
if classname.find("Conv") != -1:
torch.nn.init.normal_(m.weight.data, 0.0, 0.02)
elif classname.find("BatchNorm") != -1:
torch.nn.init.normal_(m.weight.data, 1.0, 0.02)
torch.nn.init.constant_(m.bias.data, 0.0)
class Generator(nn.Module):
def __init__(self):
super(Generator, self).__init__()
self.label_emb = nn.Embedding(opt.num_classes, opt.latent_dim)
self.init_size = opt.img_size // 4 # Initial size before upsampling
self.l1 = nn.Sequential(nn.Linear(opt.latent_dim, 128 * self.init_size ** 2))
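        # l1 projects the latent vector onto a 128-channel feature map of
        # spatial size init_size x init_size (img_size // 4); conv_blocks then
        # upsamples it twice (4x overall) back to the full img_size resolution.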
self.conv_blocks = nn.Sequential(
nn.BatchNorm2d(128),
nn.Upsample(scale_factor=2),
nn.Conv2d(128, 128, 3, stride=1, padding=1),
nn.BatchNorm2d(128, 0.8),
nn.LeakyReLU(0.2, inplace=True),
nn.Upsample(scale_factor=2),
nn.Conv2d(128, 64, 3, stride=1, padding=1),
nn.BatchNorm2d(64, 0.8),
nn.LeakyReLU(0.2, inplace=True),
nn.Conv2d(64, opt.channels, 3, stride=1, padding=1),
nn.Tanh(),
)
def forward(self, noise):
out = self.l1(noise)
out = out.view(out.shape[0], 128, self.init_size, self.init_size)
img = self.conv_blocks(out)
return img
class Discriminator(nn.Module):
def __init__(self):
super(Discriminator, self).__init__()
def discriminator_block(in_filters, out_filters, bn=True):
"""Returns layers of each discriminator block"""
block = [nn.Conv2d(in_filters, out_filters, 3, 2, 1), nn.LeakyReLU(0.2, inplace=True), nn.Dropout2d(0.25)]
if bn:
block.append(nn.BatchNorm2d(out_filters, 0.8))
return block
self.conv_blocks = nn.Sequential(
*discriminator_block(opt.channels, 16, bn=False),
*discriminator_block(16, 32),
*discriminator_block(32, 64),
*discriminator_block(64, 128),
)
# The height and width of downsampled image
ds_size = opt.img_size // 2 ** 4
# Output layers
self.adv_layer = nn.Sequential(nn.Linear(128 * ds_size ** 2, 1), nn.Sigmoid())
        self.aux_layer = nn.Sequential(nn.Linear(128 * ds_size ** 2, opt.num_classes + 1), nn.Softmax(dim=1))  # dim=1: softmax over the class dimension
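        # The auxiliary head outputs opt.num_classes + 1 probabilities: the real
        # class labels plus one extra "generated/fake" class, which is what makes
        # this discriminator usable for semi-supervised learning (SGAN).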
def forward(self, img):
out = self.conv_blocks(img)
out = out.view(out.shape[0], -1)
validity = self.adv_layer(out)
label = self.aux_layer(out)
return validity, label
# Loss functions
adversarial_loss = torch.nn.BCELoss()
auxiliary_loss = torch.nn.CrossEntropyLoss()
# Initialize generator and discriminator
generator = Generator()
discriminator = Discriminator()
if cuda:
generator.cuda()
discriminator.cuda()
adversarial_loss.cuda()
auxiliary_loss.cuda()
# Initialize weights
generator.apply(weights_init_normal)
discriminator.apply(weights_init_normal)
# Configure data loader
# os.makedirs("../../data/mnist", exist_ok=True)
dataloader = torch.utils.data.DataLoader(
datasets.MNIST(
"../../data/mnist",
train=True,
download=True,
transform=transforms.Compose(
[transforms.Resize(opt.img_size), transforms.ToTensor(), transforms.Normalize([0.5], [0.5])]
),
),
batch_size=opt.batch_size,
shuffle=True,
)
# Optimizers
optimizer_G = torch.optim.Adam(generator.parameters(), lr=opt.lr, betas=(opt.b1, opt.b2))
optimizer_D = torch.optim.Adam(discriminator.parameters(), lr=opt.lr, betas=(opt.b1, opt.b2))
FloatTensor = torch.cuda.FloatTensor if cuda else torch.FloatTensor
LongTensor = torch.cuda.LongTensor if cuda else torch.LongTensor
# ----------
# Training
# ----------
for epoch in range(opt.n_epochs):
for i, (imgs, labels) in enumerate(dataloader):
batch_size = imgs.shape[0]
# Adversarial ground truths
valid = Variable(FloatTensor(batch_size, 1).fill_(1.0), requires_grad=False)
fake = Variable(FloatTensor(batch_size, 1).fill_(0.0), requires_grad=False)
fake_aux_gt = Variable(LongTensor(batch_size).fill_(opt.num_classes), requires_grad=False)
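        # Generated samples use the extra class index (opt.num_classes) as their
        # auxiliary ground-truth label.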
# Configure input
real_imgs = Variable(imgs.type(FloatTensor))
labels = Variable(labels.type(LongTensor))
# -----------------
# Train Generator
# -----------------
optimizer_G.zero_grad()
# Sample noise and labels as generator input
z = Variable(FloatTensor(np.random.normal(0, 1, (batch_size, opt.latent_dim))))
# Generate a batch of images
gen_imgs = generator(z)
# Loss measures generator's ability to fool the discriminator
validity, _ = discriminator(gen_imgs)
g_loss = adversarial_loss(validity, valid)
g_loss.backward()
optimizer_G.step()
# ---------------------
# Train Discriminator
# ---------------------
optimizer_D.zero_grad()
# Loss for real images
real_pred, real_aux = discriminator(real_imgs)
d_real_loss = (adversarial_loss(real_pred, valid) + auxiliary_loss(real_aux, labels)) / 2
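        # Real images contribute both the adversarial (real/fake) term and the
        # supervised classification term on their true labels.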
# Loss for fake images
fake_pred, fake_aux = discriminator(gen_imgs.detach())
d_fake_loss = (adversarial_loss(fake_pred, fake) + auxiliary_loss(fake_aux, fake_aux_gt)) / 2
# Total discriminator loss
d_loss = (d_real_loss + d_fake_loss) / 2
# Calculate discriminator accuracy
pred = np.concatenate([real_aux.data.cpu().numpy(), fake_aux.data.cpu().numpy()], axis=0)
gt = np.concatenate([labels.data.cpu().numpy(), fake_aux_gt.data.cpu().numpy()], axis=0)
d_acc = np.mean(np.argmax(pred, axis=1) == gt)
d_loss.backward()
optimizer_D.step()
print(
"[Epoch %d/%d] [Batch %d/%d] [D loss: %f, acc: %d%%] [G loss: %f]"
% (epoch, opt.n_epochs, i, len(dataloader), d_loss.item(), 100 * d_acc, g_loss.item())
)
batches_done = epoch * len(dataloader) + i
if batches_done % opt.sample_interval == 0:
save_image(gen_imgs.data[:25], "images/%d.png" % batches_done, nrow=5, normalize=True) | [
"[email protected]"
] | |
a74b092a468de49c8bc506b98a8a0ff2bf39b929 | 2ce18a0d8e106065b57136927e3e73b4fa82f8fa | /list-comprehension/changing-generators.py | 29c62ce4f684f20d09f39b639c0d5fa5b0a8ddf9 | [] | no_license | ColinFendrick/python-data-science-toolbox | 3eac02f3e65cf7e63f7c297f06a35ee7cbe92216 | 83a3d4614ef825302f1881b5b9a59e65db583a00 | refs/heads/master | 2021-01-02T19:06:18.395930 | 2020-02-17T17:07:44 | 2020-02-17T17:07:44 | 239,757,083 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 151 | py | lannister = ['cersei', 'jaime', 'tywin', 'tyrion', 'joffrey']
lengths = (len(person) for person in lannister)
for value in lengths:
print(value)
| [
"[email protected]"
] | |
5b96b98122a2782bb9492808fa86015dbce11b7a | 8b5d68c9398186cae64dbcc5b293d62d69e1921d | /src/python/knowledge_base/readers/structured_data_reader.py | 7036de83e51c53d32b65ca69040eabecd3cc8e46 | [
"Apache-2.0"
] | permissive | reynoldsm88/Hume | ec99df21e9b9651ec3cacfb8655a510ba567abc9 | 79a4ae3b116fbf7c9428e75a651753833e5bc137 | refs/heads/master | 2020-07-24T21:28:39.709145 | 2019-07-10T15:43:24 | 2019-07-10T15:43:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 295 | py | import json
class StructuredDataReader:
def __init__(self):
pass
def read(self, kb, structured_kb_file):
print "StructuredDataReader READ"
with open(structured_kb_file) as f:
structured_kb = json.load(f)
kb.structured_kb = structured_kb
| [
"[email protected]"
] | |
1a329ea8b2e8fde9c9df6ee1fd947b58d49244a3 | f42affa951cd292e42fa47b4f4c5bfdab5c21eeb | /paddle.py | 5a3c751610cf1e19d060b380d81001011fc1d8fc | [] | no_license | thepixelboy/pong-game | 27e5432c9ee0080d2db3f2909591a0d2ef8d35c5 | d79fea5f8fd85dc06b906375587514a317d32bae | refs/heads/main | 2023-05-06T22:22:03.107087 | 2021-05-30T12:11:50 | 2021-05-30T12:11:50 | 372,206,257 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 577 | py | from turtle import Turtle
DEFAULT_MOVE = 20
class Paddle(Turtle):
def __init__(self, position):
super().__init__()
self.position = position
self.create_paddle()
def create_paddle(self):
self.shape("square")
self.color("white")
self.penup()
self.shapesize(stretch_wid=5, stretch_len=1)
self.goto(self.position)
def go_up(self):
new_y_position = self.ycor() + DEFAULT_MOVE
self.goto(self.xcor(), new_y_position)
def go_down(self):
new_y_position = self.ycor() - DEFAULT_MOVE
self.goto(self.xcor(), new_y_position) | [
"[email protected]"
] | |
d029186d44f62f98b226e4323b39b616d5f990a0 | fb97ccbd6aa0933f991c429c0e30081ce0f1fd90 | /Python/_interview_cake/9_valid_bst.py | 596335f493c2f0de60817cd5c0c1ec068d7cae43 | [] | no_license | 01-Jacky/PracticeProblems | a6c9b1dabc794ca52624870e48dcb84b1b69af67 | 5714fdb2d8a89a68d68d07f7ffd3f6bcff5b2ccf | refs/heads/master | 2022-03-23T12:24:13.834902 | 2019-12-31T08:11:19 | 2019-12-31T08:11:19 | 81,617,066 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,111 | py | """
Validate a BST
1) Track (min, max) bounds while traversing:
   - the maximum of the left subtree must be less than the root's value
   - the minimum of the right subtree must be greater than the root's value
"""
def is_bst(root, min=float('-inf'), max=float('inf')):
if root is None:
return True
return min < root.value < max and \
is_bst(root.left, min, root.value) and \
is_bst(root.right, root.value, max)
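# Why bounds are needed (illustrative example, not from the original notes):
# a node can satisfy the local parent/child ordering yet still violate the BST
# property against an ancestor, e.g. 2 as the left child of 8 in the right
# subtree of 5 -- locally 2 < 8, but 2 must also be greater than 5.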
def is_binary_search_tree(root):
node_and_bounds_stack = [(root, -float('inf'), float('inf'))]
# depth-first traversal
while len(node_and_bounds_stack):
node, lower_bound, upper_bound = node_and_bounds_stack.pop()
if (node.value <= lower_bound) or (node.value >= upper_bound):
return False
if node.left: # this node must be less than the current node
node_and_bounds_stack.append((node.left, lower_bound, node.value))
if node.right: # this node must be greater than the current node
node_and_bounds_stack.append((node.right, node.value, upper_bound))
# if none of the nodes were invalid, return true (at this point we have checked all nodes)
return True | [
"[email protected]"
] | |
7a460d9abfd96d7fe1447f44197a372f74d342bc | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/verbs/_overcompensating.py | 2a2eb667f086df830e2666df3c95521102fec4ca | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 296 | py |
from xai.brain.wordbase.verbs._overcompensate import _OVERCOMPENSATE
# class header
class _OVERCOMPENSATING(_OVERCOMPENSATE, ):
def __init__(self,):
_OVERCOMPENSATE.__init__(self)
self.name = "OVERCOMPENSATING"
self.specie = 'verbs'
self.basic = "overcompensate"
self.jsondata = {}
| [
"[email protected]"
] | |
59e619a9fa42c03d894ec74a465d10095094baeb | b4f487228db96114c52750c1cd72a7119230526a | /uliweb/i18n/pygettext.py | 6569b9a317aa089a0a40e0f8f28fdd80d7f930dc | [
"BSD-2-Clause"
] | permissive | limodou/uliweb3 | 6a400bd1c0047d8ecc8dbb3c16c01671f033153e | bca802c320bd09cc317b2db2574bd4bc7ca1d388 | refs/heads/master | 2023-03-04T18:35:53.921848 | 2023-02-25T08:16:42 | 2023-02-25T08:16:42 | 148,398,667 | 19 | 4 | BSD-2-Clause | 2023-02-25T08:16:43 | 2018-09-12T00:43:24 | Python | UTF-8 | Python | false | false | 32,831 | py | #! /usr/bin/env python
# coding=utf-8
# Originally written by Barry Warsaw <[email protected]>
#
# Minimally patched to make it even more xgettext compatible
# by Peter Funk <[email protected]>
#
# 2002-11-22 Jürgen Hermann <[email protected]>
# Added checks that _() only contains string literals, and
# command line args are resolved to module lists, i.e. you
# can now pass a filename, a module or package name, or a
# directory (including globbing chars, important for Win32).
# Made docstring fit in 80 chars wide displays using pydoc.
#
# 2010-06-12 Jan-Hendrik G?lner <[email protected]>
# Made it plural sensitive, added ngettext as default keyword.
# Any keyworded function that is being supplied > 2 arguments
# is treated like ngettext.
# Also added support for constructs like "_('foo' + 10*'bar')"
# by evaluating the whole expression.
# Code like _(foo(arg1, arg2) + "bar") does not work by design
# as that expression must be evaluated at runtime and this script
# only extracts static strings known before runtime.
# However it is possible to do things like
# "ngettext('World', 'Worlds', numWorlds)"
# as only the first two arguments are evaluated.
# Advanced version number from 1.5 to 1.6
#
from __future__ import print_function, absolute_import, unicode_literals
# for selftesting
import sys
sys.path.insert(0, '..')
try:
import fintl
_ = fintl.gettext
except ImportError:
_ = lambda s: s
from uliweb.utils.common import walk_dirs
from ..utils._compat import text_type, b, u
__doc__ = """pygettext -- Python equivalent of xgettext(1)
Many systems (Solaris, Linux, Gnu) provide extensive tools that ease the
internationalization of C programs. Most of these tools are independent of
the programming language and can be used from within Python programs.
Martin von Loewis' work[1] helps considerably in this regard.
There's one problem though; xgettext is the program that scans source code
looking for message strings, but it groks only C (or C++). Python
introduces a few wrinkles, such as dual quoting characters, triple quoted
strings, and raw strings. xgettext understands none of this.
Enter pygettext, which uses Python's standard tokenize module to scan
Python source code, generating .pot files identical to what GNU xgettext[2]
generates for C and C++ code. From there, the standard GNU tools can be
used.
A word about marking Python strings as candidates for translation. GNU
xgettext recognizes the following keywords: gettext, dgettext, dcgettext,
and gettext_noop. But those can be a lot of text to include all over your
code. C and C++ have a trick: they use the C preprocessor. Most
internationalized C source includes a #define for gettext() to _() so that
what has to be written in the source is much less. Thus these are both
translatable strings:
gettext("Translatable String")
_("Translatable String")
Python of course has no preprocessor so this doesn't work so well. Thus,
pygettext searches only for _() by default, but see the -k/--keyword flag
below for how to augment this.
[1] http://www.python.org/workshops/1997-10/proceedings/loewis.html
[2] http://www.gnu.org/software/gettext/gettext.html
NOTE: pygettext attempts to be option and feature compatible with GNU
xgettext where ever possible. However some options are still missing or are
not fully implemented. Also, xgettext's use of command line switches with
option arguments is broken, and in these cases, pygettext just defines
additional switches.
Usage: pygettext [options] inputfile ...
Options:
-a
--extract-all
Extract all strings.
-d name
--default-domain=name
Rename the default output file from messages.pot to name.pot.
-E
--escape
Replace non-ASCII characters with octal escape sequences.
-D
--docstrings
Extract module, class, method, and function docstrings. These do
not need to be wrapped in _() markers, and in fact cannot be for
Python to consider them docstrings. (See also the -X option).
-h
--help
Print this help message and exit.
-k word
--keyword=word
Keywords to look for in addition to the default set, which are:
%(DEFAULTKEYWORDS)s
You can have multiple -k flags on the command line.
-K
--no-default-keywords
Disable the default set of keywords (see above). Any keywords
explicitly added with the -k/--keyword option are still recognized.
--no-location
Do not write filename/lineno location comments.
-n
--add-location
Write filename/lineno location comments indicating where each
extracted string is found in the source. These lines appear before
each msgid. The style of comments is controlled by the -S/--style
option. This is the default.
-o filename
--output=filename
Rename the default output file from messages.pot to filename. If
filename is `-' then the output is sent to standard out.
-p dir
--output-dir=dir
Output files will be placed in directory dir.
-S stylename
--style stylename
Specify which style to use for location comments. Two styles are
supported:
Solaris # File: filename, line: line-number
GNU #: filename:line
The style name is case insensitive. GNU style is the default.
-v
--verbose
Print the names of the files being processed.
-V
--version
Print the version of pygettext and exit.
-w columns
--width=columns
Set width of output to columns.
-x filename
--exclude-file=filename
        Specify a file that contains a list of strings that are not to be
extracted from the input files. Each string to be excluded must
appear on a line by itself in the file.
-X filename
--no-docstrings=filename
Specify a file that contains a list of files (one per line) that
should not have their docstrings extracted. This is only useful in
conjunction with the -D option above.
If `inputfile' is -, standard input is read.
"""
import os
import imp
import sys
import glob
import time
import getopt
import token
import tokenize
__version__ = '1.6'
default_keywords = ['_', 'ngettext']
DEFAULTKEYWORDS = ', '.join(default_keywords)
EMPTYSTRING = ''
# The normal pot-file header. msgmerge and Emacs's po-mode work better if it's
# there.
pot_header = '''\
# SOME DESCRIPTIVE TITLE.
# Copyright (C) YEAR ORGANIZATION
# {First_Author}, YEAR.
#
msgid ""
msgstr ""
"Project-Id-Version: {Project_Id_Version}\\n"
"POT-Creation-Date: {time}\\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\\n"
"Last-Translator: {Last_Translator}\\n"
"Language-Team: {Language_Team}\\n"
"MIME-Version: 1.0\\n"
"Content-Type: text/plain; charset={Content_Type_Charset}\\n"
"Content-Transfer-Encoding: {Content_Transfer_Encoding}\\n"
"Plural-Forms: {Plural_Forms}\\n"
"Generated-By: pygettext.py {version}\\n"
'''
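# Placeholders in pot_header: {time} and {version} are filled in automatically
# by TokenEater.write(); the remaining keys (First_Author, Project_Id_Version,
# Last_Translator, Language_Team, Content_Type_Charset,
# Content_Transfer_Encoding, Plural_Forms) must be supplied via the `vars`
# dict (see extrace_files below), otherwise str.format raises a KeyError.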
def usage(code, msg=''):
print(__doc__ % globals(), file=sys.stderr)
if msg:
print(msg, file=sys.stderr)
sys.exit(code)
escapes = []
def make_escapes(pass_iso8859):
global escapes
# if pass_iso8859:
# # Allow iso-8859 characters to pass through so that e.g. 'msgid
# # "H?e"' would result not result in 'msgid "H\366he"'. Otherwise we
# # escape any character outside the 32..126 range.
# mod = 128
# else:
# mod = 256
# for i in range(256):
# if 32 <= (i % mod) <= 126:
# escapes.append(chr(i))
# else:
# escapes.append("\\%03o" % i)
# escapes[ord('\\')] = '\\\\'
# escapes[ord('\t')] = '\\t'
# escapes[ord('\r')] = '\\r'
# escapes[ord('\n')] = '\\n'
# escapes[ord('\"')] = '\\"'
__escapes__ = {}
__escapes__['\\'] = '\\\\'
__escapes__['\t'] = '\\t'
__escapes__['\r'] = '\\r'
__escapes__['\n'] = '\\n'
__escapes__['\"'] = '\\"'
def escape(s):
# global escapes
s = u(s)
r = []
for c in s:
r.append(__escapes__.get(c, c))
return EMPTYSTRING.join(r)
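# Note: escape() only rewrites backslash, tab, CR, LF and the double quote;
# every other character (including non-ASCII) passes through unchanged, so the
# generated .pot keeps the source encoding.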
def safe_eval(s):
# unwrap quotes, safely
return eval(s, {'__builtins__':{}}, {})
def normalize(s):
# This converts the various Python string types into a format that is
# appropriate for .po files, namely much closer to C style.
lines = s.split('\n')
if len(lines) == 1:
s = '"' + escape(s) + '"'
else:
if not lines[-1]:
del lines[-1]
lines[-1] = lines[-1] + '\n'
for i in range(len(lines)):
lines[i] = escape(lines[i])
lineterm = '\\n"\n"'
s = '""\n"' + lineterm.join(lines) + '"'
return s
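# e.g. normalize('Hello\nWorld\n') yields the multi-line po form
#     ""
#     "Hello\n"
#     "World\n"
# while a single-line string is simply wrapped in escaped double quotes.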
def containsAny(str, set):
"""Check whether 'str' contains ANY of the chars in 'set'"""
return 1 in [c in str for c in set]
def _visit_pyfiles(list, dirname, names):
"""Helper for getFilesForName()."""
# get extension for python source files
    if '_py_ext' not in globals():
global _py_ext
# _py_ext = [triple[0] for triple in imp.get_suffixes()
# if triple[2] == imp.PY_SOURCE][0]
_py_ext = [triple[0] for triple in imp.get_suffixes()
if triple[2] == imp.PY_SOURCE]
# don't recurse into CVS directories
if 'CVS' in names:
names.remove('CVS')
if '.svn' in names:
names.remove('.svn')
if '.git' in names:
names.remove('.git')
if 'static' in names:
names.remove('static')
# add all *.py files to list
list.extend(
[os.path.join(dirname, file) for file in names
if os.path.splitext(file)[1] in _py_ext]
)
def _get_modpkg_path(dotted_name, pathlist=None):
"""Get the filesystem path for a module or a package.
Return the file system path to a file for a module, and to a directory for
a package. Return None if the name is not found, or is a builtin or
extension module.
"""
# split off top-most name
parts = dotted_name.split('.', 1)
if len(parts) > 1:
# we have a dotted path, import top-level package
try:
file, pathname, description = imp.find_module(parts[0], pathlist)
if file: file.close()
except ImportError:
return None
# check if it's indeed a package
if description[2] == imp.PKG_DIRECTORY:
# recursively handle the remaining name parts
pathname = _get_modpkg_path(parts[1], [pathname])
else:
pathname = None
else:
# plain name
try:
file, pathname, description = imp.find_module(
dotted_name, pathlist)
if file:
file.close()
if description[2] not in [imp.PY_SOURCE, imp.PKG_DIRECTORY]:
pathname = None
except ImportError:
pathname = None
return pathname
def getFilesForName(name):
"""Get a list of module files for a filename, a module or package name,
or a directory.
"""
if not os.path.exists(name):
# check for glob chars
if containsAny(name, "*?[]"):
files = glob.glob(name)
alist = []
for file in files:
alist.extend(getFilesForName(file))
return alist
# try to find module or package
name = _get_modpkg_path(name)
if not name:
return []
if os.path.isdir(name):
# find all python files in directory
return list(walk_dirs(name, include_ext=['.py', '.ini', '.html'], file_only=True))
elif os.path.exists(name):
# a single file
return [name]
return []
class TokenEater:
def __init__(self, options, vars=None):
self.__options = options
self.__messages = {}
self.__state = self.__waiting
self.__args = []
self.__lineno = -1
self.__freshmodule = 1
self.__curfile = None
self.__vars = vars
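    # State machine overview: __waiting scans for a keyword (and, when enabled,
    # docstrings); a keyword switches to __keywordseen, an opening '(' moves to
    # __scanstring1, which accumulates the first argument; ',' hands over to
    # __scanstring2 for the plural form (ngettext-style calls), and ')' records
    # the collected string(s) through __addentry.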
def __call__(self, ttype, tstring, stup, etup, line):
# dispatch
## import token
## print >> sys.stderr, 'ttype:', token.tok_name[ttype], \
## 'tstring:', tstring
self.__state(ttype, tstring, stup[0])
def __waiting(self, ttype, tstring, lineno):
opts = self.__options
# Do docstring extractions, if enabled
if opts.docstrings and not opts.nodocstrings.get(self.__curfile):
# module docstring?
if self.__freshmodule:
if ttype == tokenize.STRING:
try:
s = safe_eval(tstring)
except Exception as e:
print((
'*** %(file)s:%(lineno)s: could not evaluate argument "%(arg)s"'
) % {
'arg': tstring,
'file': self.__curfile,
'lineno': self.__lineno
}, file=sys.stderr)
print(str(e), file=sys.stderr)
else:
self.__addentry([s], lineno, isdocstring=1)
self.__freshmodule = 0
elif ttype not in (tokenize.COMMENT, tokenize.NL):
self.__freshmodule = 0
return
# class docstring?
if ttype == tokenize.NAME and tstring in ('class', 'def'):
self.__state = self.__suiteseen
return
if ttype == tokenize.NAME and tstring in opts.keywords:
self.__state = self.__keywordseen
def __suiteseen(self, ttype, tstring, lineno):
# ignore anything until we see the colon
if ttype == tokenize.OP and tstring == ':':
self.__state = self.__suitedocstring
def __suitedocstring(self, ttype, tstring, lineno):
# ignore any intervening noise
if ttype == tokenize.STRING:
try:
s = safe_eval(tstring)
except Exception as e:
print((
'*** %(file)s:%(lineno)s: could not evaluate argument "%(arg)s"'
) % {
'arg': tstring,
'file': self.__curfile,
'lineno': self.__lineno
}, file=sys.stderr)
print(str(e), file=sys.stderr)
else:
self.__addentry(s, lineno, isdocstring=1)
self.__state = self.__waiting
elif ttype not in (tokenize.NEWLINE, tokenize.INDENT,
tokenize.COMMENT):
# there was no class docstring
self.__state = self.__waiting
def __keywordseen(self, ttype, tstring, lineno):
if ttype == tokenize.OP and tstring == '(':
self.__args = ['']
self.__lineno = lineno
self.__depth = 0
self.__state = self.__scanstring1
else:
self.__state = self.__waiting
def __scanstring1(self, ttype, tstring, lineno):
# handle first argument, which is supposed to be a string.
if ttype == tokenize.OP and tstring == ')':
# End of list of arguments for the current function call.
# If the argument list is empty (as in keyword()), ignore this call.
# otherwise evaluate the fragments we collected as the first
# argument and record its line number and update the list of
# messages seen. Reset state for the next batch.
if self.__args[-1]:
try:
s = safe_eval(self.__args[-1])
except Exception as e:
print((
'*** %(file)s:%(lineno)s: could not evaluate argument "%(arg)s"'
) % {
'arg': self.__args[-1],
'file': self.__curfile,
'lineno': self.__lineno
}, file=sys.stderr)
print(str(e), file=sys.stderr)
self.__state = self.__waiting
return
if type(s) == str or type(s) == text_type:
self.__args[-1] = s
self.__addentry(self.__args)
else:
print((
'*** %(file)s:%(lineno)s: argument is no str or unicode object "%(arg)s"'
) % {
'arg': s,
'file': self.__curfile,
'lineno': self.__lineno
}, file=sys.stderr)
self.__state = self.__waiting
elif ttype == tokenize.OP and tstring == ',':
# Start of the next argument.
try:
s = safe_eval(self.__args[-1])
except Exception as e:
print((
'*** %(file)s:%(lineno)s: could not evaluate argument "%(arg)s"'
) % {
'arg': self.__args[-1],
'file': self.__curfile,
'lineno': self.__lineno
}, file=sys.stderr)
print(str(e), file=sys.stderr)
self.__state = self.__waiting
return
if type(s) == str or type(s) == text_type:
self.__args[-1] = s
self.__args.append('') # next argument.
self.__state = self.__scanstring2
else:
print((
'*** %(file)s:%(lineno)s: argument 1 is no str or unicode object "%(arg)s"'
) % {
'arg': s,
'file': self.__curfile,
'lineno': self.__lineno
}, file=sys.stderr)
self.__state = self.__waiting
else:
# add string to current argument for later evaluation.
# no state change in this case.
self.__args[-1] += tstring
def __scanstring2(self, ttype, tstring, lineno):
# handle second argument, which is supposed to be a string.
if ttype == tokenize.OP and tstring == ')':
# End of list of arguments for the current function call.
# This is an error if we expect either one or three arguments but
# never two.
print((
'*** %(file)s:%(lineno)s: unexpected number of arguments (2)"'
) % {
'file': self.__curfile,
'lineno': self.__lineno
}, file=sys.stderr)
self.__state = self.__waiting
elif ttype == tokenize.OP and tstring == ',':
# Start of the next argument. We do not need to parse it, we only
# made sure it is there and now we assume this is a plural call.
try:
s = safe_eval(self.__args[-1])
except Exception as e:
print((
'*** %(file)s:%(lineno)s: could not evaluate argument "%(arg)s"'
) % {
'arg': self.__args[-1],
'file': self.__curfile,
'lineno': self.__lineno
}, file=sys.stderr)
print(str(e), file=sys.stderr)
self.__state = self.__waiting
return
            if type(s) == str or type(s) == text_type:
self.__args[-1] = s
self.__addentry(self.__args)
self.__state = self.__waiting
else:
print((
'*** %(file)s:%(lineno)s: argument 2 is no str or unicode object "%(arg)s"'
) % {
'arg': s,
'file': self.__curfile,
'lineno': self.__lineno
}, file=sys.stderr)
self.__state = self.__waiting
else:
# add string to current argument for later evaluation.
# no state change in this case.
self.__args[-1] += tstring
def __addentry(self, args, lineno=None, isdocstring=0):
isplural = 0
if len(args) > 1:
isplural = 1
if lineno is None:
lineno = self.__lineno
exclude = 0
if args[0] in self.__options.toexclude:
exclude = 1
if isplural:
if args[1] not in self.__options.toexclude:
# in case of plural, both strings must be in the toexclude list
# to exclude this entry.
exclude = 0
if not exclude:
entry = (self.__curfile, lineno)
# entries look like this:
# {('arg1','arg2') : {(filename,lineno) : <isdocstring>},
# ('arg1',) : {(filename,lineno) : <iscodstring>}}
# a key with len > 1 indicates plurals
self.__messages.setdefault(tuple(args[0:2]), {})[entry] = isdocstring
def set_filename(self, filename):
self.__curfile = filename
self.__freshmodule = 1
def write(self, fp):
options = self.__options
timestamp = time.strftime('%Y-%m-%d %H:%M')
# The time stamp in the header doesn't have the same format as that
# generated by xgettext...
d = self.__vars.copy()
d.update({'time': timestamp, 'version': __version__})
print(pot_header.format(**d), file=fp)
# Sort the entries. First sort each particular entry's keys, then
# sort all the entries by their first item.
reverse = {}
for k, v in self.__messages.items():
keys = sorted(v.keys())
reverse.setdefault(tuple(keys), []).append((k, v))
rkeys = reverse.keys()
for rkey in sorted(rkeys):
rentries = reverse[rkey]
for k, v in sorted(rentries):
# If the entry was gleaned out of a docstring, then add a
# comment stating so. This is to aid translators who may wish
# to skip translating some unimportant docstrings.
isdocstring = sum(v.values())
# k is the message string, v is a dictionary-set of (filename,
# lineno) tuples. We want to sort the entries in v first by
# file name and then by line number.
v = sorted(v.keys())
if not options.writelocations:
pass
# location comments are different b/w Solaris and GNU:
elif options.locationstyle == options.SOLARIS:
for filename, lineno in v:
d = {'filename': filename, 'lineno': lineno}
print((
'# File: %(filename)s, line: %(lineno)d') % d, file=fp)
elif options.locationstyle == options.GNU:
# fit as many locations on one line, as long as the
# resulting line length doesn't exceeds 'options.width'
locline = '#:'
for filename, lineno in v:
d = {'filename': filename, 'lineno': lineno}
s = (' %(filename)s:%(lineno)d') % d
if len(locline) + len(s) <= options.width:
locline = locline + s
else:
print(locline, file=fp)
locline = "#:" + s
if len(locline) > 2:
print(locline, file=fp)
if isdocstring:
print('#, docstring', file=fp)
print('msgid', normalize(k[0]), file=fp)
if len(k) > 1:
print('msgid_plural', normalize(k[1]), file=fp)
print('msgstr[0] ""', file=fp)
print('msgstr[1] ""\n', file=fp)
else:
print('msgstr ""\n', file=fp)
def main():
global default_keywords
try:
opts, args = getopt.getopt(
sys.argv[1:],
'ad:DEhk:Kno:p:S:Vvw:x:X:f:',
['extract-all', 'default-domain=', 'escape', 'help',
'keyword=', 'no-default-keywords',
'add-location', 'no-location', 'output=', 'output-dir=',
'style=', 'verbose', 'version', 'width=', 'exclude-file=',
'docstrings', 'no-docstrings',
])
except getopt.error as msg:
usage(1, msg)
# for holding option values
class Options:
# constants
GNU = 1
SOLARIS = 2
# defaults
extractall = 0 # FIXME: currently this option has no effect at all.
escape = 0
keywords = ['ugettext', 'ungettext']
outpath = ''
outfile = 'messages.pot'
writelocations = 1
locationstyle = GNU
verbose = 0
width = 78
excludefilename = ''
docstrings = 0
nodocstrings = {}
options = Options()
locations = {'gnu' : options.GNU,
'solaris' : options.SOLARIS,
}
files = ''
# parse options
for opt, arg in opts:
if opt in ('-h', '--help'):
usage(0)
elif opt in ('-a', '--extract-all'):
options.extractall = 1
elif opt in ('-d', '--default-domain'):
options.outfile = arg + '.pot'
elif opt in ('-E', '--escape'):
options.escape = 1
elif opt in ('-D', '--docstrings'):
options.docstrings = 1
elif opt in ('-k', '--keyword'):
options.keywords.append(arg)
elif opt in ('-K', '--no-default-keywords'):
default_keywords = []
elif opt in ('-n', '--add-location'):
options.writelocations = 1
elif opt in ('--no-location',):
options.writelocations = 0
elif opt in ('-S', '--style'):
options.locationstyle = locations.get(arg.lower())
if options.locationstyle is None:
usage(1, ('Invalid value for --style: %s') % arg)
elif opt in ('-o', '--output'):
options.outfile = arg
elif opt in ('-p', '--output-dir'):
options.outpath = arg
elif opt in ('-v', '--verbose'):
options.verbose = 1
elif opt in ('-V', '--version'):
print(('pygettext.py (xgettext for Python) %s') % __version__)
sys.exit(0)
elif opt in ('-w', '--width'):
try:
options.width = int(arg)
except ValueError:
usage(1, ('--width argument must be an integer: %s') % arg)
elif opt in ('-x', '--exclude-file'):
options.excludefilename = arg
elif opt in ('-X', '--no-docstrings'):
fp = open(arg)
try:
while 1:
line = fp.readline()
if not line:
break
options.nodocstrings[line[:-1]] = 1
finally:
fp.close()
elif opt == '-f':
files = arg
# calculate escapes
# make_escapes(options.escape)
# calculate all keywords
options.keywords.extend(default_keywords)
# initialize list of strings to exclude
if options.excludefilename:
try:
fp = open(options.excludefilename)
options.toexclude = fp.readlines()
fp.close()
except IOError:
print((
"Can't read --exclude-file: %s") % options.excludefilename, file=sys.stderr)
sys.exit(1)
else:
options.toexclude = []
# resolve args to module lists
expanded = []
for arg in args:
if arg == '-':
expanded.append(arg)
else:
expanded.extend(getFilesForName(arg))
args = expanded
if files:
lines = open(files).readlines()
for line in lines:
args.append(line.strip())
# slurp through all the files
eater = TokenEater(options)
for filename in args:
if filename == '-':
if options.verbose:
print ('Reading standard input')
fp = sys.stdin
closep = 0
else:
if options.verbose:
print(('Working on %s') % filename)
if filename.endswith('.html'):
from uliweb.core.template import template_file_py
from io import StringIO
text = template_file_py(filename, skip_extern=True, multilines=True)
fp = StringIO(text)
else:
fp = open(filename)
closep = 1
try:
eater.set_filename(filename)
try:
tokenize.tokenize(fp.readline, eater)
except tokenize.TokenError as e:
                print('%s: %s, line %d, column %d' % (
                    e.args[0], filename, e.args[1][0], e.args[1][1]), file=sys.stderr)
finally:
if closep:
fp.close()
# write the output
if options.outfile == '-':
fp = sys.stdout
closep = 0
else:
if options.outpath:
options.outfile = os.path.join(options.outpath, options.outfile)
path = os.path.dirname(options.outfile)
if path:
if not os.path.exists(path):
try:
os.makedirs(path)
                except OSError:
pass
fp = open(options.outfile, 'w')
closep = 1
try:
eater.write(fp)
finally:
if closep:
fp.close()
def extrace_files(files, outputfile, opts=None, vars=None):
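    # Extract translatable strings from the given files (plain Python/.ini sources
    # or uliweb .html templates) and write the collected messages to `outputfile`.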
global _py_ext
import logging
from io import StringIO, BytesIO
log = logging.getLogger('pygettext')
opts = opts or {}
vars = vars or {}
_py_ext = ['.py', '.ini', '.html']
class Options:
# constants
GNU = 1
SOLARIS = 2
# defaults
extractall = 0 # FIXME: currently this option has no effect at all.
escape = 0
keywords = ['_', 'gettext', 'ngettext', 'ungettext', 'ugettext']
outpath = ''
outfile = outputfile
writelocations = 1
locationstyle = GNU
verbose = 0
width = 78
excludefilename = ''
docstrings = 0
nodocstrings = {}
toexclude = []
options = Options()
# make_escapes(options.escape)
options.keywords.extend(default_keywords)
for k, v in opts.items():
if v and hasattr(options, k):
_v = getattr(options, k)
if isinstance(_v, list):
_v.extend(v)
elif isinstance(_v, dict):
_v.update(v)
else:
setattr(options, k, v)
if not isinstance(files, list):
files = getFilesForName(files)
eater = TokenEater(options, vars=vars)
for filename in files:
if options.verbose:
print(('Working on %s') % filename)
if not os.path.exists(filename):
continue
if filename.endswith('.html'):
from uliweb.core import template
from uliweb.core.template import template_file_py
text = template_file_py(filename, skip_extern=True, log=log, multilines=True)
fp = BytesIO(b(text))
closep = 0
else:
fp = BytesIO(b(open(filename).read()))
closep = 1
try:
eater.set_filename(filename)
try:
for v in tokenize.tokenize(fp.readline):
eater(*v)
except tokenize.TokenError as e:
            print('%s: %s, line %d, column %d' % (
                e.args[0], filename, e.args[1][0], e.args[1][1]), file=sys.stderr)
finally:
if closep:
fp.close()
if options.outfile == '-':
fp = sys.stdout
closep = 0
else:
if options.outpath:
options.outfile = os.path.join(options.outpath, options.outfile)
path = os.path.dirname(options.outfile)
if path:
if not os.path.exists(path):
try:
os.makedirs(path)
                except OSError:
pass
fp = open(options.outfile, 'w')
closep = 1
try:
eater.write(fp)
finally:
if closep:
fp.close()
if __name__ == '__main__':
main()
# some more test strings
# _(u'a unicode string')
# # this one creates a warning
# _('*** Seen unexpected token "%(token)s"') % {'token': 'test'}
# _('more' 'than' 'one' 'string')
| [
"[email protected]"
] | |
d99d576a058ef5956106984d6bfadfa650d180fb | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03167/s367868270.py | 31abb3c30c5fcaa1420f7b86a38e2c7adaa479cf | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 541 | py | from collections import deque
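# Count the paths from the top-left to the bottom-right cell of an h x w grid,
# moving only right or down and avoiding '#' cells; the answer is printed mod 10**9 + 7.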
h,w=map(int,input().split())
maze=[[i for i in input()] for _ in range(h)]
que=deque([[0,0]])
visited=[[0 for _ in range(w)] for _ in range(h)]
visited[0][0]=1
while que:
n=que.popleft()
x,y=n[0],n[1]
    # stop once the goal cell (column w-1, row h-1) has been taken off the queue
    if n == [w-1, h-1]:
        break
for i, j in [(1,0), (0,1)]:
if (x+i >=w) or (y+j >=h) or maze[y+j][x+i] == '#':
continue
if visited[y+j][x+i] == 0:
que.append([x+i,y+j])
visited[y+j][x+i] += visited[y][x]
print(visited[h-1][w-1]%(10**9+7))
| [
"[email protected]"
] | |
81fe7eadd2418caa75ad8188bf1b5777398c7eb8 | 24f664aa2344d4f5d5e7b048ac4e85231715c4c8 | /datasets/github/scrape_repos/indexer.py | dd7a16e3b4940538eab982c9b84e8157e3e56d50 | [] | no_license | speycode/clfuzz | 79320655e879d1e0a06a481e8ec2e293c7c10db7 | f2a96cf84a7971f70cb982c07b84207db407b3eb | refs/heads/master | 2020-12-05T13:44:55.486419 | 2020-01-03T14:14:03 | 2020-01-03T14:15:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,203 | py | # Copyright 2018, 2019 Chris Cummins <[email protected]>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Index ContentFiles from cloned GitHub repos."""
import multiprocessing
import os
import pathlib
import random
from datasets.github.scrape_repos import github_repo
from datasets.github.scrape_repos.preprocessors import preprocessors
from datasets.github.scrape_repos.proto import scrape_repos_pb2
from labm8.py import app
from labm8.py import humanize
from labm8.py import pbutil
FLAGS = app.FLAGS
app.DEFINE_integer(
"indexer_processes", os.cpu_count(), "The number of indexer processes to run."
)
app.DEFINE_string("clone_list", None, "The path to a LanguageCloneList file.")
def ImportFromLanguage(
language: scrape_repos_pb2.LanguageToClone, pool: multiprocessing.Pool
) -> None:
"""Import contentfiles from a language specification.
Args:
language: The language to import.
pool: A multiprocessing pool.
Raises:
ValueError: If importer field not set.
"""
if not language.importer:
raise ValueError("LanguageToClone.importer field not set")
app.Log(1, "Enumerating all repos ...")
all_repos = [
github_repo.GitHubRepo(pathlib.Path(language.destination_directory / f))
for f in pathlib.Path(language.destination_directory).iterdir()
if f.name.endswith(".pbtxt")
]
app.Log(1, "Pruning indexed repos ...")
num_repos = len(all_repos)
repos_to_import = [repo for repo in all_repos if not repo.IsIndexed()]
num_todo = len(repos_to_import)
num_pruned = num_repos - num_todo
random.shuffle(repos_to_import)
app.Log(
1,
"Importing %s of %s %s repos ...",
humanize.Commas(num_todo),
humanize.Commas(num_repos),
language.language.capitalize(),
)
for i, repo in enumerate(repos_to_import):
repo.Index(
list(language.importer),
pool,
github_repo.IndexProgress(num_pruned + i, num_repos),
)
def main(argv):
"""Main entry point."""
if len(argv) > 1:
raise app.UsageError("Unknown arguments '{}'".format(", ".join(argv[1:])))
clone_list_path = pathlib.Path(FLAGS.clone_list or "")
if not clone_list_path.is_file():
raise app.UsageError("--clone_list is not a file.")
clone_list = pbutil.FromFile(
clone_list_path, scrape_repos_pb2.LanguageCloneList()
)
# Error early if the config contains invalid preprocessors.
for language in clone_list.language:
for importer in language.importer:
[preprocessors.GetPreprocessorFunction(p) for p in importer.preprocessor]
pool = multiprocessing.Pool(FLAGS.indexer_processes)
for language in clone_list.language:
ImportFromLanguage(language, pool)
if __name__ == "__main__":
app.RunWithArgs(main)
| [
"[email protected]"
] | |
646aedf7a130c27300fb9f4a0e1e999385f86318 | f4aa1885d4121e131c2a580183c6312aeefa8147 | /ch12/likes_app_virtualenv/src/django-likes/likes/test_utils/test_app/apps.py | fc04070e8336f17e3b2402586653623e6bb51f67 | [
"MIT"
] | permissive | PacktPublishing/Django-3-Web-Development-Cookbook-Fourth-Edition | 8f09d1ea9b13e8a66fc489fc09c9a5ee8f9968cf | 9371e0ea6f4dc61567bf28299cf57146519e274c | refs/heads/master | 2023-02-20T02:36:51.226985 | 2023-01-30T08:39:30 | 2023-01-30T08:39:30 | 201,903,680 | 189 | 117 | MIT | 2023-02-10T22:45:42 | 2019-08-12T09:54:54 | Python | UTF-8 | Python | false | false | 90 | py | from django.apps import AppConfig
class TestAppConfig(AppConfig):
name = 'test_app'
| [
"[email protected]"
] | |
ab575baf490fda95031b2f5688a47b4869525d35 | 7d172bc83bc61768a09cc97746715b8ec0e13ced | /odoo/migrations/0006_auto_20170628_0402.py | 09a34635f361cf04be7b163f3380f627c20f235a | [] | no_license | shivam1111/jjuice | a3bcd7ee0ae6647056bdc62ff000ce6e6af27594 | 6a2669795ed4bb4495fda7869eeb221ed6535582 | refs/heads/master | 2020-04-12T05:01:27.981792 | 2018-11-08T13:00:49 | 2018-11-08T13:00:49 | 81,114,622 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 406 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.1 on 2017-06-28 04:02
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('odoo', '0005_auto_20170618_1356'),
]
operations = [
migrations.AlterModelTable(
name='promotioncodes',
table='promotion_codes',
),
]
| [
"[email protected]"
] | |
ace37ff10fc593ff551992e0b65900a9501b6d8a | e53c7d270e26bd0fac9dedadff9b4a4ff99110ec | /posts/views.py | 7d49db83c9a42fea1d31c85f873eff532ba7c0cb | [] | no_license | kydzoster/django-message_board | 34b33c0c240bd1dbb21bb0500db791411cca8cc6 | df8f038fc94b02e9ec9c51b8aab8307e1bc75848 | refs/heads/master | 2022-08-01T09:54:13.686456 | 2020-05-27T14:17:34 | 2020-05-27T14:17:34 | 267,337,171 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | from django.shortcuts import render
# Create your views here.
from django.views.generic import ListView
from .models import Post
class HomePageView(ListView):
model = Post
template_name = 'home.html'
context_object_name = 'all_posts_list' | [
"[email protected]"
] | |
d0ae70451d70c0b7ffb35207c06faf07fc9c01d9 | 5801d65a93670ee89fc92fc59c3948765f8c028f | /loan_management/loan_management/doctype/customer_expenses/customer_expenses.py | 7c1e3f0b7e1fa4eb980bd78431ed0448b4f6de7a | [
"MIT"
] | permissive | staumoepeau/customer_loan | a9205476aa4646ba08f8531c27ecd43a21165f12 | bb9f42160bc1e17085f000b15810892337dd0465 | refs/heads/master | 2021-01-22T04:23:16.804892 | 2018-11-27T21:09:28 | 2018-11-27T21:09:28 | 92,459,369 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 269 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2018, Sione Taumoepeau and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe.model.document import Document
class CustomerExpenses(Document):
pass
| [
"[email protected]"
] | |
57800186771cb6665475f9ebaa02f58d2a3cb52f | a570561df345c055a8763aefb63a153ed2a4d002 | /django/paper_tracker/papers/urls.py | 9a3fbdfcd3d4f5104ea9f1c501953d1a8478b644 | [
"MIT"
] | permissive | kmod/paper_tracker | 7b089613172788360d5401434e58a31740062577 | 43dc10286e8ea3d38b888403091d18549a8106d6 | refs/heads/master | 2020-12-24T12:34:32.130210 | 2016-11-29T22:52:36 | 2016-11-29T22:52:36 | 72,976,695 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 715 | py | from django.conf.urls import url
from . import views
urlpatterns = [
url(r'^papers$', views.papers_index, name='papers_index'),
url(r'^$', views.collections_index, name='collections_index'),
url(r'^collection/(?P<collection_id>[0-9]+)/$', views.collection, name='collection'),
url(r'^paper/new$', views.paper_new, name='paper_new'),
# url(r'^paper/(?P<paper_id>[0-9]+)$', views.paper, name='paper'),
url(r'^paper/(?P<paper_id>[0-9]+)/find_pdf$', views.paper_findpdf, name='paper_findpdf'),
url(r'^paper/(?P<paper_id>[0-9]+)/delete$', views.paper_delete, name='paper_delete'),
url(r'^collection/(?P<collection_id>[0-9]+)/edit/(?P<paper_id>[0-9]+)$', views.cpaper, name='cpaper'),
]
| [
"[email protected]"
] | |
c2a5bcff0bcc1420d7abd3fe87de544b2d01d220 | 5a7a3447d434a458a7bb63f2aa11b64c284d5492 | /thread-ing/thread-test.py | 837138ed0f016cd25779cf75bbc034ccf39bbfbd | [] | no_license | woshimayi/mypython | 35792e12036a7a05f12d3ef7006637b2b03f0e2e | 7f1eb38e8585bf6d2f21d3ad0f64dace61425875 | refs/heads/master | 2023-09-01T08:59:12.301836 | 2023-08-30T05:30:54 | 2023-08-30T05:30:54 | 130,017,052 | 4 | 0 | null | 2018-12-02T16:18:14 | 2018-04-18T06:50:36 | HTML | UTF-8 | Python | false | false | 1,130 | py | #!/usr/bin/env python
# encoding: utf-8
'''
@author: woshimayi
@license: (C) Copyright 2015-2049, Node Supply Chain Manager Corporation Limited.
@contact: [email protected]
@software: garner
@file: thread-test.py
@time: 2020/8/6 17:12
@desc:
'''
import threading
import time
exitFlag = 0
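# Shared flag read inside print_time(); setting it to a truthy value requests an early exit.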
class myThread (threading.Thread):
def __init__(self, threadID, name, counter):
threading.Thread.__init__(self)
self.threadID = threadID
self.name = name
self.counter = counter
def run(self):
print ("开始线程:" + self.name)
print_time(self.name, self.counter, 5)
print ("退出线程:" + self.name)
def print_time(threadName, delay, counter):
while counter:
print(exitFlag)
if exitFlag:
threadName.exit()
time.sleep(delay)
print ("%s: %s" % (threadName, time.ctime(time.time())))
counter -= 1
# create the new threads
thread1 = myThread(1, "Thread-1", 1)
thread2 = myThread(2, "Thread-2", 2)
# start the new threads
thread1.start()
thread2.start()
thread1.join()
thread2.join() | [
"[email protected]"
] | |
d86207b5e670b325df9b9349b9b14a45a03030f9 | a679a7d30f132441fd65d90000c1daeb390a4ab5 | /tests/test_strformat_pybrace.py | 4f93167429011f80b9ca64f6b03e0a92d1959f09 | [
"MIT"
] | permissive | llimeht/i18nspector | 0c4c9d6420fd5f050c45941df4b6cb9ad882c367 | 593e5d1adc5f21765051203fc0e6c16775e60258 | refs/heads/master | 2023-09-04T11:49:40.196374 | 2021-10-26T17:09:32 | 2021-10-26T17:09:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,953 | py | # Copyright © 2016-2018 Jakub Wilk <[email protected]>
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the “Software”), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED “AS IS”, WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import struct
import unittest.mock
from nose.tools import (
assert_equal,
assert_is,
assert_is_instance,
assert_raises,
)
import lib.strformat.pybrace as M
def test_SSIZE_MAX():
struct.pack('=i', M.SSIZE_MAX)
with assert_raises(struct.error):
struct.pack('=i', M.SSIZE_MAX + 1)
small_SSIZE_MAX = unittest.mock.patch('lib.strformat.pybrace.SSIZE_MAX', 42)
# Setting SSIZE_ARGMAX to a small number makes it possible to test for
# a very large number of arguments without running out of memory.
def test_lone_lcb():
with assert_raises(M.Error):
M.FormatString('{')
def test_lone_rcb():
with assert_raises(M.Error):
M.FormatString('}')
def test_invalid_field():
with assert_raises(M.Error):
M.FormatString('{@}')
def test_add_argument():
fmt = M.FormatString('{}')
with assert_raises(RuntimeError):
fmt.add_argument(None, None)
with assert_raises(RuntimeError):
fmt.add_argument('eggs', None)
def test_text():
fmt = M.FormatString('eggs{}bacon{}spam')
assert_equal(len(fmt), 5)
fmt = list(fmt)
assert_equal(fmt[0], 'eggs')
assert_equal(fmt[2], 'bacon')
assert_equal(fmt[4], 'spam')
class test_types:
def t(self, k, *types):
types = frozenset(tp.__name__ for tp in types)
fmt = M.FormatString('{:' + k + '}')
[fld] = fmt
assert_is_instance(fld, M.Field)
assert_equal(fld.types, types)
assert_equal(len(fmt.argument_map), 1)
[(key, [afld])] = fmt.argument_map.items()
assert_equal(key, 0)
assert_is(fld, afld)
def test_default(self):
self.t('', int, float, str)
def test_s(self):
self.t('s', str)
def test_int(self):
for k in 'bcdoxX':
self.t(k, int)
def test_n(self):
self.t('n', int, float)
def test_float(self):
for k in 'eEfFgG':
self.t(k, float)
class test_conversion:
def t(self, c, k, *types):
types = frozenset(tp.__name__ for tp in types)
fmt = M.FormatString('{!' + c + ':' + k + '}')
[fld] = fmt
assert_is_instance(fld, M.Field)
assert_equal(fld.types, types)
assert_equal(len(fmt.argument_map), 1)
[(key, [afld])] = fmt.argument_map.items()
assert_equal(key, 0)
assert_is(fld, afld)
def test_default(self):
for c in 'sra':
self.t(c, '', int, float, str)
def test_s(self):
for c in 'sra':
self.t(c, 's', str)
def test_numeric(self):
for c in 'sra':
for k in 'bcdoxXneEfFgG':
with assert_raises(M.FormatTypeMismatch):
self.t(c, k, int)
def test_bad(self):
with assert_raises(M.ConversionError):
self.t('z', '')
class test_numbered_arguments:
tp_int = frozenset({'int'})
tp_float = frozenset({'float'})
def t(self, s, *types):
fmt = M.FormatString(s)
assert_equal(len(fmt), len(types))
assert_equal(len(fmt.argument_map), len(types))
for (key, args), (xkey, xtype) in zip(sorted(fmt.argument_map.items()), enumerate(types)):
[arg] = args
assert_equal(key, xkey)
assert_equal(arg.types, frozenset({xtype.__name__}))
def test_unnumbered(self):
self.t('{:d}{:f}', int, float)
def test_numbered(self):
self.t('{0:d}{1:f}', int, float)
def test_swapped(self):
self.t('{1:d}{0:f}', float, int)
def test_mixed(self):
with assert_raises(M.ArgumentNumberingMixture):
self.t('{0:d}{:f}')
with assert_raises(M.ArgumentNumberingMixture):
self.t('{:d}{0:f}')
def test_numbered_out_of_range(self):
def t(i):
s = ('{' + str(i) + '}')
M.FormatString(s)
t(M.SSIZE_MAX)
with assert_raises(M.ArgumentRangeError):
t(M.SSIZE_MAX + 1)
@small_SSIZE_MAX
def test_unnumbered_out_of_range(self):
def t(i):
s = '{}' * i
M.FormatString(s)
t(M.SSIZE_MAX + 1)
with assert_raises(M.ArgumentRangeError):
t(M.SSIZE_MAX + 2)
class test_named_arguments:
def test_good(self):
fmt = M.FormatString('{spam}')
[fld] = fmt
[(aname, [afld])] = fmt.argument_map.items()
assert_equal(aname, 'spam')
assert_is(fld, afld)
def test_bad(self):
with assert_raises(M.Error):
M.FormatString('{3ggs}')
class test_format_spec:
def test_bad_char(self):
with assert_raises(M.Error):
M.FormatString('{:@}')
def test_bad_letter(self):
with assert_raises(M.Error):
M.FormatString('{:Z}')
def test_comma(self):
def t(k):
M.FormatString('{:,' + k + '}')
t('')
for k in 'bcdoxXeEfFgG':
t(k)
for k in 'ns':
with assert_raises(M.Error):
t(k)
def test_alt_sign(self):
def t(c, k):
M.FormatString('{:' + c + k + '}')
for c in ' +-#':
t(c, '')
for k in 'bcdoxXneEfFgG':
t(c, k)
with assert_raises(M.Error):
t(c, 's')
def test_align(self):
def t(c, k):
M.FormatString('{:' + c + k + '}')
for c in '<>^':
t(c, '')
for k in 'bcdoxXneEfFgGs':
t(c, k)
t(c + '0', k)
for c in '=0':
t(c, '')
for k in 'bcdoxXneEfFgG':
t(c, k)
with assert_raises(M.Error):
t(c, 's')
def test_width(self):
def t(w, k):
if k == '\0':
k = ''
M.FormatString('{:' + str(w) + k + '}')
for k in 'bcdoxXneEfFgGs\0':
for i in 4, 37, M.SSIZE_MAX:
t(i, k)
with assert_raises(M.Error):
t(M.SSIZE_MAX + 1, k)
def test_precision(self):
def t(w, k):
if k == '\0':
k = ''
M.FormatString('{:.' + str(w) + k + '}')
for k in 'neEfFgGs\0':
for i in {4, 37, M.SSIZE_MAX}:
t(i, k)
with assert_raises(M.Error):
t(M.SSIZE_MAX + 1, k)
for k in 'bcdoxX':
for i in {4, 37, M.SSIZE_MAX, M.SSIZE_MAX + 1}:
with assert_raises(M.Error):
t(i, k)
def test_type_compat(self):
def t(k1, k2):
s = '{0:' + k1 + '}{0:' + k2 + '}'
M.FormatString(s)
def e(k1, k2):
with assert_raises(M.ArgumentTypeMismatch):
t(k1, k2)
ks = 'bcdoxXneEfFgGs'
compat = [
('s', 's'),
('bcdoxX', 'bcdoxXn'),
('n', 'bcdoxXneEfFgG'),
('eEfFgG', 'neEfFgG'),
]
for k in ks:
t(k, '')
t('', k)
for (k1s, k2s) in compat:
for k1 in k1s:
for k2 in k2s:
t(k1, k2)
for k2 in ks:
if k2 not in k2s:
e(k1, k2)
def test_nested_fields(self):
def t(v=None, f=None):
if v is None:
v = ''
if f is None:
f = ''
s = '{' + str(v) + ':{' + str(f) + '}}'
return M.FormatString(s)
fmt = t()
assert_equal(len(fmt.argument_map), 2)
t(v=0, f=M.SSIZE_MAX)
with assert_raises(M.ArgumentRangeError):
t(v=0, f=(M.SSIZE_MAX + 1))
with assert_raises(M.ArgumentNumberingMixture):
t(v=0)
with assert_raises(M.ArgumentNumberingMixture):
t(f=0)
# vim:ts=4 sts=4 sw=4 et
| [
"[email protected]"
] | |
26a8e7dd07b21f480488e1f4a850785dfd0f4f0d | f98c174d9011ed29cd8d304f0e4d7042b00d0233 | /automaton/lib/autoplatform.py | 40fe96107a47a44e1797c0eae35c56deb42b1d0e | [
"MIT"
] | permissive | nemec/Automaton | 10755e544a2004b31b55bf213c516001955a89f1 | eea2f89dc10031fba45c80eb63053480dfc3543f | refs/heads/master | 2020-12-24T15:04:49.102660 | 2016-01-04T20:23:35 | 2016-01-04T20:23:35 | 703,746 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,318 | py | import platform as pl
import os
# pylint: disable-msg=C0103
# This module deals with platform-specific paths
# Set the platform we are currently running on
if pl.system().lower().startswith('windows'):
platform = 'windows'
elif pl.system().lower().startswith('darwin'):
platform = 'mac'
else:
platform = 'linux'
def get_dir_hierarchy():
"""An ordered hierarchy of directories to use."""
return (personaldir(), systemdir(), localdir())
def personaldir():
"""
The personal directory for settings storage.
The settings location in the "home" directory for a user.
"""
if platform == 'windows':
return os.path.join(os.environ['APPDATA'], 'automaton')
else:
return os.path.expanduser('~/.automaton/')
def systemdir():
"""
The system directory for settings storage.
Usually the default "/etc" directory.
"""
if platform == 'windows':
return os.path.join(os.environ['ProgramFiles'], 'automaton')
else:
return "/etc/automaton/"
def localdir():
"""
The local directory for settings storage.
Located in the same place as the rest of the Automaton modules.
Method for getting dir taken from wxPython project
"""
root = __file__
if os.path.islink(root):
root = os.path.realpath(root)
directory = os.path.dirname(os.path.abspath(root))
return os.path.normpath(os.path.join(directory, "../settings/"))
def get_existing_file(filename, strict=False):
"""
Searches through the directory hierarchy for a file/path named "filename"
If 'strict' is false, it returns a path where the file can be placed if there
is no existing file.
If 'strict' is true, returns None there is no existing file.
"""
path = None
# First check to see if the queue file exists anywhere
for d in get_dir_hierarchy():
if os.path.exists(d):
filepath = os.path.join(d, filename)
if os.access(filepath, os.W_OK):
path = filepath
break
# Now try to create a queue file in one of the dirs
if path is None and not strict:
for directory in get_dir_hierarchy():
if not os.path.exists(directory):
try:
os.mkdir(directory)
except IOError:
pass
filepath = os.path.join(directory, filename)
if os.access(directory, os.W_OK):
path = filepath
break
return path
| [
"[email protected]"
] | |
afb78213b6b7a098c79cada1633fcf560bcdde47 | f156f2d94c1334b60afaab93fedb25da937af7a5 | /world/models.py | 90bbe5d05af7a6b3c2c4eb9441a8121432a07ae1 | [] | no_license | zeroam/geodjango | 74e0484263b23a024f453ec5c7fa68da3a2ccbc5 | b56a79ac22a126f11bbf6addbc734b6714f516cb | refs/heads/master | 2020-04-19T09:32:28.141513 | 2019-01-29T07:58:18 | 2019-01-29T07:58:18 | 168,114,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 894 | py | from django.contrib.gis.db import models
class WorldBorder(models.Model):
# Regular Django fields corresponding to the attributes in the world borders shapefile.
name = models.CharField(max_length=50)
area = models.IntegerField()
    pop2005 = models.IntegerField('Population 2005')
fips = models.CharField('FIPS Code', max_length=2)
iso2 = models.CharField('2 Digit ISO', max_length=2)
iso3 = models.CharField('3 Digit ISO', max_length=3)
un = models.IntegerField('United Nation Code')
region = models.IntegerField('Region Code')
subregion = models.IntegerField('Sub-Region Code')
lon = models.FloatField()
lat = models.FloatField()
# GeoDjango-specific: a geometry field (MultiPolygonField)
mpoly = models.MultiPolygonField()
# Returns the string represenation of the modle.
def __str__(self):
return self.name
| [
"[email protected]"
] | |
d70057826d20d1c2123c88d7b0b4fc2374b67a16 | 49536aafb22a77a6caf249c7fadef46d63d24dfe | /tensorflow/tensorflow/python/kernel_tests/matrix_solve_op_test.py | 46c0c0de944b57256fb9fa5f616169edea2a8e3b | [
"Apache-2.0"
] | permissive | wangzhi01/deeplearning-1 | 4e5ad93f0d9ecd302b74352f80fe1fa6ae70bf0d | 46ab82253d956953b8aa98e97ceb6cd290e82288 | refs/heads/master | 2020-05-28T03:14:55.687567 | 2018-09-12T16:52:09 | 2018-09-12T16:52:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,173 | py | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.math_ops.matrix_solve."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.python.client import session
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import control_flow_ops
from tensorflow.python.ops import linalg_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
class MatrixSolveOpTest(test.TestCase):
def _verifySolve(self, x, y, batch_dims=None):
for np_type in [np.float32, np.float64, np.complex64, np.complex128]:
if np_type == np.float32 or np_type == np.complex64:
tol = 1e-5
else:
tol = 1e-12
for adjoint in False, True:
if np_type is [np.float32, np.float64]:
a = x.real().astype(np_type)
b = y.real().astype(np_type)
else:
a = x.astype(np_type)
b = y.astype(np_type)
a_np = np.conj(np.transpose(a)) if adjoint else a
if batch_dims is not None:
a = np.tile(a, batch_dims + [1, 1])
a_np = np.tile(a_np, batch_dims + [1, 1])
b = np.tile(b, batch_dims + [1, 1])
np_ans = np.linalg.solve(a_np, b)
for use_placeholder in False, True:
with self.test_session(use_gpu=True) as sess:
if use_placeholder:
a_ph = array_ops.placeholder(dtypes.as_dtype(np_type))
b_ph = array_ops.placeholder(dtypes.as_dtype(np_type))
tf_ans = linalg_ops.matrix_solve(a_ph, b_ph, adjoint=adjoint)
out = sess.run(tf_ans, {a_ph: a, b_ph: b})
else:
tf_ans = linalg_ops.matrix_solve(a, b, adjoint=adjoint)
out = tf_ans.eval()
self.assertEqual(tf_ans.get_shape(), out.shape)
self.assertEqual(np_ans.shape, out.shape)
self.assertAllClose(np_ans, out, atol=tol, rtol=tol)
def _generateMatrix(self, m, n):
matrix = (np.random.normal(-5, 5,
m * n).astype(np.complex128).reshape([m, n]))
matrix.imag = (np.random.normal(-5, 5, m * n).astype(np.complex128).reshape(
[m, n]))
return matrix
def testSolve(self):
for n in 1, 2, 4, 9:
matrix = self._generateMatrix(n, n)
for nrhs in 1, 2, n:
rhs = self._generateMatrix(n, nrhs)
self._verifySolve(matrix, rhs)
def testSolveBatch(self):
for n in 2, 5:
matrix = self._generateMatrix(n, n)
for nrhs in 1, n:
rhs = self._generateMatrix(n, nrhs)
for batch_dims in [[2], [2, 2], [7, 4]]:
self._verifySolve(matrix, rhs, batch_dims=batch_dims)
def testNonSquareMatrix(self):
# When the solve of a non-square matrix is attempted we should return
# an error
with self.test_session(use_gpu=True):
with self.assertRaises(ValueError):
matrix = constant_op.constant([[1., 2., 3.], [3., 4., 5.]])
linalg_ops.matrix_solve(matrix, matrix)
def testWrongDimensions(self):
# The matrix and right-hand sides should have the same number of rows.
with self.test_session(use_gpu=True):
matrix = constant_op.constant([[1., 0.], [0., 1.]])
rhs = constant_op.constant([[1., 0.]])
with self.assertRaises(ValueError):
linalg_ops.matrix_solve(matrix, rhs)
def testNotInvertible(self):
# The input should be invertible.
with self.test_session(use_gpu=True):
with self.assertRaisesOpError("Input matrix is not invertible."):
# All rows of the matrix below add to zero
matrix = constant_op.constant([[1., 0., -1.], [-1., 1., 0.],
[0., -1., 1.]])
linalg_ops.matrix_solve(matrix, matrix).eval()
def testConcurrent(self):
with self.test_session(use_gpu=True) as sess:
all_ops = []
for adjoint_ in False, True:
lhs1 = random_ops.random_normal([3, 3], seed=42)
lhs2 = random_ops.random_normal([3, 3], seed=42)
rhs1 = random_ops.random_normal([3, 3], seed=42)
rhs2 = random_ops.random_normal([3, 3], seed=42)
s1 = linalg_ops.matrix_solve(lhs1, rhs1, adjoint=adjoint_)
s2 = linalg_ops.matrix_solve(lhs2, rhs2, adjoint=adjoint_)
all_ops += [s1, s2]
val = sess.run(all_ops)
self.assertAllEqual(val[0], val[1])
self.assertAllEqual(val[2], val[3])
class MatrixSolveBenchmark(test.Benchmark):
matrix_shapes = [
(4, 4),
(10, 10),
(16, 16),
(101, 101),
(256, 256),
(1001, 1001),
(1024, 1024),
(2048, 2048),
(513, 4, 4),
(513, 16, 16),
(513, 256, 256),
]
def _GenerateTestData(self, matrix_shape, num_rhs):
batch_shape = matrix_shape[:-2]
matrix_shape = matrix_shape[-2:]
assert matrix_shape[0] == matrix_shape[1]
n = matrix_shape[0]
matrix = (np.ones(matrix_shape).astype(np.float32) /
(2.0 * n) + np.diag(np.ones(n).astype(np.float32)))
rhs = np.ones([n, num_rhs]).astype(np.float32)
matrix = variables.Variable(
np.tile(matrix, batch_shape + (1, 1)), trainable=False)
rhs = variables.Variable(
np.tile(rhs, batch_shape + (1, 1)), trainable=False)
return matrix, rhs
def benchmarkMatrixSolveOp(self):
run_gpu_test = test.is_gpu_available(True)
for adjoint in False, True:
for matrix_shape in self.matrix_shapes:
for num_rhs in 1, 2, matrix_shape[-1]:
with ops.Graph().as_default(), \
session.Session() as sess, \
ops.device("/cpu:0"):
matrix, rhs = self._GenerateTestData(matrix_shape, num_rhs)
x = linalg_ops.matrix_solve(matrix, rhs, adjoint=adjoint)
variables.global_variables_initializer().run()
self.run_op_benchmark(
sess,
control_flow_ops.group(x),
min_iters=25,
store_memory_usage=False,
name=("matrix_solve_cpu_shape_{matrix_shape}_num_rhs_{num_rhs}_"
"adjoint_{adjoint}").format(
matrix_shape=matrix_shape,
num_rhs=num_rhs,
adjoint=adjoint))
if run_gpu_test:
with ops.Graph().as_default(), \
session.Session() as sess, \
ops.device("/gpu:0"):
matrix, rhs = self._GenerateTestData(matrix_shape, num_rhs)
x = linalg_ops.matrix_solve(matrix, rhs, adjoint=adjoint)
variables.global_variables_initializer().run()
self.run_op_benchmark(
sess,
control_flow_ops.group(x),
min_iters=25,
store_memory_usage=False,
name=("matrix_solve_gpu_shape_{matrix_shape}_num_rhs_"
"{num_rhs}_adjoint_{adjoint}").format(
matrix_shape=matrix_shape, num_rhs=num_rhs,
adjoint=adjoint))
if __name__ == "__main__":
test.main()
| [
"[email protected]"
] | |
284a051b4199ebc1e0859e2bc7ce26faacac59c5 | b7a97c2919807983cd418d9262a1246fff9d95a1 | /apps/feeder/models/order.py | 77d0d82a6596d396f812baa2efc04c2fd78f327f | [] | no_license | PUYUP/kirimsaran | da2f439c70979ab88ef2e62e3b2a73c2278ce077 | 250dddddc3d22429c26eed6bfeaf054666f0c110 | refs/heads/main | 2023-08-04T10:11:23.016982 | 2021-09-29T00:59:11 | 2021-09-29T00:59:11 | 397,851,918 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,508 | py | from django.core.validators import RegexValidator
from django.db import models, transaction
from django.conf import settings
from django.apps import apps
from django.utils.translation import ugettext_lazy as _
from .abstract import AbstractCommonField
from ..utils import save_random_identifier
class AbstractOrder(AbstractCommonField):
user = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name='orders'
)
broadcast = models.ForeignKey(
'feeder.Broadcast',
on_delete=models.SET_NULL,
related_name='orders',
null=True,
blank=True
)
fragment = models.ForeignKey(
'feeder.Fragment',
on_delete=models.SET_NULL,
related_name='orders',
null=True,
blank=True
)
identifier = models.CharField(
max_length=7,
editable=False,
validators=[
RegexValidator(
regex='^[a-zA-Z0-9]*$',
message=_("Can only contain the letters a-Z and 0-9."),
code='invalid_identifier'
),
]
)
class Meta:
abstract = True
app_label = 'feeder'
ordering = ['-create_at']
def __str__(self) -> str:
return self.broadcast.label
@transaction.atomic
def save(self, *args, **kwargs):
# Generate random identifier
if not self.pk and not self.identifier:
# We pass the model instance that is being saved
self.identifier = save_random_identifier(self)
return super().save(*args, **kwargs)
@transaction.atomic
def insert_meta(self, meta_dict):
OrderMeta = apps.get_registered_model('feeder', 'OrderMeta')
bulk_meta = []
for meta in meta_dict:
o = OrderMeta(order=self, **meta)
bulk_meta.append(o)
if len(meta_dict) > 0:
try:
OrderMeta.objects.bulk_create(
bulk_meta,
ignore_conflicts=False
)
except Exception as e:
print(e)
@transaction.atomic
def insert_order_item(self, item_dict):
OrderItem = apps.get_registered_model('feeder', 'OrderItem')
bulk_item = []
for item in item_dict:
target = item.get('target', None)
if target:
o = OrderItem(order=self, target=target)
bulk_item.append(o)
if len(bulk_item) > 0:
try:
OrderItem.objects.bulk_create(
bulk_item,
ignore_conflicts=False
)
except Exception as e:
print(e)
class AbstractOrderMeta(AbstractCommonField):
order = models.ForeignKey(
'feeder.Order',
on_delete=models.CASCADE,
related_name='metas'
)
meta_key = models.CharField(max_length=255)
meta_value = models.TextField()
class Meta:
abstract = True
app_label = 'feeder'
ordering = ['-create_at']
def __str__(self) -> str:
return self.meta_key
class OrderItemManager(models.Manager):
@transaction.atomic
def bulk_create(self, objs, **kwargs):
for obj in objs:
target = getattr(obj, 'target', None)
if target:
setattr(obj, 'price', target.price)
setattr(obj, 'method', target.method)
setattr(obj, 'value', target.value)
return super().bulk_create(objs, **kwargs)
class AbstractOrderItem(AbstractCommonField):
order = models.ForeignKey(
'feeder.Order',
on_delete=models.CASCADE,
related_name='items'
)
target = models.ForeignKey(
'feeder.Target',
on_delete=models.SET_NULL,
related_name='items',
null=True,
blank=True
)
price = models.IntegerField(default=0)
method = models.CharField(max_length=255)
value = models.CharField(max_length=255)
objects = OrderItemManager()
class Meta:
abstract = True
app_label = 'feeder'
ordering = ['-create_at']
def __str__(self) -> str:
return str(self.price)
@transaction.atomic
def save(self, *args, **kwargs):
if not self.pk:
self.price = self.target.price
self.method = self.target.method
self.value = self.target.value
return super().save(*args, **kwargs)
| [
"[email protected]"
] | |
316e7365092c96ced14f54d6d4595c49ec57a2ca | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/392/usersdata/314/71167/submittedfiles/formula.py | f70b595a824c5bd58d3737692eea90af30944fcf | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 92 | py | # -*- coding: utf-8 -*-
print('Enter P: \n')
print('Enter i: \n')
print('Enter n: \n')
| [
"[email protected]"
] | |
9cbf73b313ceadb9a84a8983a41d3478ed5d80c4 | bd4144e919786b4aded4345a2a69ed79e0922946 | /1월 3주차/공통조상.py | 2f554303324d304c1362d167d0514050064e797d | [] | no_license | 2020-ASW/kwoneyng-Park | 670ee027a77c1559f808a51aaf58f27ab3bb85b9 | 3ef556889bbf3f2762c01fdfd10b59869d5e912f | refs/heads/master | 2023-05-14T16:14:04.227511 | 2021-06-11T08:00:37 | 2021-06-11T08:00:37 | 321,286,504 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,294 | py | from math import log2
def makeTree(cur, parent):
    depth[cur] = depth[parent] + 1  # a child's depth is its parent's depth + 1
dp[cur][0] = parent
for i in range(1,mxL):
        upper = dp[cur][i-1]  # the 2**(i-1)-th ancestor of cur
if upper == 0:
break
dp[cur][i] = dp[upper][i-1]
# dp[13][2] = dp[6][1]
for child in narr[cur]:
cnt[cur] += makeTree(child, cur)
return cnt[cur]
def find(a,b):
if depth[a] == depth[b]:
# start
for i in range(mxL):
if dp[a][i] == dp[b][i]:
if i == 0:
return dp[a][0]
return find(dp[a][i-1], dp[b][i-1])
if depth[a] < depth[b]:
a,b = b,a
for i in range(mxL):
if depth[b] > depth[dp[a][i]]:
return find(dp[a][i-1],b)
for T in range(1,int(input())+1):
v,e,st,ed = map(int,input().split())
data = list(map(int,input().split()))
narr = [[] for _ in range(v+1)]
    mxL = int(log2(v))+1  # maximum number of binary-lifting jumps
for i in range(e):
narr[data[i*2]].append(data[i*2+1])
depth = [0]*(v+1)
depth[0] = -1
    dp = [[0]*mxL for _ in range(v+1)]  # dp[node][k] = node's 2**k-th ancestor
cnt = [1]*(v+1)
makeTree(1,0)
ans = find(st,ed)
rs = cnt[ans]
print(ans, rs)
| [
"[email protected]"
] | |
96f31bfeb86c80ba89858cec03aa42169c5c1f39 | 9f98ed0db445cd69e22eea9e6cfefa929111fe7f | /setup.py | 8afab05cdee21e2c40619e9211f70e7c7243323a | [] | no_license | zhuyoucai168/talospider | 670c34fc75e709814c1dd9f9f72e0a21e07dee47 | da4f0bdc6f6046c306be5c36d9016b74794823b0 | refs/heads/master | 2020-08-29T05:39:57.661905 | 2019-02-22T06:55:48 | 2019-02-22T06:55:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 470 | py | #!/usr/bin/env python
from setuptools import find_packages, setup
setup(
name='talospider',
version='0.0.6',
author='Howie Hu',
description="A simple,lightweight scraping micro-framework",
author_email='[email protected]',
install_requires=['lxml', 'requests', 'cchardet', 'cssselect'],
url="https://github.com/howie6879/talospider/blob/master/README.md",
packages=find_packages(),
package_data={'talospider': ['utils/*.txt']})
| [
"[email protected]"
] | |
0121af025b75095b667e0d0416853d7206c880a4 | ac83d1ddb84ecc904c73bdf779f458bd77efc98c | /src/programy/config/brain/binaries.py | 730ef746b85832414db26d3fdd3828a61fc3a8a2 | [
"MIT"
] | permissive | secrecy27/chatbot | 77829f32a15e17563f038663aebebdb71e52c5a7 | e65a753cf665a4d6d97b57703431cba5331e4f0b | refs/heads/master | 2022-07-24T08:39:57.788009 | 2020-07-16T03:55:21 | 2020-07-16T03:55:21 | 130,678,143 | 4 | 4 | NOASSERTION | 2022-07-06T19:49:14 | 2018-04-23T10:12:01 | Python | UTF-8 | Python | false | false | 2,747 | py | """
Copyright (c) 2016-2018 Keith Sterling http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
from programy.utils.logging.ylogger import YLogger
from programy.config.section import BaseSectionConfigurationData
class BrainBinariesConfiguration(BaseSectionConfigurationData):
def __init__(self):
BaseSectionConfigurationData.__init__(self, "binaries")
self._save_binary = False
self._load_binary = False
self._binary_filename = None
self._load_aiml_on_binary_fail = False
@property
def save_binary(self):
return self._save_binary
@property
def load_binary(self):
return self._load_binary
@property
def binary_filename(self):
return self._binary_filename
@property
def load_aiml_on_binary_fail(self):
return self._load_aiml_on_binary_fail
def load_config_section(self, configuration_file, configuration, bot_root):
binaries = configuration_file.get_section("binaries", configuration)
if binaries is not None:
self._save_binary = configuration_file.get_option(binaries, "save_binary", missing_value=None)
self._load_binary = configuration_file.get_option(binaries, "load_binary", missing_value=None)
binary_filename = configuration_file.get_option(binaries, "binary_filename", missing_value=None)
if binary_filename is not None:
self._binary_filename = self.sub_bot_root(binary_filename, bot_root)
self._load_aiml_on_binary_fail = configuration_file.get_option(binaries, "load_aiml_on_binary_fail", missing_value=None)
else:
YLogger.warning(self, "'binaries' section missing from bot config, using to defaults")
| [
"[email protected]"
] | |
582e6d7977304ec94ff5e09011134c56548fddee | 8644a2174c3cb7ccfe211a5e49edffbcc3a74a46 | /HackerrankSolutions/ProblemSolving/DataStructures/LinkedList/Easy/insert_node_doubly_ll.py | 30a3ceddc485daee86a8b335eec39479fd28e2eb | [] | no_license | bhavya2403/Learning-Python | 9e7cc9dee21172321fb217cae27c8072357f71ce | 3898211b357fbab320010a82a4811b68611d0422 | refs/heads/main | 2023-03-24T03:19:49.989965 | 2021-03-22T20:11:04 | 2021-03-22T20:11:04 | 315,962,811 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,170 | py | class DoublyLinkedListNode:
def __init__(self, node_data):
self.data = node_data
self.next = None
self.prev = None
class DoublyLinkedList:
def __init__(self):
self.head = None
self.tail = None
def insert_node(self, node_data):
node = DoublyLinkedListNode(node_data)
if not self.head:
self.head = node
else:
self.tail.next = node
node.prev = self.tail
self.tail = node
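# Insert a new node holding `data` at its sorted position in a doubly linked list
# and return the head (which changes when the new node goes in front).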
def sortedInsert(head, data):
node = DoublyLinkedListNode(data)
if data < head.data:
node.next = head
head.prev = node
node.prev = None
head = node
return head
curr = head
while curr:
if curr.next is None:
curr.next = node
node.prev = curr
node.next = None
break
        if curr.data < data < curr.next.data or curr.data == data:
            node.next = curr.next
            node.prev = curr
            curr.next.prev = node  # link the old successor back to the new node
            curr.next = node
break
curr = curr.next
return head
| [
"[email protected]"
] | |
3c40d89ebe58b12e75def3e0190a55e9fe582789 | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/contrib/cv/semantic_segmentation/MMseg-swin/configs/ann/ann_r50-d8_512x512_20k_voc12aug.py | 84eaca27405633ca786ead28b974db2f7f527e5c | [
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference",
"GPL-1.0-or-later"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 3,652 | py | # -*- coding: utf-8 -*-
# BSD 3-Clause License
#
# Copyright (c) 2017
# All rights reserved.
# Copyright 2022 Huawei Technologies Co., Ltd
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# ==========================================================================
_base_ = [
'../_base_/models/ann_r50-d8.py', '../_base_/datasets/pascal_voc12_aug.py',
'../_base_/default_runtime.py', '../_base_/schedules/schedule_20k.py'
]
model = dict(
decode_head=dict(num_classes=21), auxiliary_head=dict(num_classes=21))
| [
"[email protected]"
] | |
c3b5005a2b16bf465392034a5dd8560026528ce1 | 9318b1885946f639f1446431abc6ec4fa33fc9ac | /Cisco_python/module_4/act-3.py | 4a247a9c4472018c19b3a620743bb178d2405f56 | [] | no_license | mcewenar/PYTHON_INFO_I_BASIC | 1d365bcd3d0186c8955e3cde2605831717d0a412 | e5c3278969b420e7ce03bf7903cf57e63865aaca | refs/heads/master | 2023-06-04T02:26:42.124304 | 2021-06-22T02:48:08 | 2021-06-22T02:48:08 | 326,510,259 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,371 | py | #Tu tarea es escribir y probar una función que toma tres argumentos (un año, un mes y un día del mes)
# and returns the corresponding day of the year, or returns None if any of the arguments is not valid.
# You must use the previously written and tested functions. Add some test cases to the code.
# This test is only the beginning.
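# A year is a leap year when it is divisible by 4 and not by 100, unless it is also divisible by 400.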
def isYearLeap(year):
if year % 4 == 0 and (year %100 != 0 or year % 400 == 0):
return True
else:
return False
def daysInMonth(year, month):
if month <= 0 or month > 12 or year < 1582:
return None
else:
if month in [1,3,5,7,8,10,12]:
return 31
elif month == 2:
if isYearLeap(year):
return 29
else:
return 28
else:
return 30
def dayOfYear(year, month, day):
days = 0
for m in range(1, month):
md = daysInMonth(year,m)
if md == None:
return None
days += md
md = daysInMonth(year, month)
if md == None or month == None:
return None
elif day >= 1 and day <= md:
return days + day
else:
return None
while True:
try:
x=int(input("Ingrese un año: "))
y=int(input("Ingrese el mes: "))
z=int(input("Ingrese el día: "))
print(dayOfYear(x, y, z))
except ValueError:
print("No se permite ingresar datos alfanuméricos")
| [
"[email protected]"
] | |
a9ffbf6927f011eca02197d776c8cdf231525322 | 42ff36f4c6c967d2f39bf75c1f24c8b5911a8491 | /whileloop.py | b570fd612b80873ea6da85ac6f2859b7c5ebf077 | [] | no_license | cal1log/python | 0d47b688e619d0cdd464267225f76fff7d3101a4 | c8196c40e5505d4e83301ada97dd384611660778 | refs/heads/main | 2023-06-29T13:11:31.869976 | 2021-07-27T22:36:04 | 2021-07-27T22:36:04 | 366,841,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 213 | py | #!/usr/bin/env python3
i = 1
''' incremental while loop '''
while i <= 5:
print('hello calilog')
i += 1
print()
i = 5
''' decremental while loop '''
while i >= 1:
print('hello calilog')
i -= 1
| [
"[email protected]"
] | |
16bec49a939949dec19469329515808a53e2b58d | ddd35c693194aefb9c009fe6b88c52de7fa7c444 | /Live 10.1.18/_NKFW2/ResettingMixerComponent.py | c2477c605980a00da5595cf0a5b14ce75043c10b | [] | no_license | notelba/midi-remote-scripts | 819372d9c22573877c7912091bd8359fdd42585d | e3ec6846470eed7da8a4d4f78562ed49dc00727b | refs/heads/main | 2022-07-30T00:18:33.296376 | 2020-10-04T00:00:12 | 2020-10-04T00:00:12 | 301,003,961 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,013 | py | # uncompyle6 version 3.7.4
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.8.5 (default, Aug 12 2020, 00:00:00)
# [GCC 10.2.1 20200723 (Red Hat 10.2.1-1)]
# Embedded file name: C:\ProgramData\Ableton\Live 9.7 Suite\Resources\MIDI Remote Scripts\_NKFW2\ResettingMixerComponent.py
# Compiled at: 2017-10-14 18:54:45
from itertools import izip_longest
from _Framework.CompoundComponent import CompoundComponent
from _Framework.SubjectSlot import subject_slot
from ResettingChannelStripComponent import ResettingChannelStripComponent
from Utils import right_justify_track_components
justify_function = right_justify_track_components
class ResettingMixerComponent(CompoundComponent):
""" ResettingMixerComponent works with a SlaveManager to control a group of
ResettingChannelStripComponents. """
def __init__(self, slave_manager, num_tracks=8, right_just_returns=True, name='Resetting_Mixer_Control', *a, **k):
super(ResettingMixerComponent, self).__init__(name=name, *a, **k)
self._right_justify_returns = bool(right_just_returns)
self._channel_strips = []
for _ in xrange(num_tracks):
strip = self.register_component(ResettingChannelStripComponent())
self._channel_strips.append(strip)
self._reassign_tracks.subject = slave_manager
self._reassign_tracks(slave_manager.track_offset)
def set_reset_volume_buttons(self, buttons):
""" Sets the buttons to use for resetting volume. """
for strip, button in izip_longest(self._channel_strips, buttons or []):
strip.set_reset_volume_button(button)
def set_reset_pan_buttons(self, buttons):
""" Sets the buttons to use for resetting pan. """
for strip, button in izip_longest(self._channel_strips, buttons or []):
strip.set_reset_pan_button(button)
def set_reset_send_a_buttons(self, buttons):
""" Sets the buttons to use for resetting send A. """
for strip, button in izip_longest(self._channel_strips, buttons or []):
strip.set_reset_send_a_button(button)
def set_reset_send_b_buttons(self, buttons):
""" Sets the buttons to use for resetting send B. """
for strip, button in izip_longest(self._channel_strips, buttons or []):
strip.set_reset_send_b_button(button)
@subject_slot('track_offset')
def _reassign_tracks(self, offset):
tracks = self._reassign_tracks.subject.tracks_to_use
if self._right_justify_returns:
justify_function(self.song(), tracks, offset, self._channel_strips)
else:
for index, comp in enumerate(self._channel_strips):
track_offset = offset + index
if track_offset in xrange(len(tracks)):
comp.set_track(tracks[track_offset])
else:
comp.set_track(None)
return
# okay decompiling /home/deniz/data/projects/midiremote/Live 10.1.18/_NKFW2/ResettingMixerComponent.pyc
| [
"[email protected]"
] | |
c306ee028f03366c34bdca2afb22d77a7303c459 | a6f70134a9bfdcc630e67a6d05c174d35496ada3 | /Sum of Inverse of Numbers^n.py | bb4e17b7740256803b5bc189aaea48aee10de4d2 | [] | no_license | nauman-sakharkar/Python-2.x | 9c0e9d9e5968631e44ab595175ddcbe0a1b615ad | 31df433481d75c7b76a40b2fc372fa6fefbb779f | refs/heads/master | 2022-10-08T17:20:46.387977 | 2020-06-10T07:36:03 | 2020-06-10T07:36:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 147 | py | n=int(input("Enter the Number Of Times = "))
q=int(input("Enter The Number = "))
sum=0
for i in range(1,n+1):
sum=sum+((1/q)**i)
print("",sum)
| [
"[email protected]"
] | |
4ea666bc8e896fbcd40fb73d27e4e967147c0a7b | 3e85618c79a1a934fec543e1327e772ca081a5b9 | /N1226.py | f9cf0945dd8c9496a8325051fcd4c4ce8e6bba04 | [] | no_license | ghdus4185/SWEXPERT | 72d79aa4a668452327a676a644b952bab191c79b | 4dc74ad74df7837450de4ce55526dac7760ce738 | refs/heads/master | 2020-07-16T18:31:22.153239 | 2019-12-20T04:18:30 | 2019-12-20T04:18:30 | 205,843,190 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,138 | py | import sys
sys.stdin = open('input.txt', 'r')
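# Depth-first search over a 16x16 maze (0 = open, 1 = wall, 2 = start, 3 = goal)
# to decide whether the goal is reachable from the start.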
def find(x,y):
global di, dj, maze, possible, check
stack = []
stack.append([x,y])
while stack:
n = stack.pop()
for k in range(4):
ni = n[0] + di[k]
nj = n[1] + dj[k]
            # check that the neighbour lies inside the 16x16 grid
if 0 <= ni < 16 and 0 <= nj < 16:
if maze[ni][nj] == 3:
possible = 1
return possible
if maze[ni][nj] == 0:
stack.append([ni, nj])
maze[n[0]][n[1]] = 1
return possible
di = [-1, 1, 0, 0]
dj = [0, 0, -1, 1]
for tc in range(1, 11):
t = int(input())
maze = [list(map(int, ' '.join(input()).split())) for _ in range(16)]
    # find the starting point (cell value 2)
res = 0
for i in range(16):
for j in range(16):
if maze[i][j] == 2:
res = 1
break
if res == 1:
break
check = [[0]*16 for _ in range(16)]
possible = 0
find(i, j)
if possible == 1:
print('#{} 1'.format(t))
else:
print('#{} 0'.format(t))
| [
"[email protected]"
] | |
7ede643951e1f15dbbd488aee63423bae39dbced | 33db9e6d0a73f2353747a4c9d3223d55a38730a8 | /apps/first_app/models.py | 9c9e3168c45565effb1144ef8f0ded356a58890e | [] | no_license | philmccormick23/Likes-and-Books | 94d778df265fe9b1645f783c83358617ca6fe0c0 | 0a9b18ceb7ce33a72334900e7f9f62b10d87a796 | refs/heads/master | 2020-04-02T15:14:46.314382 | 2018-10-24T19:46:10 | 2018-10-24T19:46:10 | 154,559,822 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 753 | py | from __future__ import unicode_literals
from django.db import models
# Create your models here.
class User(models.Model):
first_name = models.CharField(max_length=200)
last_name = models.CharField(max_length=200)
email = models.EmailField()
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
class Books(models.Model):
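    # one uploading User (ForeignKey) plus a many-to-many "likes" relation to User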
name = models.CharField(max_length=255)
desc = models.CharField(max_length=255)
    upload = models.ForeignKey(User, null=True, related_name="codingdojo", on_delete=models.PROTECT)
    users = models.ManyToManyField(User, related_name="likes")
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True) | [
"[email protected]"
] | |
0c21269cec3d106c781ee734f3a60a7415c78889 | 1792509a9accac11c837e2a18dcb3d34f1d7e30e | /client/category.py | edfa52d0b3eb42271cc8d9e90fe84f84bc763d38 | [] | no_license | kafura-kafiri/herb | 2d3166b94e5fdacd106d6c4bc21d09f6c9cf568e | 48329a0059e2843c72ad2d85e7bb31379f0042e5 | refs/heads/master | 2020-04-09T09:35:03.720161 | 2018-12-17T11:02:25 | 2018-12-17T11:02:25 | 160,238,902 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 478 | py | import requests
url = 'http://localhost:5000/categories/'
headers = {'content-type': 'application/json'}
_categories = [
{
'ancestors': ['a', 'b', 'c'],
'title': 'd'
}, {
'ancestors': ['x', 'y'],
'title': 'z'
}
]
def fill():
requests.post(url + '*')
print()
print('categories >>')
for category in _categories:
response = requests.post(url + '+', data={'json': str(category)})
print(response.content) | [
"[email protected]"
] | |
28cb89506c201fba276f34362a75f76ce01ffe95 | f6d2385cd8eb896e17c5e72ac75abe6a0ba28659 | /greffe1/essais.py | 9f4aebeaa116aa85140e83a9274bb4de511e3b61 | [] | no_license | pastrouveedespeudo/greffegreffe | fba94c9169c3d021714eabf1a45812ca762cfe9d | 8ebe4d555246aed26e705671014a260a23148a6a | refs/heads/master | 2020-06-12T14:50:17.590418 | 2019-07-04T14:01:25 | 2019-07-04T14:01:25 | 194,335,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | from fonction import function
from fonction import ecrire
from fonction import lecture
from fonction import ecrire2
page = 'https://fr.yahoo.com/?guccounter=1&guce_referrer=aHR0cHM6Ly93d3cuZ29vZ2xlLmNvbS8&guce_referrer_sig=AQAAAMdlxFFv1CpIEQ0VuhLMZl4pjm_0Ur2KGpLoKBkg4lBqmzqdwLxulK-E29QEXf815EL1VsURfRYB-M3USUSs2fFR6tT63nGaOfQyk5mY4V9AltWx-EzQiluy32sS5KxDY0lQRsL6YmEXNMq4qWdOpBoyt2T6KtkfK9Bce2Dt8ViB'
page = function(page)
page = ecrire(page)
page_affichage = lecture()
ecrire2(page_affichage)
| [
"[email protected]"
] | |
79258c9426d558486274c453e5f1e7bd0cbb4a0a | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/validPalindrome_20200803230103.py | 15299f1055653cb18098fa47a7ef7af4c4238410 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | import re
def palindrome(str):
if len(str) == 0:
return True
actualStr = str.lower()
str = str.lower()
cleanStr = re.sub(r"[,.;:@#?!&$]+",' ',str)
print('cleanStr',cleanStr)
str = str.split(" ")
str.reverse()
newArr = []
print(actualStr)
for i in str:
newArr.append(i[::-1])
    print(newArr)
    # compare only the alphanumeric characters, ignoring case and punctuation
    filtered = [c for c in actualStr if c.isalnum()]
    return filtered == filtered[::-1]
print(palindrome("A man, a plan, a canal: Panama"))
| [
"[email protected]"
] | |
ca7095ab3d8c8f9a438a75a24c6495f62b664b90 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_geologies.py | 84e56d430f9355cad6a66d3b9a709b593d67b684 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | py |
from xai.brain.wordbase.nouns._geology import _GEOLOGY
#class header
class _GEOLOGIES(_GEOLOGY, ):
def __init__(self,):
_GEOLOGY.__init__(self)
self.name = "GEOLOGIES"
self.specie = 'nouns'
self.basic = "geology"
self.jsondata = {}
| [
"[email protected]"
] | |
84eef6cc65ec245e27db562aaabcc91b480142bb | bd72c02af0bbd8e3fc0d0b131e3fb9a2aaa93e75 | /Hash Table/logger_rate_limiter.py | 2d6dcba5dcff6b6585ced12fe6631fc0e2af2b74 | [] | no_license | harvi7/Leetcode-Problems-Python | d3a5e8898aceb11abc4cae12e1da50061c1d352c | 73adc00f6853e821592c68f5dddf0a823cce5d87 | refs/heads/master | 2023-05-11T09:03:03.181590 | 2023-04-29T22:03:41 | 2023-04-29T22:03:41 | 222,657,838 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 636 | py | class Logger:
def __init__(self):
"""
Initialize your data structure here.
"""
self._msg_dict = {}
def shouldPrintMessage(self, timestamp: int, message: str) -> bool:
"""
Returns true if the message should be printed in the given timestamp, otherwise returns false.
If this method returns false, the message will not be printed.
The timestamp is in seconds granularity.
"""
        if message not in self._msg_dict or 10 <= timestamp - self._msg_dict[message]:
self._msg_dict[message] = timestamp
return True
return False
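# --- illustrative usage sketch (added; not part of the original solution) ---
if __name__ == "__main__":
    logger = Logger()
    print(logger.shouldPrintMessage(1, "foo"))   # True  - first time the message is seen
    print(logger.shouldPrintMessage(3, "foo"))   # False - still inside the 10 second window
    print(logger.shouldPrintMessage(11, "foo"))  # True  - at least 10 seconds have passed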
| [
"[email protected]"
] | |
377e0a1762965418f5d2a4d4871feeed710a71e8 | 565f95f207f49d987bdc372cd80942be95451731 | /python/misc/hlgrep | d76c0a07ff98af04d294df10bef1e8a2e4b4256a | [] | no_license | dustin/snippets | 76724c2131546bddd35a80da16921a44a49e2262 | 5be535890f9e71e298fec601d55c469c542ea330 | refs/heads/master | 2023-08-18T22:01:39.546961 | 2022-12-29T07:39:40 | 2022-12-29T07:39:40 | 18,840 | 18 | 4 | null | 2013-01-06T08:10:39 | 2008-05-22T07:58:19 | C | UTF-8 | Python | false | false | 306 | #!/usr/bin/env python
import sys
import posix
import re
smso=posix.popen("tput smso").read()
rmso=posix.popen("tput rmso").read()
expression=re.compile("(" + sys.argv[1] + ")")
l=sys.stdin.readline()
while l != '':
s=expression.sub(smso + '\\1' + rmso, l)
sys.stdout.write(s)
l=sys.stdin.readline()
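# (added usage note, assumed invocation) pipe text through the script to highlight
# every match of PATTERN using the terminal's standout mode:
#   some_command | hlgrep PATTERN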
| [
"[email protected]"
] | ||
a35928309c1fa5bf69a6928dedc88f21e8e1bf73 | d05a59feee839a4af352b7ed2fd6cf10a288a3cb | /examples/chartsheet.py | 3edbd9dbfe920d08f7d3e6d4ecf08d471cba16e0 | [
"BSD-2-Clause-Views"
] | permissive | elessarelfstone/XlsxWriter | 0d958afd593643f990373bd4d8a32bafc0966534 | bb7b7881c7a93c89d6eaac25f12dda08d58d3046 | refs/heads/master | 2020-09-24T06:17:20.840848 | 2019-11-24T23:43:01 | 2019-11-24T23:43:01 | 225,685,272 | 1 | 0 | NOASSERTION | 2019-12-03T18:09:06 | 2019-12-03T18:09:05 | null | UTF-8 | Python | false | false | 1,774 | py | #######################################################################
#
# An example of creating an Excel chart in a chartsheet with Python
# and XlsxWriter.
#
# Copyright 2013-2019, John McNamara, [email protected]
#
import xlsxwriter
workbook = xlsxwriter.Workbook('chartsheet.xlsx')
# Add a worksheet to hold the data.
worksheet = workbook.add_worksheet()
# Add a chartsheet. A worksheet that only holds a chart.
chartsheet = workbook.add_chartsheet()
# Add a format for the headings.
bold = workbook.add_format({'bold': 1})
# Add the worksheet data that the charts will refer to.
headings = ['Number', 'Batch 1', 'Batch 2']
data = [
[2, 3, 4, 5, 6, 7],
[10, 40, 50, 20, 10, 50],
[30, 60, 70, 50, 40, 30],
]
worksheet.write_row('A1', headings, bold)
worksheet.write_column('A2', data[0])
worksheet.write_column('B2', data[1])
worksheet.write_column('C2', data[2])
# Create a new bar chart.
chart1 = workbook.add_chart({'type': 'bar'})
# Configure the first series.
chart1.add_series({
'name': '=Sheet1!$B$1',
'categories': '=Sheet1!$A$2:$A$7',
'values': '=Sheet1!$B$2:$B$7',
})
# Configure a second series. Note use of alternative syntax to define ranges.
chart1.add_series({
'name': ['Sheet1', 0, 2],
'categories': ['Sheet1', 1, 0, 6, 0],
'values': ['Sheet1', 1, 2, 6, 2],
})
# Add a chart title and some axis labels.
chart1.set_title ({'name': 'Results of sample analysis'})
chart1.set_x_axis({'name': 'Test number'})
chart1.set_y_axis({'name': 'Sample length (mm)'})
# Set an Excel chart style.
chart1.set_style(11)
# Add the chart to the chartsheet.
chartsheet.set_chart(chart1)
# Display the chartsheet as the active sheet when the workbook is opened.
chartsheet.activate()
workbook.close()
| [
"[email protected]"
] | |
f485b1fe84144a2e6e02f8c6db683e8241399c64 | 831fe3255ab2dd7abb9fc79a21756012d57cb863 | /projects/nerf/nerf/raymarcher.py | 3be73d32299a15739202136510193efb2809c1ef | [
"BSD-3-Clause",
"CC-BY-4.0"
] | permissive | ksengin/pytorch3d | 3e84365ed2499c11ef5a443c4ab28bda85e71f7e | 1fffa20541c9fa3248e02473bee294724922d989 | refs/heads/master | 2021-05-16T21:24:36.150263 | 2021-03-19T07:07:23 | 2021-03-19T07:07:23 | 250,474,512 | 0 | 0 | NOASSERTION | 2020-03-27T08:00:17 | 2020-03-27T08:00:17 | null | UTF-8 | Python | false | false | 2,796 | py | # Copyright (c) Facebook, Inc. and its affiliates. All rights reserved.
import torch
from pytorch3d.renderer import EmissionAbsorptionRaymarcher
from pytorch3d.renderer.implicit.raymarching import (
_check_density_bounds,
_check_raymarcher_inputs,
_shifted_cumprod,
)
class EmissionAbsorptionNeRFRaymarcher(EmissionAbsorptionRaymarcher):
"""
This is essentially the `pytorch3d.renderer.EmissionAbsorptionRaymarcher`
which additionally returns the rendering weights. It also skips returning
the computation of the alpha-mask which is, in case of NeRF, equal to 1
everywhere.
The weights are later used in the NeRF pipeline to carry out the importance
ray-sampling for the fine rendering pass.
For more details about the EmissionAbsorptionRaymarcher please refer to
the documentation of `pytorch3d.renderer.EmissionAbsorptionRaymarcher`.
"""
def forward(
self,
rays_densities: torch.Tensor,
rays_features: torch.Tensor,
eps: float = 1e-10,
**kwargs,
) -> torch.Tensor:
"""
Args:
rays_densities: Per-ray density values represented with a tensor
of shape `(..., n_points_per_ray, 1)` whose values range in [0, 1].
rays_features: Per-ray feature values represented with a tensor
of shape `(..., n_points_per_ray, feature_dim)`.
eps: A lower bound added to `rays_densities` before computing
                the absorption function (cumprod of `1-rays_densities` along
                each ray). This prevents the cumprod from yielding an exact 0
which would inhibit any gradient-based learning.
Returns:
features: A tensor of shape `(..., feature_dim)` containing
the rendered features for each ray.
weights: A tensor of shape `(..., n_points_per_ray)` containing
                the ray-specific emission-absorption distribution.
Each ray distribution `(..., :)` is a valid probability
distribution, i.e. it contains non-negative values that integrate
to 1, such that `weights.sum(dim=-1)==1).all()` yields `True`.
"""
_check_raymarcher_inputs(
rays_densities,
rays_features,
None,
z_can_be_none=True,
features_can_be_none=False,
density_1d=True,
)
_check_density_bounds(rays_densities)
rays_densities = rays_densities[..., 0]
absorption = _shifted_cumprod(
(1.0 + eps) - rays_densities, shift=self.surface_thickness
)
weights = rays_densities * absorption
features = (weights[..., None] * rays_features).sum(dim=-2)
return features, weights
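# --- illustrative usage sketch (added for clarity; shapes and values are made up) ---
if __name__ == "__main__":
    raymarcher = EmissionAbsorptionNeRFRaymarcher()
    # 2 rays, 16 points per ray, 3 feature channels; densities must lie in [0, 1]
    densities = torch.rand(2, 16, 1)
    colors = torch.rand(2, 16, 3)
    rendered, weights = raymarcher(rays_densities=densities, rays_features=colors)
    print(rendered.shape, weights.shape)  # expected: (2, 3) and (2, 16)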
| [
"[email protected]"
] | |
ada1ac04d0162f1f086d1ebfc1bb718c67f74aee | 2a34a824e1a2d3bac7b99edcf19926a477a157a0 | /src/cr/vision/core/colors.py | 2865015e52642389b5b3c74caf559bef6dda8111 | [
"Apache-2.0"
] | permissive | carnotresearch/cr-vision | a7cb07157dbf470ed3fe560ef85d6e5194c660ae | 317fbf70c558e8f9563c3d0ba3bebbc5f84af622 | refs/heads/master | 2023-04-10T22:34:34.833043 | 2021-04-25T13:32:14 | 2021-04-25T13:32:14 | 142,256,002 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,002 | py | '''
List of common colors in b g r format
'''
ALICEBLUE = (255, 248, 240)
ANTIQUEWHITE = (215, 235, 250)
AQUA = (255, 255, 0)
AQUAMARINE = (212, 255, 127)
AZURE = (255, 255, 240)
BEIGE = (220, 245, 245)
BISQUE = (196, 228, 255)
BLACK = (0, 0, 0)
BLANCHEDALMOND = (205, 235, 255)
BLUE = (255, 0, 0)
BLUEVIOLET = (226, 43, 138)
BROWN = (42, 42, 165)
BURLYWOOD = (135, 184, 222)
CADETBLUE = (160, 158, 95)
CHARTREUSE = (0, 255, 127)
CHOCOLATE = (30, 105, 210)
CORAL = (80, 127, 255)
CORNFLOWERBLUE = (237, 149, 100)
CORNSILK = (220, 248, 255)
CRIMSON = (60, 20, 220)
CYAN = (255, 255, 0)
DARKBLUE = (139, 0, 0)
DARKCYAN = (139, 139, 0)
DARKGOLDENROD = (11, 134, 184)
DARKGRAY = (169, 169, 169)
DARKGREEN = (0, 100, 0)
DARKGREY = (169, 169, 169)
DARKKHAKI = (107, 183, 189)
DARKMAGENTA = (139, 0, 139)
DARKOLIVEGREEN = (47, 107, 85)
DARKORANGE = (0, 140, 255)
DARKORCHID = (204, 50, 153)
DARKRED = (0, 0, 139)
DARKSALMON = (122, 150, 233)
DARKSEAGREEN = (143, 188, 143)
DARKSLATEBLUE = (139, 61, 72)
DARKSLATEGRAY = (79, 79, 47)
DARKSLATEGREY = (79, 79, 47)
DARKTURQUOISE = (209, 206, 0)
DARKVIOLET = (211, 0, 148)
DEEPPINK = (147, 20, 255)
DEEPSKYBLUE = (255, 191, 0)
DIMGRAY = (105, 105, 105)
DIMGREY = (105, 105, 105)
DODGERBLUE = (255, 144, 30)
FIREBRICK = (34, 34, 178)
FLORALWHITE = (240, 250, 255)
FORESTGREEN = (34, 139, 34)
FUCHSIA = (255, 0, 255)
GAINSBORO = (220, 220, 220)
GHOSTWHITE = (255, 248, 248)
GOLD = (0, 215, 255)
GOLDENROD = (32, 165, 218)
GRAY = (128, 128, 128)
GREEN = (0, 128, 0)
GREENYELLOW = (47, 255, 173)
GREY = (128, 128, 128)
HONEYDEW = (240, 255, 240)
HOTPINK = (180, 105, 255)
INDIANRED = (92, 92, 205)
INDIGO = (130, 0, 75)
IVORY = (240, 255, 255)
KHAKI = (140, 230, 240)
LAVENDER = (250, 230, 230)
LAVENDERBLUSH = (245, 240, 255)
LAWNGREEN = (0, 252, 124)
LEMONCHIFFON = (205, 250, 255)
LIGHTBLUE = (230, 216, 173)
LIGHTCORAL = (128, 128, 240)
LIGHTCYAN = (255, 255, 224)
LIGHTGOLDENRODYELLOW = (210, 250, 250)
LIGHTGRAY = (211, 211, 211)
LIGHTGREEN = (144, 238, 144)
LIGHTGREY = (211, 211, 211)
LIGHTPINK = (193, 182, 255)
LIGHTSALMON = (122, 160, 255)
LIGHTSEAGREEN = (170, 178, 32)
LIGHTSKYBLUE = (250, 206, 135)
LIGHTSLATEGRAY = (153, 136, 119)
LIGHTSLATEGREY = (153, 136, 119)
LIGHTSTEELBLUE = (222, 196, 176)
LIGHTYELLOW = (224, 255, 255)
LIME = (0, 255, 0)
LIMEGREEN = (50, 205, 50)
LINEN = (230, 240, 250)
MAGENTA = (255, 0, 255)
MAROON = (0, 0, 128)
MEDIUMAQUAMARINE = (170, 205, 102)
MEDIUMBLUE = (205, 0, 0)
MEDIUMORCHID = (211, 85, 186)
MEDIUMPURPLE = (219, 112, 147)
MEDIUMSEAGREEN = (113, 179, 60)
MEDIUMSLATEBLUE = (238, 104, 123)
MEDIUMSPRINGGREEN = (154, 250, 0)
MEDIUMTURQUOISE = (204, 209, 72)
MEDIUMVIOLETRED = (133, 21, 199)
MIDNIGHTBLUE = (112, 25, 25)
MINTCREAM = (250, 255, 245)
MISTYROSE = (225, 228, 255)
MOCCASIN = (181, 228, 255)
NAVAJOWHITE = (173, 222, 255)
NAVY = (128, 0, 0)
OLDLACE = (230, 245, 253)
OLIVE = (0, 128, 128)
OLIVEDRAB = (35, 142, 107)
ORANGE = (0, 165, 255)
ORANGERED = (0, 69, 255)
ORCHID = (214, 112, 218)
PALEGOLDENROD = (170, 232, 238)
PALEGREEN = (152, 251, 152)
PALETURQUOISE = (238, 238, 175)
PALEVIOLETRED = (147, 112, 219)
PAPAYAWHIP = (213, 239, 255)
PEACHPUFF = (185, 218, 255)
PERU = (63, 133, 205)
PINK = (203, 192, 255)
PLUM = (221, 160, 221)
POWDERBLUE = (230, 224, 176)
PURPLE = (128, 0, 128)
RED = (0, 0, 255)
ROSYBROWN = (143, 143, 188)
ROYALBLUE = (225, 105, 65)
SADDLEBROWN = (19, 69, 139)
SALMON = (114, 128, 250)
SANDYBROWN = (96, 164, 244)
SEAGREEN = (87, 139, 46)
SEASHELL = (238, 245, 255)
SIENNA = (45, 82, 160)
SILVER = (192, 192, 192)
SKYBLUE = (235, 206, 135)
SLATEBLUE = (205, 90, 106)
SLATEGRAY = (144, 128, 112)
SLATEGREY = (144, 128, 112)
SNOW = (250, 250, 255)
SPRINGGREEN = (127, 255, 0)
STEELBLUE = (180, 130, 70)
TAN = (140, 180, 210)
TEAL = (128, 128, 0)
THISTLE = (216, 191, 216)
TOMATO = (71, 99, 255)
TURQUOISE = (208, 224, 64)
VIOLET = (238, 130, 238)
WHEAT = (179, 222, 245)
WHITE = (255, 255, 255)
WHITESMOKE = (245, 245, 245)
YELLOW = (0, 255, 255)
YELLOWGREEN = (50, 205, 154)
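# (added usage note, assumed) the tuples above are OpenCV-style (B, G, R) values, e.g.
# cv2.rectangle(image, (0, 0), (40, 40), RED, thickness=-1) would draw a red square.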
| [
"[email protected]"
] | |
83ef5f92fe38434ba6cab8b23f51c000ed6735e5 | fb4a589b87fde22d43fe4345794c00bbc3785085 | /resources/oci-lib/lib/python3.6/site-packages/services/events/src/oci_cli_events/generated/events_cli.py | 2f9059015121b13e588187ead12c46120419a5ed | [] | no_license | dickiesanders/oci-cli-action | a29ccf353a09cb110a38dc9c7f9ea76260c62a48 | ef409321a0b9bdbce37e0e39cfe0e6499ccffe1f | refs/heads/master | 2022-12-18T02:52:07.786446 | 2020-09-19T09:44:02 | 2020-09-19T09:44:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 28,182 | py | # coding: utf-8
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
from __future__ import print_function
import click
import oci # noqa: F401
import six # noqa: F401
import sys # noqa: F401
from oci_cli.cli_root import cli
from oci_cli import cli_constants # noqa: F401
from oci_cli import cli_util
from oci_cli import json_skeleton_utils
from oci_cli import custom_types # noqa: F401
from oci_cli.aliasing import CommandGroupWithAlias
@cli.command(cli_util.override('events.events_root_group.command_name', 'events'), cls=CommandGroupWithAlias, help=cli_util.override('events.events_root_group.help', """API for the Events Service. Use this API to manage rules and actions that create automation
in your tenancy. For more information, see [Overview of Events]."""), short_help=cli_util.override('events.events_root_group.short_help', """Events API"""))
@cli_util.help_option_group
def events_root_group():
pass
@click.command(cli_util.override('events.rule_group.command_name', 'rule'), cls=CommandGroupWithAlias, help="""The configuration details of an Events rule. For more information, see [Managing Rules for Events].""")
@cli_util.help_option_group
def rule_group():
pass
events_root_group.add_command(rule_group)
@rule_group.command(name=cli_util.override('events.change_rule_compartment.command_name', 'change-compartment'), help=u"""Moves a rule into a different compartment within the same tenancy. For information about moving resources between compartments, see [Moving Resources to a Different Compartment].""")
@cli_util.option('--rule-id', required=True, help=u"""The [OCID] of this rule.""")
@cli_util.option('--compartment-id', required=True, help=u"""The [OCID] of the compartment into which the resource should be moved.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the if-match parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def change_rule_compartment(ctx, from_json, rule_id, compartment_id, if_match):
if isinstance(rule_id, six.string_types) and len(rule_id.strip()) == 0:
raise click.UsageError('Parameter --rule-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
_details = {}
_details['compartmentId'] = compartment_id
client = cli_util.build_client('events', 'events', ctx)
result = client.change_rule_compartment(
rule_id=rule_id,
change_rule_compartment_details=_details,
**kwargs
)
cli_util.render_response(result, ctx)
@rule_group.command(name=cli_util.override('events.create_rule.command_name', 'create'), help=u"""Creates a new rule.""")
@cli_util.option('--display-name', required=True, help=u"""A string that describes the rule. It does not have to be unique, and you can change it. Avoid entering confidential information.""")
@cli_util.option('--is-enabled', required=True, type=click.BOOL, help=u"""Whether or not this rule is currently enabled.
Example: `true`""")
@cli_util.option('--condition', required=True, help=u"""A filter that specifies the event that will trigger actions associated with this rule. A few important things to remember about filters:
* Fields not mentioned in the condition are ignored. You can create a valid filter that matches all events with two curly brackets: `{}`
For more examples, see [Matching Events with Filters]. * For a condition with fields to match an event, the event must contain all the field names listed in the condition. Field names must appear in the condition with the same nesting structure used in the event.
For a list of reference events, see [Services that Produce Events]. * Rules apply to events in the compartment in which you create them and any child compartments. This means that a condition specified by a rule only matches events emitted from resources in the compartment or any of its child compartments. * Wildcard matching is supported with the asterisk (*) character.
For examples of wildcard matching, see [Matching Events with Filters]
Example: `\\\"eventType\\\": \\\"com.oraclecloud.databaseservice.autonomous.database.backup.end\\\"`""")
@cli_util.option('--compartment-id', required=True, help=u"""The [OCID] of the compartment to which this rule belongs.""")
@cli_util.option('--actions', required=True, type=custom_types.CLI_COMPLEX_TYPE, help=u"""""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--description', help=u"""A string that describes the details of the rule. It does not have to be unique, and you can change it. Avoid entering confidential information.""")
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. Exists for cross-compatibility only. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "ACTIVE", "INACTIVE", "UPDATING", "DELETING", "DELETED", "FAILED"]), multiple=True, help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. Multiple states can be specified, returning on the first state. For example, --wait-for-state SUCCEEDED --wait-for-state FAILED would return on whichever lifecycle state is reached first. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource to see if it has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'actions': {'module': 'events', 'class': 'ActionDetailsList'}, 'freeform-tags': {'module': 'events', 'class': 'dict(str, string)'}, 'defined-tags': {'module': 'events', 'class': 'dict(str, dict(str, object))'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'actions': {'module': 'events', 'class': 'ActionDetailsList'}, 'freeform-tags': {'module': 'events', 'class': 'dict(str, string)'}, 'defined-tags': {'module': 'events', 'class': 'dict(str, dict(str, object))'}}, output_type={'module': 'events', 'class': 'Rule'})
@cli_util.wrap_exceptions
def create_rule(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, display_name, is_enabled, condition, compartment_id, actions, description, freeform_tags, defined_tags):
kwargs = {}
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
_details = {}
_details['displayName'] = display_name
_details['isEnabled'] = is_enabled
_details['condition'] = condition
_details['compartmentId'] = compartment_id
_details['actions'] = cli_util.parse_json_parameter("actions", actions)
if description is not None:
_details['description'] = description
if freeform_tags is not None:
_details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if defined_tags is not None:
_details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
client = cli_util.build_client('events', 'events', ctx)
result = client.create_rule(
create_rule_details=_details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_rule') and callable(getattr(client, 'get_rule')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_rule(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@rule_group.command(name=cli_util.override('events.delete_rule.command_name', 'delete'), help=u"""Deletes a rule.""")
@cli_util.option('--rule-id', required=True, help=u"""The [OCID] of this rule.""")
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the if-match parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.confirm_delete_option
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "ACTIVE", "INACTIVE", "UPDATING", "DELETING", "DELETED", "FAILED"]), multiple=True, help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. Multiple states can be specified, returning on the first state. For example, --wait-for-state SUCCEEDED --wait-for-state FAILED would return on whichever lifecycle state is reached first. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource to see if it has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={})
@cli_util.wrap_exceptions
def delete_rule(ctx, from_json, wait_for_state, max_wait_seconds, wait_interval_seconds, rule_id, if_match):
if isinstance(rule_id, six.string_types) and len(rule_id.strip()) == 0:
raise click.UsageError('Parameter --rule-id cannot be whitespace or empty string')
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
client = cli_util.build_client('events', 'events', ctx)
result = client.delete_rule(
rule_id=rule_id,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_rule') and callable(getattr(client, 'get_rule')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
oci.wait_until(client, client.get_rule(rule_id), 'lifecycle_state', wait_for_state, succeed_on_not_found=True, **wait_period_kwargs)
except oci.exceptions.ServiceError as e:
# We make an initial service call so we can pass the result to oci.wait_until(), however if we are waiting on the
# outcome of a delete operation it is possible that the resource is already gone and so the initial service call
# will result in an exception that reflects a HTTP 404. In this case, we can exit with success (rather than raising
# the exception) since this would have been the behaviour in the waiter anyway (as for delete we provide the argument
# succeed_on_not_found=True to the waiter).
#
# Any non-404 should still result in the exception being thrown.
if e.status == 404:
pass
else:
raise
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Please retrieve the resource to find its current state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
@rule_group.command(name=cli_util.override('events.get_rule.command_name', 'get'), help=u"""Retrieves a rule.""")
@cli_util.option('--rule-id', required=True, help=u"""The [OCID] of this rule.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'events', 'class': 'Rule'})
@cli_util.wrap_exceptions
def get_rule(ctx, from_json, rule_id):
if isinstance(rule_id, six.string_types) and len(rule_id.strip()) == 0:
raise click.UsageError('Parameter --rule-id cannot be whitespace or empty string')
kwargs = {}
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
client = cli_util.build_client('events', 'events', ctx)
result = client.get_rule(
rule_id=rule_id,
**kwargs
)
cli_util.render_response(result, ctx)
@rule_group.command(name=cli_util.override('events.list_rules.command_name', 'list'), help=u"""Lists rules for this compartment.""")
@cli_util.option('--compartment-id', required=True, help=u"""The [OCID] of the compartment to which this rule belongs.""")
@cli_util.option('--limit', type=click.INT, help=u"""The maximum number of items to return. 1 is the minimum, 50 is the maximum. Default: 10""")
@cli_util.option('--page', help=u"""For list pagination. The value of the opc-next-page response header from the previous \"List\" call. For important details about how pagination works, see [List Pagination].""")
@cli_util.option('--lifecycle-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "ACTIVE", "INACTIVE", "UPDATING", "DELETING", "DELETED", "FAILED"]), help=u"""A filter to return only rules that match the lifecycle state in this parameter.
Example: `Creating`""")
@cli_util.option('--display-name', help=u"""A filter to return only rules with descriptions that match the displayName string in this parameter.
Example: `\"This rule sends a notification upon completion of DbaaS backup.\"`""")
@cli_util.option('--sort-by', type=custom_types.CliCaseInsensitiveChoice(["TIME_CREATED", "ID", "DISPLAY_NAME"]), help=u"""Specifies the attribute with which to sort the rules.
Default: `timeCreated`
* **TIME_CREATED:** Sorts by timeCreated. * **DISPLAY_NAME:** Sorts by displayName. * **ID:** Sorts by id.""")
@cli_util.option('--sort-order', type=custom_types.CliCaseInsensitiveChoice(["ASC", "DESC"]), help=u"""Specifies sort order.
* **ASC:** Ascending sort order. * **DESC:** Descending sort order.""")
@cli_util.option('--all', 'all_pages', is_flag=True, help="""Fetches all pages of results. If you provide this option, then you cannot provide the --limit option.""")
@cli_util.option('--page-size', type=click.INT, help="""When fetching results, the number of results to fetch per call. Only valid when used with --all or --limit, and ignored otherwise.""")
@json_skeleton_utils.get_cli_json_input_option({})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={}, output_type={'module': 'events', 'class': 'list[RuleSummary]'})
@cli_util.wrap_exceptions
def list_rules(ctx, from_json, all_pages, page_size, compartment_id, limit, page, lifecycle_state, display_name, sort_by, sort_order):
if all_pages and limit:
raise click.UsageError('If you provide the --all option you cannot provide the --limit option')
kwargs = {}
if limit is not None:
kwargs['limit'] = limit
if page is not None:
kwargs['page'] = page
if lifecycle_state is not None:
kwargs['lifecycle_state'] = lifecycle_state
if display_name is not None:
kwargs['display_name'] = display_name
if sort_by is not None:
kwargs['sort_by'] = sort_by
if sort_order is not None:
kwargs['sort_order'] = sort_order
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
client = cli_util.build_client('events', 'events', ctx)
if all_pages:
if page_size:
kwargs['limit'] = page_size
result = cli_util.list_call_get_all_results(
client.list_rules,
compartment_id=compartment_id,
**kwargs
)
elif limit is not None:
result = cli_util.list_call_get_up_to_limit(
client.list_rules,
limit,
page_size,
compartment_id=compartment_id,
**kwargs
)
else:
result = client.list_rules(
compartment_id=compartment_id,
**kwargs
)
cli_util.render_response(result, ctx)
@rule_group.command(name=cli_util.override('events.update_rule.command_name', 'update'), help=u"""Updates a rule.""")
@cli_util.option('--rule-id', required=True, help=u"""The [OCID] of this rule.""")
@cli_util.option('--display-name', help=u"""A string that describes the rule. It does not have to be unique, and you can change it. Avoid entering confidential information.""")
@cli_util.option('--description', help=u"""A string that describes the details of the rule. It does not have to be unique, and you can change it. Avoid entering confidential information.""")
@cli_util.option('--is-enabled', type=click.BOOL, help=u"""Whether or not this rule is currently enabled.
Example: `true`""")
@cli_util.option('--condition', help=u"""A filter that specifies the event that will trigger actions associated with this rule. A few important things to remember about filters:
* Fields not mentioned in the condition are ignored. You can create a valid filter that matches all events with two curly brackets: `{}`
For more examples, see [Matching Events with Filters]. * For a condition with fields to match an event, the event must contain all the field names listed in the condition. Field names must appear in the condition with the same nesting structure used in the event.
For a list of reference events, see [Services that Produce Events]. * Rules apply to events in the compartment in which you create them and any child compartments. This means that a condition specified by a rule only matches events emitted from resources in the compartment or any of its child compartments. * Wildcard matching is supported with the asterisk (*) character.
For examples of wildcard matching, see [Matching Events with Filters]
Example: `\\\"eventType\\\": \\\"com.oraclecloud.databaseservice.autonomous.database.backup.end\\\"`""")
@cli_util.option('--actions', type=custom_types.CLI_COMPLEX_TYPE, help=u"""""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--freeform-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace. Exists for cross-compatibility only. For more information, see [Resource Tags].
Example: `{\"Department\": \"Finance\"}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--defined-tags', type=custom_types.CLI_COMPLEX_TYPE, help=u"""Defined tags for this resource. Each key is predefined and scoped to a namespace. For more information, see [Resource Tags].
Example: `{\"Operations\": {\"CostCenter\": \"42\"}}`""" + custom_types.cli_complex_type.COMPLEX_TYPE_HELP)
@cli_util.option('--if-match', help=u"""For optimistic concurrency control. In the PUT or DELETE call for a resource, set the if-match parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.""")
@cli_util.option('--force', help="""Perform update without prompting for confirmation.""", is_flag=True)
@cli_util.option('--wait-for-state', type=custom_types.CliCaseInsensitiveChoice(["CREATING", "ACTIVE", "INACTIVE", "UPDATING", "DELETING", "DELETED", "FAILED"]), multiple=True, help="""This operation creates, modifies or deletes a resource that has a defined lifecycle state. Specify this option to perform the action and then wait until the resource reaches a given lifecycle state. Multiple states can be specified, returning on the first state. For example, --wait-for-state SUCCEEDED --wait-for-state FAILED would return on whichever lifecycle state is reached first. If timeout is reached, a return code of 2 is returned. For any other error, a return code of 1 is returned.""")
@cli_util.option('--max-wait-seconds', type=click.INT, help="""The maximum time to wait for the resource to reach the lifecycle state defined by --wait-for-state. Defaults to 1200 seconds.""")
@cli_util.option('--wait-interval-seconds', type=click.INT, help="""Check every --wait-interval-seconds to see whether the resource to see if it has reached the lifecycle state defined by --wait-for-state. Defaults to 30 seconds.""")
@json_skeleton_utils.get_cli_json_input_option({'actions': {'module': 'events', 'class': 'ActionDetailsList'}, 'freeform-tags': {'module': 'events', 'class': 'dict(str, string)'}, 'defined-tags': {'module': 'events', 'class': 'dict(str, dict(str, object))'}})
@cli_util.help_option
@click.pass_context
@json_skeleton_utils.json_skeleton_generation_handler(input_params_to_complex_types={'actions': {'module': 'events', 'class': 'ActionDetailsList'}, 'freeform-tags': {'module': 'events', 'class': 'dict(str, string)'}, 'defined-tags': {'module': 'events', 'class': 'dict(str, dict(str, object))'}}, output_type={'module': 'events', 'class': 'Rule'})
@cli_util.wrap_exceptions
def update_rule(ctx, from_json, force, wait_for_state, max_wait_seconds, wait_interval_seconds, rule_id, display_name, description, is_enabled, condition, actions, freeform_tags, defined_tags, if_match):
if isinstance(rule_id, six.string_types) and len(rule_id.strip()) == 0:
raise click.UsageError('Parameter --rule-id cannot be whitespace or empty string')
if not force:
if actions or freeform_tags or defined_tags:
if not click.confirm("WARNING: Updates to actions and freeform-tags and defined-tags will replace any existing values. Are you sure you want to continue?"):
ctx.abort()
kwargs = {}
if if_match is not None:
kwargs['if_match'] = if_match
kwargs['opc_request_id'] = cli_util.use_or_generate_request_id(ctx.obj['request_id'])
_details = {}
if display_name is not None:
_details['displayName'] = display_name
if description is not None:
_details['description'] = description
if is_enabled is not None:
_details['isEnabled'] = is_enabled
if condition is not None:
_details['condition'] = condition
if actions is not None:
_details['actions'] = cli_util.parse_json_parameter("actions", actions)
if freeform_tags is not None:
_details['freeformTags'] = cli_util.parse_json_parameter("freeform_tags", freeform_tags)
if defined_tags is not None:
_details['definedTags'] = cli_util.parse_json_parameter("defined_tags", defined_tags)
client = cli_util.build_client('events', 'events', ctx)
result = client.update_rule(
rule_id=rule_id,
update_rule_details=_details,
**kwargs
)
if wait_for_state:
if hasattr(client, 'get_rule') and callable(getattr(client, 'get_rule')):
try:
wait_period_kwargs = {}
if max_wait_seconds is not None:
wait_period_kwargs['max_wait_seconds'] = max_wait_seconds
if wait_interval_seconds is not None:
wait_period_kwargs['max_interval_seconds'] = wait_interval_seconds
click.echo('Action completed. Waiting until the resource has entered state: {}'.format(wait_for_state), file=sys.stderr)
result = oci.wait_until(client, client.get_rule(result.data.id), 'lifecycle_state', wait_for_state, **wait_period_kwargs)
except oci.exceptions.MaximumWaitTimeExceeded as e:
# If we fail, we should show an error, but we should still provide the information to the customer
click.echo('Failed to wait until the resource entered the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
sys.exit(2)
except Exception:
click.echo('Encountered error while waiting for resource to enter the specified state. Outputting last known resource state', file=sys.stderr)
cli_util.render_response(result, ctx)
raise
else:
click.echo('Unable to wait for the resource to enter the specified state', file=sys.stderr)
cli_util.render_response(result, ctx)
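# --- illustrative CLI invocations (added; the OCIDs below are placeholders, not real values) ---
#   oci events rule list --compartment-id ocid1.compartment.oc1..example
#   oci events rule get --rule-id ocid1.eventrule.oc1..example
#   oci events rule update --rule-id ocid1.eventrule.oc1..example --is-enabled false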
| [
"[email protected]"
] | |
2915daa920a718772f982608d13e1abbe0e0de8f | 96e76bcb634e0e48bcf3ae352eb235ed9bc32b36 | /app/migrations/0020_news_date_and_time.py | abf6be4f45c0a5659dc925a56654a48eab0b5a70 | [] | no_license | Ectroverse/EctroverseDjango | cef8a8a2149271c0995f1b60676f636e5dfc23ec | a3dad97b4e7a89694248c21df75ebdcc37e975f0 | refs/heads/master | 2023-04-18T21:12:20.062646 | 2021-04-28T11:06:01 | 2021-04-28T11:06:01 | 291,338,914 | 1 | 3 | null | 2021-01-23T14:32:21 | 2020-08-29T19:50:33 | Python | UTF-8 | Python | false | false | 409 | py | # Generated by Django 3.1 on 2021-01-24 16:50
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('app', '0019_auto_20210124_1638'),
]
operations = [
migrations.AddField(
model_name='news',
name='date_and_time',
field=models.DateTimeField(blank=True, default=None, null=True),
),
]
| [
"[email protected]"
] | |
6c58b0de7a6aaa29da887706c57a87152a52622a | 7208db50a22368c335e7d7d8b37a3fedb09c60e5 | /cairis/gui/ResponsesDialog.py | 462cb9196456757bb3e23ec2869fb3380d5121b9 | [
"Apache-2.0"
] | permissive | nebloc/cairis | 41c7f20af56c46bddcb3927dc4aa410f6477e6ed | 1277a148a270d5471b59fc238aa6590bc1d3044e | refs/heads/master | 2020-03-24T03:51:11.908096 | 2018-07-27T16:07:36 | 2018-07-27T16:07:36 | 142,434,768 | 0 | 0 | Apache-2.0 | 2018-07-26T11:58:24 | 2018-07-26T11:58:24 | null | UTF-8 | Python | false | false | 4,350 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import wx
from cairis.core.armid import *
import cairis.core.Risk
from ResponseDialog import ResponseDialog
from DialogClassParameters import DialogClassParameters
from ResponseDialogParameters import ResponseDialogParameters
from AcceptEnvironmentPanel import AcceptEnvironmentPanel
from TransferEnvironmentPanel import TransferEnvironmentPanel
from MitigateEnvironmentPanel import MitigateEnvironmentPanel
from DimensionBaseDialog import DimensionBaseDialog
from cairis.core.ARM import *
__author__ = 'Shamal Faily'
class ResponsesDialog(DimensionBaseDialog):
def __init__(self,parent):
DimensionBaseDialog.__init__(self,parent,RESPONSES_ID,'Responses',(800,300),'response.png')
self.theMainWindow = parent
idList = [RESPONSES_LISTRESPONSES_ID,RESPONSES_BUTTONADD_ID,RESPONSES_BUTTONDELETE_ID]
columnList = ['Name','Type']
self.buildControls(idList,columnList,self.dbProxy.getResponses,'response')
listCtrl = self.FindWindowById(RESPONSES_LISTRESPONSES_ID)
listCtrl.SetColumnWidth(0,300)
def addObjectRow(self,mitListCtrl,listRow,response):
mitListCtrl.InsertStringItem(listRow,response.name())
mitListCtrl.SetStringItem(listRow,1,response.__class__.__name__)
def onAdd(self,evt):
try:
riskDict = self.dbProxy.getDimensionNames('risk')
if (len(riskDict) == 0):
dlg = wx.MessageDialog(self,'Cannot mitigate for non-existing risks','Add response',wx.OK)
dlg.ShowModal()
dlg.Destroy()
return
responseTypes = ['Accept','Transfer','Mitigate']
from DimensionNameDialog import DimensionNameDialog
rtDlg = DimensionNameDialog(self,'response',responseTypes,'Select',(300,200))
if (rtDlg.ShowModal() == DIMNAME_BUTTONACTION_ID):
responseType = rtDlg.dimensionName()
responsePanel = MitigateEnvironmentPanel
if (responseType == 'Accept'):
responsePanel = AcceptEnvironmentPanel
elif (responseType == 'Transfer'):
responsePanel = TransferEnvironmentPanel
addParameters = ResponseDialogParameters(RESPONSE_ID,'Add response',ResponseDialog,RESPONSE_BUTTONCOMMIT_ID,self.dbProxy.addResponse,True,responsePanel,responseType)
self.addObject(addParameters)
rtDlg.Destroy()
except ARMException,errorText:
dlg = wx.MessageDialog(self,str(errorText),'Add response',wx.OK | wx.ICON_ERROR)
dlg.ShowModal()
dlg.Destroy()
return
def onUpdate(self,evt):
try:
selectedObjt = self.objts[self.selectedLabel]
responseType = selectedObjt.responseType()
responsePanel = MitigateEnvironmentPanel
if (responseType == 'Accept'):
responsePanel = AcceptEnvironmentPanel
elif (responseType == 'Transfer'):
responsePanel = TransferEnvironmentPanel
updateParameters = ResponseDialogParameters(RESPONSE_ID,'Edit response',ResponseDialog,RESPONSE_BUTTONCOMMIT_ID,self.dbProxy.updateResponse,False,responsePanel,responseType)
self.updateObject(selectedObjt,updateParameters)
except ARMException,errorText:
dlg = wx.MessageDialog(self,str(errorText),'Edit response',wx.OK | wx.ICON_ERROR)
dlg.ShowModal()
      dlg.Destroy()
def onDelete(self,evt):
try:
self.dbProxy.associateGrid(self.theMainWindow.FindWindowById(ID_REQGRID))
self.deleteObject('No response','Delete response',self.dbProxy.deleteResponse)
except ARMException,errorText:
dlg = wx.MessageDialog(self,str(errorText),'Delete response',wx.OK | wx.ICON_ERROR)
dlg.ShowModal()
dlg.Destroy()
| [
"[email protected]"
] | |
053f1ccda4e39457dc790683227d0bc1b6d7da4d | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-ges/huaweicloudsdkges/v2/model/list_jobs_resp_job_list.py | 45bcf3b71b8ca926bfc21443a9d3494688005247 | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 11,035 | py | # coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListJobsRespJobList:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'job_id': 'str',
'status': 'str',
'job_type': 'str',
'job_name': 'str',
'related_graph': 'str',
'begin_time': 'str',
'end_time': 'str',
'job_detail': 'ShowJobRespJobDetail',
'fail_reason': 'str',
'job_progress': 'float'
}
attribute_map = {
'job_id': 'job_id',
'status': 'status',
'job_type': 'job_type',
'job_name': 'job_name',
'related_graph': 'related_graph',
'begin_time': 'begin_time',
'end_time': 'end_time',
'job_detail': 'job_detail',
'fail_reason': 'fail_reason',
'job_progress': 'job_progress'
}
def __init__(self, job_id=None, status=None, job_type=None, job_name=None, related_graph=None, begin_time=None, end_time=None, job_detail=None, fail_reason=None, job_progress=None):
"""ListJobsRespJobList
The model defined in huaweicloud sdk
        :param job_id: Job ID.
        :type job_id: str
        :param status: Job status. - pending: waiting - running: in progress - success: succeeded - failed: failed
        :type status: str
        :param job_type: Job type.
        :type job_type: str
        :param job_name: Job name.
        :type job_name: str
        :param related_graph: Name of the associated graph.
        :type related_graph: str
        :param begin_time: Job start time, in UTC, formatted as \"yyyy-MM-dd'T'HH:mm:ss\".
        :type begin_time: str
        :param end_time: Job end time, in UTC, formatted as \"yyyy-MM-dd'T'HH:mm:ss\".
        :type end_time: str
        :param job_detail:
        :type job_detail: :class:`huaweicloudsdkges.v2.ShowJobRespJobDetail`
        :param fail_reason: Reason why the job failed.
        :type fail_reason: str
        :param job_progress: Job execution progress; reserved field, not currently used.
        :type job_progress: float
"""
self._job_id = None
self._status = None
self._job_type = None
self._job_name = None
self._related_graph = None
self._begin_time = None
self._end_time = None
self._job_detail = None
self._fail_reason = None
self._job_progress = None
self.discriminator = None
if job_id is not None:
self.job_id = job_id
if status is not None:
self.status = status
if job_type is not None:
self.job_type = job_type
if job_name is not None:
self.job_name = job_name
if related_graph is not None:
self.related_graph = related_graph
if begin_time is not None:
self.begin_time = begin_time
if end_time is not None:
self.end_time = end_time
if job_detail is not None:
self.job_detail = job_detail
if fail_reason is not None:
self.fail_reason = fail_reason
if job_progress is not None:
self.job_progress = job_progress
@property
def job_id(self):
"""Gets the job_id of this ListJobsRespJobList.
        Job ID.
:return: The job_id of this ListJobsRespJobList.
:rtype: str
"""
return self._job_id
@job_id.setter
def job_id(self, job_id):
"""Sets the job_id of this ListJobsRespJobList.
        Job ID.
:param job_id: The job_id of this ListJobsRespJobList.
:type job_id: str
"""
self._job_id = job_id
@property
def status(self):
"""Gets the status of this ListJobsRespJobList.
        Job status. - pending: waiting - running: in progress - success: succeeded - failed: failed
:return: The status of this ListJobsRespJobList.
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this ListJobsRespJobList.
        Job status. - pending: waiting - running: in progress - success: succeeded - failed: failed
:param status: The status of this ListJobsRespJobList.
:type status: str
"""
self._status = status
@property
def job_type(self):
"""Gets the job_type of this ListJobsRespJobList.
        Job type.
:return: The job_type of this ListJobsRespJobList.
:rtype: str
"""
return self._job_type
@job_type.setter
def job_type(self, job_type):
"""Sets the job_type of this ListJobsRespJobList.
        Job type.
:param job_type: The job_type of this ListJobsRespJobList.
:type job_type: str
"""
self._job_type = job_type
@property
def job_name(self):
"""Gets the job_name of this ListJobsRespJobList.
        Job name.
:return: The job_name of this ListJobsRespJobList.
:rtype: str
"""
return self._job_name
@job_name.setter
def job_name(self, job_name):
"""Sets the job_name of this ListJobsRespJobList.
        Job name.
:param job_name: The job_name of this ListJobsRespJobList.
:type job_name: str
"""
self._job_name = job_name
@property
def related_graph(self):
"""Gets the related_graph of this ListJobsRespJobList.
        Name of the associated graph.
:return: The related_graph of this ListJobsRespJobList.
:rtype: str
"""
return self._related_graph
@related_graph.setter
def related_graph(self, related_graph):
"""Sets the related_graph of this ListJobsRespJobList.
        Name of the associated graph.
:param related_graph: The related_graph of this ListJobsRespJobList.
:type related_graph: str
"""
self._related_graph = related_graph
@property
def begin_time(self):
"""Gets the begin_time of this ListJobsRespJobList.
        Job start time, in UTC, formatted as \"yyyy-MM-dd'T'HH:mm:ss\".
:return: The begin_time of this ListJobsRespJobList.
:rtype: str
"""
return self._begin_time
@begin_time.setter
def begin_time(self, begin_time):
"""Sets the begin_time of this ListJobsRespJobList.
        Job start time, in UTC, formatted as \"yyyy-MM-dd'T'HH:mm:ss\".
:param begin_time: The begin_time of this ListJobsRespJobList.
:type begin_time: str
"""
self._begin_time = begin_time
@property
def end_time(self):
"""Gets the end_time of this ListJobsRespJobList.
        Job end time, in UTC, formatted as \"yyyy-MM-dd'T'HH:mm:ss\".
:return: The end_time of this ListJobsRespJobList.
:rtype: str
"""
return self._end_time
@end_time.setter
def end_time(self, end_time):
"""Sets the end_time of this ListJobsRespJobList.
        Job end time, in UTC, formatted as \"yyyy-MM-dd'T'HH:mm:ss\".
:param end_time: The end_time of this ListJobsRespJobList.
:type end_time: str
"""
self._end_time = end_time
@property
def job_detail(self):
"""Gets the job_detail of this ListJobsRespJobList.
:return: The job_detail of this ListJobsRespJobList.
:rtype: :class:`huaweicloudsdkges.v2.ShowJobRespJobDetail`
"""
return self._job_detail
@job_detail.setter
def job_detail(self, job_detail):
"""Sets the job_detail of this ListJobsRespJobList.
:param job_detail: The job_detail of this ListJobsRespJobList.
:type job_detail: :class:`huaweicloudsdkges.v2.ShowJobRespJobDetail`
"""
self._job_detail = job_detail
@property
def fail_reason(self):
"""Gets the fail_reason of this ListJobsRespJobList.
        Reason why the job failed.
:return: The fail_reason of this ListJobsRespJobList.
:rtype: str
"""
return self._fail_reason
@fail_reason.setter
def fail_reason(self, fail_reason):
"""Sets the fail_reason of this ListJobsRespJobList.
        Reason why the job failed.
:param fail_reason: The fail_reason of this ListJobsRespJobList.
:type fail_reason: str
"""
self._fail_reason = fail_reason
@property
def job_progress(self):
"""Gets the job_progress of this ListJobsRespJobList.
        Job execution progress; reserved field, not currently used.
:return: The job_progress of this ListJobsRespJobList.
:rtype: float
"""
return self._job_progress
@job_progress.setter
def job_progress(self, job_progress):
"""Sets the job_progress of this ListJobsRespJobList.
        Job execution progress; reserved field, not currently used.
:param job_progress: The job_progress of this ListJobsRespJobList.
:type job_progress: float
"""
self._job_progress = job_progress
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListJobsRespJobList):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
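# --- illustrative usage sketch (added for clarity; the field values below are invented) ---
if __name__ == "__main__":
    job = ListJobsRespJobList(job_id="example-job-id", status="success", job_type="ImportGraph")
    print(job.to_dict())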
| [
"[email protected]"
] | |
3d9cb190898bb0de72ad98aa055083f485cc3c08 | f07392633118f7f6aff0a5a9b2a5c9eaab1a0299 | /Examples/packaging/Capitalize/capitalize/capital_mod.py | 2b4d8147fce933a404c366dee5112a3e807866e5 | [] | no_license | UWPCE-PythonCert/Py300 | afc4abca736cfea031292db6bed996465f37604f | 7f93d20ae66ba9a56c4dcc0c1fdafcf79db15349 | refs/heads/master | 2020-05-26T13:43:38.098926 | 2018-03-05T07:11:48 | 2018-03-05T07:11:48 | 85,002,542 | 4 | 7 | null | null | null | null | UTF-8 | Python | false | false | 1,092 | py | #!/usr/bin/env python
"""
A really simple module, just to demonstrate packaging
"""
def capitalize_line(instr):
"""
capitalizes the input string
    :param instr: the string to capitalize; it should be a single line.
:type instr: string
:returns: a capitalized version of instr
"""
return " ".join( word.capitalize() for word in instr.split() )
def capitalize(infilename, outfilename):
"""
reads the contents of infilename, and writes it to outfilename, but with
every word capitalized
note: very primitive -- it will mess some files up!
this is called by the capitalize script
:param infilename: The file name you want to process
:type infilename: string
:param outfilename: the name of the new file that will be created
:type outfilename: string
:returns: None
:raises: IOError if infilename doesn't exist.
"""
    with open(infilename) as infile, open(outfilename, 'w') as outfile:
        for line in infile:
            outfile.write(capitalize_line(line))
            outfile.write("\n")
return None | [
"[email protected]"
] | |
086e7cd5094e3ff935e8b9311c8664873ac1cfc8 | d57b51ec207002e333b8655a8f5832ed143aa28c | /.history/l3/start_20200620180546.py | 304413c0994d4341aa3bde6dc27f92c38feb6864 | [] | no_license | yevheniir/python_course_2020 | b42766c4278a08b8b79fec77e036a1b987accf51 | a152d400ab4f45d9d98d8ad8b2560d6f0b408c0b | refs/heads/master | 2022-11-15T07:13:24.193173 | 2020-07-11T15:43:26 | 2020-07-11T15:43:26 | 278,890,802 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10 | py | import dis | [
"[email protected]"
] | |
6dce8ab5aa0b8bd0c0ee86d7753accc09fc9c3a9 | 8fa8ded3772dd7a124c1bbb91fc109ed2b63574b | /mycelium/apps/groups/migrations/0024_auto__add_field_grouprule_account__add_field_group_account.py | 6efb4463b3442d5bbdba8fed2d4d0f47a61bb622 | [] | no_license | skoczen/mycelium | 3642b0f5e5ea03d609a3e499c7ad68092101dce0 | da0f169163f4dc93e2dc2b0d934abf4f18c18af0 | refs/heads/master | 2020-04-10T09:21:46.893254 | 2014-05-20T02:27:06 | 2014-05-20T02:27:06 | 2,114,887 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,761 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
depends_on = (
("accounts", "0004_create_old_data_account"),
)
def forwards(self, orm):
# Adding field 'GroupRule.account'
db.add_column('groups_grouprule', 'account', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['accounts.Account']), keep_default=False)
# Adding field 'Group.account'
db.add_column('groups_group', 'account', self.gf('django.db.models.fields.related.ForeignKey')(default=1, to=orm['accounts.Account']), keep_default=False)
def backwards(self, orm):
# Deleting field 'GroupRule.account'
db.delete_column('groups_grouprule', 'account_id')
# Deleting field 'Group.account'
db.delete_column('groups_group', 'account_id')
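    # Illustrative usage (South-era workflow; assumes a configured Django project):
    #     python manage.py migrate groups            # apply forwards()
    #     python manage.py migrate groups 0023       # roll back to the previous migration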
models = {
'accounts.account': {
'Meta': {'ordering': "('name',)", 'object_name': 'Account'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'plan': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['accounts.Plan']"}),
'subdomain': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255', 'db_index': 'True'})
},
'accounts.plan': {
'Meta': {'ordering': "('name',)", 'object_name': 'Plan'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'})
},
'groups.group': {
'Meta': {'ordering': "('name',)", 'object_name': 'Group'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['accounts.Account']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'qi_simple_searchable_search_field': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'rules_boolean': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'groups.grouprule': {
'Meta': {'ordering': "('group', 'id')", 'object_name': 'GroupRule'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['accounts.Account']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'group': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['groups.Group']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'left_side': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rules.LeftSide']", 'null': 'True', 'blank': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'operator': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rules.Operator']", 'null': 'True', 'blank': 'True'}),
'right_side_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['rules.RightSideType']", 'null': 'True', 'blank': 'True'}),
'right_side_value': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'})
},
'rules.leftside': {
'Meta': {'ordering': "('order',)", 'object_name': 'LeftSide'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['accounts.Account']"}),
'add_closing_paren': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'allowed_operators': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['rules.Operator']", 'symmetrical': 'False'}),
'allowed_right_side_types': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['rules.RightSideType']", 'symmetrical': 'False'}),
'choices': ('picklefield.fields.PickledObjectField', [], {'null': 'True', 'blank': 'True'}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '100'}),
'query_string_partial': ('django.db.models.fields.TextField', [], {})
},
'rules.operator': {
'Meta': {'ordering': "('order',)", 'object_name': 'Operator'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['accounts.Account']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'display_name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'order': ('django.db.models.fields.IntegerField', [], {'default': '100'}),
'query_string_partial': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'use_filter': ('django.db.models.fields.BooleanField', [], {'default': 'True'})
},
'rules.rightsidetype': {
'Meta': {'object_name': 'RightSideType'},
'account': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['accounts.Account']"}),
'created_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified_at': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'})
}
}
complete_apps = ['groups']
| [
"[email protected]"
] | |
133eb8ff9bdd88c775a362eb91c937e712aea0bb | c50e7eb190802d7849c0d0cea02fb4d2f0021777 | /src/workloads/setup.py | 4eb445f63f094bb7c215e4c40b08e0266e1db3d9 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | Azure/azure-cli-extensions | c1615b19930bba7166c282918f166cd40ff6609c | b8c2cf97e991adf0c0a207d810316b8f4686dc29 | refs/heads/main | 2023-08-24T12:40:15.528432 | 2023-08-24T09:17:25 | 2023-08-24T09:17:25 | 106,580,024 | 336 | 1,226 | MIT | 2023-09-14T10:48:57 | 2017-10-11T16:27:31 | Python | UTF-8 | Python | false | false | 1,704 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
#
# Code generated by aaz-dev-tools
# --------------------------------------------------------------------------------------------
from codecs import open
from setuptools import setup, find_packages
# HISTORY.rst entry.
VERSION = '0.1.0a1'
# The full list of classifiers is available at
# https://pypi.python.org/pypi?%3Aaction=list_classifiers
CLASSIFIERS = [
'Development Status :: 4 - Beta',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'License :: OSI Approved :: MIT License',
]
DEPENDENCIES = []
with open('README.md', 'r', encoding='utf-8') as f:
README = f.read()
with open('HISTORY.rst', 'r', encoding='utf-8') as f:
HISTORY = f.read()
setup(
name='workloads',
version=VERSION,
description='Microsoft Azure Command-Line Tools Workloads Extension.',
long_description=README + '\n\n' + HISTORY,
license='MIT',
author='Microsoft Corporation',
author_email='[email protected]',
url='https://github.com/Azure/azure-cli-extensions/tree/main/src/workloads',
classifiers=CLASSIFIERS,
packages=find_packages(exclude=["tests"]),
package_data={'azext_workloads': ['azext_metadata.json']},
install_requires=DEPENDENCIES
)
| [
"[email protected]"
] | |
842ea8d847ca0d8514eb8efc7bf05841e0aa0d31 | e04d7dedd28e6ae77fdead98cc870e8969e4e7fc | /venv/bin/easy_install-3.6 | 0884f63c734475a699effa551be434a39db10d2c | [] | no_license | damodharn/Python_Week3 | eb9670707ffcf07feb2596431eb747ab90ea0c89 | 3a47a6f464fb066a00f7277de4ca9d9c37850da9 | refs/heads/master | 2020-06-29T21:53:42.311181 | 2019-08-13T13:29:17 | 2019-08-13T13:29:17 | 200,634,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 446 | 6 | #!/home/admin1/PycharmProjects/week3/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.6'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.6')()
)
| [
"[email protected]"
] | |
0b7dfd99cdf13d9ecafbc21d8fc4f34870cc081b | e1d6de1fb5ce02907df8fa4d4e17e61d98e8727d | /crawlers/urllib2s/urllib2_posts.py | e58f5ccf7fb07608478bd5d3e0cbb37eff0ded44 | [] | no_license | neuroph12/nlpy | 3f3d1a8653a832d6230cb565428ee0c77ef7451d | 095976d144dacf07414bf7ee42b811eaa67326c1 | refs/heads/master | 2020-09-16T08:24:37.381353 | 2016-09-10T19:24:05 | 2016-09-10T19:24:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 257 | py | import urllib
import urllib2
url = 'http://www.douban.com/accounts/login'
values = {'form_email': '',
'form_password': ''}
data = urllib.urlencode(values)
req = urllib2.Request(url, data)
resp = urllib2.urlopen(req)
html = resp.read()
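# Rough Python 3 equivalent of the POST above (illustrative sketch; urllib2 was
# split into urllib.request and urllib.parse):
#     from urllib.parse import urlencode
#     from urllib.request import Request, urlopen
#     data = urlencode(values).encode("utf-8")
#     html = urlopen(Request(url, data)).read()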
print(html) | [
"[email protected]"
] | |
66259f17ed43af8cc07fab9f59f2c6e11087508a | e84f8bcf2ea91ac12f9850a6f487b8b6bff09235 | /pyfr/backends/cuda/types.py | 0cc8c1b194cd8f1297244c06bf5c39a0ec500c80 | [
"CC-BY-4.0",
"BSD-3-Clause"
] | permissive | Aerojspark/PyFR | 2bdbbf8a1a0770dc6cf48100dc5f895eb8ab8110 | b59e67f3aa475f7e67953130a45f264f90e2bb92 | refs/heads/master | 2021-01-14T08:51:48.893378 | 2014-09-01T15:02:28 | 2014-09-01T15:02:28 | 24,884,060 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,726 | py | # -*- coding: utf-8 -*-
import collections
import itertools as it
import numpy as np
import pycuda.driver as cuda
import pyfr.backends.base as base
class CUDAMatrixBase(base.MatrixBase):
def onalloc(self, basedata, offset):
self.basedata = int(basedata)
self.data = self.basedata + offset
self.offset = offset
# Process any initial value
if self._initval is not None:
self._set(self._initval)
# Remove
del self._initval
def _get(self):
# Allocate an empty buffer
buf = np.empty(self.datashape, dtype=self.dtype)
# Copy
cuda.memcpy_dtoh(buf, self.data)
# Slice to give the expected I/O shape
return buf[...,:self.ioshape[-1]]
def _set(self, ary):
# Allocate a new buffer with suitable padding and assign
buf = np.zeros(self.datashape, dtype=self.dtype)
buf[...,:self.ioshape[-1]] = ary
# Copy
cuda.memcpy_htod(self.data, buf)
@property
def _as_parameter_(self):
return self.data
def __long__(self):
return self.data
class CUDAMatrix(CUDAMatrixBase, base.Matrix):
def __init__(self, backend, ioshape, initval, extent, tags):
super(CUDAMatrix, self).__init__(backend, backend.fpdtype, ioshape,
initval, extent, tags)
class CUDAMatrixRSlice(base.MatrixRSlice):
@property
def _as_parameter_(self):
return self.parent.basedata + self.offset
def __long__(self):
return self.parent.basedata + self.offset
class CUDAMatrixBank(base.MatrixBank):
def __long__(self):
return self._curr_mat.data
class CUDAConstMatrix(CUDAMatrixBase, base.ConstMatrix):
def __init__(self, backend, initval, extent, tags):
ioshape = initval.shape
super(CUDAConstMatrix, self).__init__(backend, backend.fpdtype,
ioshape, initval, extent, tags)
class CUDAView(base.View):
def __init__(self, backend, matmap, rcmap, stridemap, vshape, tags):
super(CUDAView, self).__init__(backend, matmap, rcmap, stridemap,
vshape, tags)
self.mapping = CUDAMatrixBase(backend, np.int32, (1, self.n),
self.mapping, None, tags)
if self.nvcol > 1:
self.cstrides = CUDAMatrixBase(backend, np.int32, (1, self.n),
self.cstrides, None, tags)
if self.nvrow > 1:
self.rstrides = CUDAMatrixBase(backend, np.int32, (1, self.n),
self.rstrides, None, tags)
class CUDAMPIMatrix(CUDAMatrix, base.MPIMatrix):
def __init__(self, backend, ioshape, initval, extent, tags):
# Call the standard matrix constructor
super(CUDAMPIMatrix, self).__init__(backend, ioshape, initval, extent,
tags)
# Allocate a page-locked buffer on the host for MPI to send/recv from
self.hdata = cuda.pagelocked_empty((self.nrow, self.ncol),
self.dtype, 'C')
class CUDAMPIView(base.MPIView):
pass
class CUDAQueue(base.Queue):
def __init__(self, backend):
super(CUDAQueue, self).__init__(backend)
# Last kernel we executed
self._last = None
# CUDA stream and MPI request list
self._stream_comp = cuda.Stream()
self._stream_copy = cuda.Stream()
self._mpireqs = []
# Items waiting to be executed
self._items = collections.deque()
def __lshift__(self, items):
self._items.extend(items)
def __mod__(self, items):
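        # q % items == "flush the queue, run these items, and flush again",
        # whereas q << items only enqueues them for a later run().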
self.run()
self << items
self.run()
def __nonzero__(self):
return bool(self._items)
def _exec_item(self, item, rtargs):
if item.ktype == 'compute':
item.run(self._stream_comp, self._stream_copy, *rtargs)
elif item.ktype == 'mpi':
item.run(self._mpireqs, *rtargs)
else:
raise ValueError('Non compute/MPI kernel in queue')
self._last = item
def _exec_next(self):
item, rtargs = self._items.popleft()
# If we are at a sequence point then wait for current items
if self._at_sequence_point(item):
self._wait()
# Execute the item
self._exec_item(item, rtargs)
def _exec_nowait(self):
while self._items and not self._at_sequence_point(self._items[0][0]):
self._exec_item(*self._items.popleft())
def _wait(self):
last = self._last
if last and last.ktype == 'compute':
self._stream_comp.synchronize()
self._stream_copy.synchronize()
elif last and last.ktype == 'mpi':
from mpi4py import MPI
MPI.Prequest.Waitall(self._mpireqs)
self._mpireqs = []
self._last = None
def _at_sequence_point(self, item):
return self._last and self._last.ktype != item.ktype
def run(self):
while self._items:
self._exec_next()
self._wait()
@staticmethod
def runall(queues):
# First run any items which will not result in an implicit wait
for q in queues:
q._exec_nowait()
# So long as there are items remaining in the queues
while any(queues):
# Execute a (potentially) blocking item from each queue
for q in it.ifilter(None, queues):
q._exec_next()
q._exec_nowait()
# Wait for all tasks to complete
for q in queues:
q._wait()
| [
"[email protected]"
] | |
91f594c0d623009fa6d5f267254ce89dd81b5e16 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02846/s097636759.py | 3a28136572804ebc45464a64aaca2efeebe9c309 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,115 | py | import sys
sys.setrecursionlimit(10**9)
INF=10**18
def input():
return sys.stdin.readline().rstrip()
def main():
def nibutan(ok,ng):
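        # "nibutan" (二分探索) = binary search: `ok` stays on the side where
        # solve(mid, 2) holds, `ng` on the side where it fails, until adjacent.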
while abs(ok-ng) > 1:
mid = (ok + ng) // 2
if solve(mid,2):
ok = mid
else:
ng = mid
return ok
def solve(mid,n):
dif=(d_0+d_1)*(mid-1)
c=0
if dif*(dif+d_0) == 0:
c+=1
elif dif*(dif+d_0) < 0:
c+=1
if (dif+d_0)*(dif+d_0+d_1) < 0:
c+=1
if c==n:
return True
else:
return False
T=list(map(int,input().split()))
A=list(map(int,input().split()))
B=list(map(int,input().split()))
d_0=T[0]*(A[0]-B[0])
d_1=T[1]*(A[1]-B[1])
if d_0==-d_1:
print('infinity')
elif d_0*(d_0+d_1)<0:
if (d_0*2+d_1)*(d_0*2+d_1*2)<0:
n=nibutan(2,10**40)
ans=n*2-1
ans+=solve(n+1,1)
print(ans)
else:
print(1)
else:
print(0)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
5f43276c56036f6fb66ed1d8b271c4f884b8a619 | b7125b27e564d2cc80a2ce8d0a6f934aa22c8445 | /.history/display_board_20201108143615.py | 98a8605857b33b641a94ead1694ab306c856f3eb | [] | no_license | JensVL96/Puzzle-solver-for-fun | 4c15dcd570c3705b7ac555efb56b52913e81083c | 6d8a4378a480372213a596a336a4deca727a00fc | refs/heads/master | 2021-07-15T05:19:42.185495 | 2020-11-08T13:59:49 | 2020-11-08T13:59:49 | 224,855,888 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,475 | py | from config import *
import pygame as pg
class Display_board():
def __init__(self, screen):
self.screen = screen
self.font_num = pg.font.SysFont("comicsans", NUMBER_SIZE)
self.font_cdt = pg.font.SysFont("comicsans", CANDIDATE_SIZE)
def draw_val(self, val, x, y):
text1 = self.font_num.render(str(val), 1, BLACK)
self.screen.blit(text1, (x * BLOCK_SIZE + 15, y * BLOCK_SIZE + 15))
def draw_cdt(self, val, x, y):
text1 = self.font_cdt.render(str(val), 1, BLACK)
self.screen.blit(text1, (x * BLOCK_SIZE + 1, y * BLOCK_SIZE + 1))
def on_mouse_press(self, x, y, symbol, modifier):
pass
def numbers(self, n):
        # Generator over 0..n-1 that repeats each odd value
        # (builds the cumulative pixel offsets for the thick 3x3 box lines)
for i in range(n):
yield i
if i % 2 == 1:
yield i
def draw(self, grid, cell):
for i in range (9):
for j in range (9):
if grid[i][j] != 0:
if type(grid[i][j]) != int:
self.draw_candidates(grid[i][j], cell)
else:
print(cell[0])
text1 = self.font_num.render(str(grid[i][j]), 1, BLACK)
self.screen.blit(text1, (cell[0] + 15, cell[1] + 14))
indent = []
for i in self.numbers(7):
indent.append(i)
# TLX = TOP_LX - (indent[9] * BOX_INDENT) / 2
# TLY = TOP_LY - (indent[9] * BOX_INDENT) / 2
# TRX = TOP_RX + (indent[9] * BOX_INDENT) / 2
# TRY = TOP_RY - (indent[9] * BOX_INDENT) / 2
# BLX = BOT_LX - (indent[9] * BOX_INDENT) / 2
# BLY = BOT_LY - (indent[9] * BOX_INDENT) / 2
for i in range(NUM_LINES):
#print("\nline: ---", i, "---", "indent: ", indent[i])
if i % 3 == 0:
thick = THICK_LINE
else:
thick = THIN_LINE
# print("(TOP_LX, TOP_LY + ", i, " * ", BLOCK_SIZE, " + ", i, " * ", BOX_INDENT, ")")
# print("(TOP_RX + ", indent[9], " * ", BOX_INDENT, ", TOP_LY + ", i, "*", BLOCK_SIZE, "+", i, "*", BOX_INDENT, "), ", thick)
pg.draw.line(self.screen, BLACK, (TOP_LX,
TOP_LY + i * BLOCK_SIZE + indent[i] * BOX_INDENT),
(TOP_RX + (indent[9] * BOX_INDENT),
TOP_RY + i * BLOCK_SIZE + indent[i] * BOX_INDENT), thick)
pg.draw.line(self.screen, BLACK, (TOP_LX + i * BLOCK_SIZE + indent[i] * BOX_INDENT,
TOP_LY),
(BOT_LX + i * BLOCK_SIZE + indent[i] * BOX_INDENT,
BOT_LY + (indent[9] * BOX_INDENT)), thick)
# 3.5 * -- 0 -- 1 -- 1 -- 2 -- 3 -- 3 -- 4 -- 5 -- 5 -- 6
# 0 --- 45 * 1 + 3.5 --- 45 * 2 + 3.5 --- 45 * 3 + 7 --- 45 * 4 + 10.5 --- 45 * 5 + 10.5 --- 45 * 6 + 14 --- 45 * 7 + 17.5 --- 45 * 8 + 17.5 --- 45 * 9 + 21
# Horizontal
# pg.draw.line(self.screen, BLACK, (TOP_LX, TOP_LY), (TOP_RX + 6 * 3.5, TOP_RY), 7)
# pg.draw.line(self.screen, BLACK, (TOP_LX, TOP_LY + 1 * 45 + 1 * 3.5), (TOP_RX + 6 * 3.5, TOP_RY + 1 * 45 + 1 * 3.5), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX, TOP_LY + 2 * 45 + 1 * 3.5), (TOP_RX + 6 * 3.5, TOP_RY + 2 * 45 + 1 * 3.5), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX, TOP_LY + 3 * 45 + 2 * 3.5), (TOP_RX + 6 * 3.5, TOP_RY + 3 * 45 + 2 * 3.5), 7)
# pg.draw.line(self.screen, BLACK, (TOP_LX, TOP_LY + 4 * 45 + 3 * 3.5), (TOP_RX + 6 * 3.5, TOP_RY + 4 * 45 + 3 * 3.5), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX, TOP_LY + 5 * 45 + 3 * 3.5), (TOP_RX + 6 * 3.5, TOP_RY + 5 * 45 + 3 * 3.5), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX, TOP_LY + 6 * 45 + 4 * 3.5), (TOP_RX + 6 * 3.5, TOP_RY + 6 * 45 + 4 * 3.5), 7)
# pg.draw.line(self.screen, BLACK, (TOP_LX, TOP_LY + 7 * 45 + 5 * 3.5), (TOP_RX + 6 * 3.5, TOP_RY + 7 * 45 + 5 * 3.5), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX, TOP_LY + 8 * 45 + 5 * 3.5), (TOP_RX + 6 * 3.5, TOP_RY + 8 * 45 + 5 * 3.5), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX, TOP_LY + 9 * 45 + 6 * 3.5), (TOP_RX + 6 * 3.5, TOP_RY + 9 * 45 + 6 * 3.5), 7)
# Vertical
# pg.draw.line(self.screen, BLACK, (TOP_LX, TOP_LY), (BOT_LX, BOT_LY + 21), 7)
# pg.draw.line(self.screen, BLACK, (TOP_LX + 48.5, TOP_LY), (BOT_LX + 48.5, BOT_LY + 21), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX + 48.5 + 45, TOP_LY), (BOT_LX + 48.5 + 45, BOT_LY + 21), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX + 52 + 45 + 45, TOP_LY), (BOT_LX + 52 + 45 + 45, BOT_LY + 21), 7)
# pg.draw.line(self.screen, BLACK, (TOP_LX + 52 + 45 + 45 + 48.5, TOP_LY), (BOT_LX + 52 + 45 + 45 + 48.5, BOT_LY + 21), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX + 52 + 45 + 45 + 48.5 + 45, TOP_LY), (BOT_LX + 52 + 45 + 45 + 48.5 + 45, BOT_LY + 21), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX + 52 + 45 + 45 + 52 + 45 + 45, TOP_LY), (BOT_LX + 52 + 45 + 45 + 52 + 45 + 45, BOT_LY + 21), 7)
# pg.draw.line(self.screen, BLACK, (TOP_LX + 52 + 45 + 45 + 52 + 45 + 45 + 48.5, TOP_LY), (BOT_LX + 52 + 45 + 45 + 52 + 45 + 45 + 48.5, BOT_LY + 21), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX + 52 + 45 + 45 + 52 + 45 + 45 + 48.5 + 45, TOP_LY), (BOT_LX + 52 + 45 + 45 + 52 + 45 + 45 + 48.5 + 45, BOT_LY + 21), 1)
# pg.draw.line(self.screen, BLACK, (TOP_LX + 52 + 45 + 45 + 52 + 45 + 45 + 52 + 45 + 45, TOP_LY), (BOT_LX + 52 + 45 + 45 + 52 + 45 + 45 + 52 + 45 + 45, BOT_LY + 21), 7)
# For candidate placement
if i % 3 == 0:
pg.draw.line(self.screen, BLACK, (cell[0],
cell[1] + i * (cell[2] / 9)),
((cell[0] + cell[2]),
cell[1] + i * (cell[2] / 9)), 1)
pg.draw.line(self.screen, BLACK, (cell[0] + i * (cell[3] / 9),
cell[1]),
(cell[0] + i * (cell[3] / 9),
cell[1] + cell[3]), 1)
def draw_candidates(self, grid, cell):
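        # Lays the candidate digits out as a 3x3 grid inside the cell:
        # `iteration` tracks the column (1..3), `new_line` the row, `indent` the pixel step.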
new_line = 1
iteration = 1
indent = 15
for number in grid:
if iteration % 3 == 1: # Checking if first in line: 1, 4, 7
text1 = self.font_cdt.render(str(number), 1, BLACK)
self.screen.blit(text1, (cell[0] + 3, cell[1] + ((new_line - 1) * indent) + 2))
else:
text1 = self.font_cdt.render(str(number), 1, BLACK)
self.screen.blit(text1, (cell[0] + ((iteration - 1) * indent) + 3, cell[1] + ((new_line - 1) * indent) + 2))
if iteration % 3 == 0: # checking if last in line: 3, 6
new_line += 1
iteration = 0
iteration += 1
def update(self, grid, row, col, blk):
font_val = pg.font.SysFont("comicsans", BOLD)
if row != (-1, -1):
# Remove old number
text1 = self.font_num.render(str(grid[row[0]][row[1]]), 1, WHITE)
self.screen.blit(text1, (TOP_LX + row[0] * BLOCK_SIZE + 15, TOP_LY + row[1] * BLOCK_SIZE + 15))
# Rewrite in bigger font
text1 = font_val.render(str(grid[row[0]][row[1]]), 1, BLACK)
self.screen.blit(text1, (TOP_LX + row[0] * BLOCK_SIZE + 14, TOP_LY + row[1] * BLOCK_SIZE + 10))
if col != (-1, -1):
# Remove old number
text1 = self.font_num.render(str(grid[col[0]][col[1]]), 1, WHITE)
self.screen.blit(text1, (TOP_LX + col[0] * BLOCK_SIZE + 15, TOP_LY + col[1] * BLOCK_SIZE + 15))
# Rewrite in bigger font
text1 = font_val.render(str(grid[col[0]][col[1]]), 1, BLACK)
self.screen.blit(text1, (TOP_LX + col[0] * BLOCK_SIZE + 14, TOP_LY + col[1] * BLOCK_SIZE + 10))
if blk != (-1, -1):
# Remove old number
text1 = self.font_num.render(str(grid[blk[0]][blk[1]]), 1, WHITE)
self.screen.blit(text1, (TOP_LX + blk[0] * BLOCK_SIZE + 15, TOP_LY + blk[1] * BLOCK_SIZE + 15))
# Rewrite in bigger font
text1 = font_val.render(str(grid[blk[0]][blk[1]]), 1, BLACK)
self.screen.blit(text1, (TOP_LX + blk[0] * BLOCK_SIZE + 14, TOP_LY + blk[1] * BLOCK_SIZE + 10))
def find_cell(self, x, y):
# Only applies glow when a cell is selected
if x == -1 and y == -1:
return
width = BLOCK_SIZE
height = BLOCK_SIZE
block_x = block_y = 1
# Adjustment in size if bordering a thick line
# print("cell: ", x, y)
# if x % 3 == 0: # If thick line on the left
# print("column 1, 4 or 7")
# else:
# start_pos_x = TOP_LX + x * BLOCK_SIZE + block * 3.5
temp_x = x
for i in range(3):
if temp_x - 3 >= 0:
block_x += 2
temp_x += -3
i += 1
# print("block x: ", block_x)
# print("extra indent x:", block_x * 3.5)
start_pos_x = TOP_LX + x * BLOCK_SIZE + block_x * 3.5
# if (x + 1) % 3 == 0: # If thick line on the right
# print("column 3, 6 or 9")
# if y % 3 == 0: # If thick line on the top
# print("row 1, 4 or 7")
# else:
# start_pos_y = TOP_LY + y * BLOCK_SIZE# + 1
# if (y + 1) % 3 == 0: # If thick line on the bottom
# print("row 3, 6 or 9")
temp_y = y
for i in range(3):
if temp_y - 3 >= 0:
block_y += 2
temp_y += -3
i += 1
# print("block y: ", block_y)
# print("extra indent y:", block_x * 3.5)
start_pos_y = TOP_LY + y * BLOCK_SIZE + block_y * 3.5
return (start_pos_x, start_pos_y, width, height)
def blink(self, alpha, a_change):
if a_change:
alpha += BLINK_SPEED
if alpha >= 175:
a_change = False
elif a_change == False:
alpha += -BLINK_SPEED
if alpha <= 30:
a_change = True
return (alpha, a_change)
| [
"[email protected]"
] | |
70001b56d298f5befbbcdf00e94f61e060b46a96 | 21b0483666d8e5cbdc4a911bda93e1a3392c40ec | /lib/initialConditions.py | 1f3e0b6022939d9fa360915afc8482217719f223 | [] | no_license | Christopher-Bradshaw/fluids_final | 0541111323c640b40ee86f970acb896689bbb867 | 2e33b2ef04fdbd40760c1804a02c86c93c5fd926 | refs/heads/master | 2021-08-24T03:13:43.132099 | 2017-12-07T20:33:31 | 2017-12-07T20:33:31 | 113,245,782 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,193 | py | import numpy as np
def getPressure(energy, volume, gamma):
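    # Ideal-gas-style closure: p = (gamma - 1) * e / V,
    # with e the zone's internal energy and V its specific volume (1 / rho).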
return energy * (gamma - 1) / volume
def getFlatConfig():
dx = 1
width = 5
gamma = 5/3
# Densities
initialRho = np.ones(width) # this will never change
summedInitialRho = np.array([
initialRho[i] + initialRho[i+1] for i in range(len(initialRho)-1)
])
# The grid
grid = np.zeros(width + 1, dtype=[
("position", "float64"),
("velocity", "float64"),
])
grid["position"] = np.arange(0, width + 1, dx)
grid["velocity"] = np.zeros_like(grid["position"])
grid["velocity"][0] = 0
grid["velocity"][-1] = 0
# Things defined in the gaps
gaps = np.zeros(width, dtype=[
("volume", "float64"),
("viscocity", "float64"),
("energy", "float64"),
("pressure", "float64"),
])
gaps["volume"] = 1/initialRho
gaps["viscocity"] = np.zeros(width)
gaps["energy"] = np.ones(width)
gaps["pressure"] = getPressure(gaps["energy"], gaps["volume"], gamma)
return {
"grid": grid,
"gaps": gaps,
"initialRho": initialRho,
"summedInitialRho": summedInitialRho,
"dx": dx,
"width": width,
"gamma": gamma,
}
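# Illustrative usage sketch (field names taken from the dict returned above):
#     config = getFlatConfig()
#     config["grid"]["position"]     # node positions, length width + 1
#     config["gaps"]["pressure"]     # zone pressures from getPressure()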
def getVelocityConfig():
config = getFlatConfig()
config["grid"]["velocity"][1:-1] += 0.01
return config
def getShockTubeConfig():
dx = 1
width = 100
gamma = 5/3
# Densities
initialRho = np.ones(width) # this will never change
# initialRho[3] = 1.1
initialRho[:50] = 1.1
summedInitialRho = np.array([
initialRho[i] + initialRho[i+1] for i in range(len(initialRho)-1)
])
# The grid
grid = np.zeros(width + 1, dtype=[
("position", "float64"),
("velocity", "float64"),
])
grid["position"] = np.arange(0, width + 1, dx)
grid["velocity"] = np.zeros_like(grid["position"])
grid["velocity"][0] = 0
grid["velocity"][-1] = 0
# Things defined in the gaps
gaps = np.zeros(width, dtype=[
("volume", "float64"),
("viscocity", "float64"),
("energy", "float64"),
("pressure", "float64"),
])
gaps["volume"] = 1/initialRho
gaps["viscocity"] = np.zeros(width) # should we / can we give initial viscocity?
gaps["energy"] = 1 * initialRho
gaps["pressure"] = getPressure(gaps["energy"], gaps["volume"], gamma)
return {
"grid": grid,
"gaps": gaps,
"initialRho": initialRho,
"summedInitialRho": summedInitialRho,
"dx": dx,
"width": width,
"gamma": gamma,
}
def getExpansionConfig():
dx = 1
width = 100
gamma = 5/3
# Densities
initialRho = np.ones(width) # this will never change
# initialRho[3] = 1.1
initialRho[50:] = 0.1
summedInitialRho = np.array([
initialRho[i] + initialRho[i+1] for i in range(len(initialRho)-1)
])
# The grid
grid = np.zeros(width + 1, dtype=[
("position", "float64"),
("velocity", "float64"),
])
grid["position"] = np.arange(0, width + 1, dx)
grid["velocity"] = np.zeros_like(grid["position"])
grid["velocity"][0] = 0
grid["velocity"][-1] = 0
# Things defined in the gaps
gaps = np.zeros(width, dtype=[
("volume", "float64"),
("viscocity", "float64"),
("energy", "float64"),
("pressure", "float64"),
])
gaps["volume"] = 1/initialRho
gaps["viscocity"] = np.zeros(width) # should we / can we give initial viscocity?
gaps["energy"] = 1 * initialRho
gaps["pressure"] = getPressure(gaps["energy"], gaps["volume"], gamma)
return {
"grid": grid,
"gaps": gaps,
"initialRho": initialRho,
"summedInitialRho": summedInitialRho,
"dx": dx,
"width": width,
"gamma": gamma,
}
def getSedovConfig():
dx = 1
width = 100
gamma = 5/3
# Densities
initialRho = np.ones(width) # this will never change
summedInitialRho = np.array([
initialRho[i] + initialRho[i+1] for i in range(len(initialRho)-1)
])
# The grid
grid = np.zeros(width + 1, dtype=[
("position", "float64"),
("velocity", "float64"),
])
grid["position"] = np.arange(0, width + 1, dx)
grid["velocity"] = np.zeros_like(grid["position"])
grid["velocity"][0] = 0
grid["velocity"][-1] = 0
# Things defined in the gaps
gaps = np.zeros(width, dtype=[
("volume", "float64"),
("viscocity", "float64"),
("energy", "float64"),
("pressure", "float64"),
])
gaps["volume"] = 1/initialRho
gaps["viscocity"] = np.zeros(width) # should we / can we give initial viscocity?
gaps["energy"] = 1 * initialRho
gaps["energy"][0] *= 2
gaps["pressure"] = getPressure(gaps["energy"], gaps["volume"], gamma)
return {
"grid": grid,
"gaps": gaps,
"initialRho": initialRho,
"summedInitialRho": summedInitialRho,
"dx": dx,
"width": width,
"gamma": gamma,
}
| [
"[email protected]"
] | |
40b4fc7442a3dca396d30cd384a4df70fbca793d | a6d8465aed280c36fb7129e1fa762535bae19941 | /embroidery365/builder/migrations/0015_auto_20171107_1318.py | e8fb24e2d785b3b21a4799b1ab238de547240bcb | [] | no_license | rahuezo/365digitizing_and_embroidery | c61c53f567e73163a67d3fd568a20551a3681ccd | 41a22b6ff8bd83238219f2d34ce13b5a8ef9bb57 | refs/heads/master | 2020-09-02T11:59:07.702947 | 2017-11-11T02:40:01 | 2017-11-11T02:40:01 | 98,377,801 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 493 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.2 on 2017-11-07 21:18
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('builder', '0014_order_extra_details'),
]
operations = [
migrations.AlterField(
model_name='order',
name='extra_details',
field=models.TextField(blank=True, default='No Specifications Included'),
),
]
| [
"[email protected]"
] | |
2ef93f787a9d83908066ad2e141bcdc977dc2348 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/cirq_new/cirq_program/startCirq_pragma99.py | e33f306588051a905793954fdd141d45e8a365b0 | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,468 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 5/15/20 4:49 PM
# @File : grover.py
# qubit number=4
# total number=11
import cirq
import cirq.google as cg
from typing import Optional
import sys
from math import log2
import numpy as np
class Opty(cirq.PointOptimizer):
def optimization_at(
self,
circuit: 'cirq.Circuit',
index: int,
op: 'cirq.Operation'
) -> Optional[cirq.PointOptimizationSummary]:
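        # Rewrites any CZPowGate as an exact CZ followed by two pairs of X gates;
        # the X pairs cancel, so the net effect is just the CZ substitution.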
if (isinstance(op, cirq.ops.GateOperation) and isinstance(op.gate, cirq.CZPowGate)):
return cirq.PointOptimizationSummary(
clear_span=1,
clear_qubits=op.qubits,
new_operations=[
cirq.CZ(*op.qubits),
cirq.X.on_each(*op.qubits),
cirq.X.on_each(*op.qubits),
]
)
#thatsNoCode
def make_circuit(n: int, input_qubit):
c = cirq.Circuit() # circuit begin
c.append(cirq.H.on(input_qubit[0])) # number=1
c.append(cirq.H.on(input_qubit[1])) # number=2
c.append(cirq.H.on(input_qubit[2])) # number=3
c.append(cirq.H.on(input_qubit[3])) # number=4
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=5
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=6
c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=7
c.append(cirq.CNOT.on(input_qubit[3],input_qubit[0])) # number=8
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=9
c.append(cirq.SWAP.on(input_qubit[1],input_qubit[0])) # number=10
# circuit end
c.append(cirq.measure(*input_qubit, key='result'))
return c
def bitstring(bits):
return ''.join(str(int(b)) for b in bits)
if __name__ == '__main__':
qubit_count = 4
input_qubits = [cirq.GridQubit(i, 0) for i in range(qubit_count)]
circuit = make_circuit(qubit_count,input_qubits)
circuit = cg.optimized_for_sycamore(circuit, optimizer_type='sqrt_iswap')
circuit_sample_count =2000
simulator = cirq.Simulator()
result = simulator.run(circuit, repetitions=circuit_sample_count)
frequencies = result.histogram(key='result', fold_func=bitstring)
writefile = open("../data/startCirq_pragma99.csv","w+")
print(format(frequencies),file=writefile)
print("results end", file=writefile)
print(circuit.__len__(), file=writefile)
print(circuit,file=writefile)
writefile.close() | [
"[email protected]"
] | |
b723760ee19970314f9f76ce4761b88748adc393 | d90af0def0e29ebaebcf986399fcee65e1e2916c | /python/PDB/HSExposure.py | 779b719cece4c85738eaaab62c3ecb814a685d26 | [
"LicenseRef-scancode-biopython"
] | permissive | Zaiyong/csrosetta | 2fdbbdd7da24ce971f7f2297a7cd14723cdd59d6 | 539c60664dba3972062002ff4e636c7f029927cb | refs/heads/master | 2020-12-25T15:18:39.274689 | 2020-02-25T09:15:35 | 2020-02-25T09:15:35 | 65,408,072 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,412 | py | # Copyright (C) 2002, Thomas Hamelryck ([email protected])
# This code is part of the Biopython distribution and governed by its
# license. Please see the LICENSE file that should have been included
# as part of this package.
"""Half-sphere exposure and coordination number calculation."""
import warnings
from math import pi
from PDB.AbstractPropertyMap import AbstractPropertyMap
from PDB.PDBParser import PDBParser
from PDB.Polypeptide import CaPPBuilder, is_aa
from PDB.Vector import rotaxis
class _AbstractHSExposure(AbstractPropertyMap):
"""
Abstract class to calculate Half-Sphere Exposure (HSE).
The HSE can be calculated based on the CA-CB vector, or the pseudo CB-CA
vector based on three consecutive CA atoms. This is done by two separate
subclasses.
"""
def __init__(self, model, radius, offset, hse_up_key, hse_down_key,
angle_key=None):
"""
@param model: model
@type model: L{Model}
@param radius: HSE radius
@type radius: float
@param offset: number of flanking residues that are ignored in the calculation
of the number of neighbors
@type offset: int
@param hse_up_key: key used to store HSEup in the entity.xtra attribute
@type hse_up_key: string
@param hse_down_key: key used to store HSEdown in the entity.xtra attribute
@type hse_down_key: string
@param angle_key: key used to store the angle between CA-CB and CA-pCB in
the entity.xtra attribute
@type angle_key: string
"""
assert(offset>=0)
# For PyMOL visualization
self.ca_cb_list=[]
ppb=CaPPBuilder()
ppl=ppb.build_peptides(model)
hse_map={}
hse_list=[]
hse_keys=[]
for pp1 in ppl:
for i in range(0, len(pp1)):
if i==0:
r1=None
else:
r1=pp1[i-1]
r2=pp1[i]
if i==len(pp1)-1:
r3=None
else:
r3=pp1[i+1]
# This method is provided by the subclasses to calculate HSE
result=self._get_cb(r1, r2, r3)
if result is None:
# Missing atoms, or i==0, or i==len(pp1)-1
continue
pcb, angle=result
hse_u=0
hse_d=0
ca2=r2['CA'].get_vector()
for pp2 in ppl:
for j in range(0, len(pp2)):
if pp1 is pp2 and abs(i-j)<=offset:
# neighboring residues in the chain are ignored
continue
ro=pp2[j]
if not is_aa(ro) or not ro.has_id('CA'):
continue
cao=ro['CA'].get_vector()
d=(cao-ca2)
if d.norm()<radius:
if d.angle(pcb)<(pi/2):
hse_u+=1
else:
hse_d+=1
res_id=r2.get_id()
chain_id=r2.get_parent().get_id()
# Fill the 3 data structures
hse_map[(chain_id, res_id)]=(hse_u, hse_d, angle)
hse_list.append((r2, (hse_u, hse_d, angle)))
hse_keys.append((chain_id, res_id))
# Add to xtra
r2.xtra[hse_up_key]=hse_u
r2.xtra[hse_down_key]=hse_d
if angle_key:
r2.xtra[angle_key]=angle
AbstractPropertyMap.__init__(self, hse_map, hse_keys, hse_list)
def _get_cb(self, r1, r2, r3):
"""This method is provided by the subclasses to calculate HSE."""
return NotImplemented
def _get_gly_cb_vector(self, residue):
"""
Return a pseudo CB vector for a Gly residue.
The pseudoCB vector is centered at the origin.
CB coord=N coord rotated over -120 degrees
along the CA-C axis.
"""
try:
n_v=residue["N"].get_vector()
c_v=residue["C"].get_vector()
ca_v=residue["CA"].get_vector()
except:
return None
# center at origin
n_v=n_v-ca_v
c_v=c_v-ca_v
# rotation around c-ca over -120 deg
rot=rotaxis(-pi*120.0/180.0, c_v)
cb_at_origin_v=n_v.left_multiply(rot)
# move back to ca position
cb_v=cb_at_origin_v+ca_v
# This is for PyMol visualization
self.ca_cb_list.append((ca_v, cb_v))
return cb_at_origin_v
class HSExposureCA(_AbstractHSExposure):
"""
Class to calculate HSE based on the approximate CA-CB vectors,
using three consecutive CA positions.
"""
def __init__(self, model, radius=12, offset=0):
"""
@param model: the model that contains the residues
@type model: L{Model}
@param radius: radius of the sphere (centred at the CA atom)
@type radius: float
@param offset: number of flanking residues that are ignored in the calculation of the number of neighbors
@type offset: int
"""
_AbstractHSExposure.__init__(self, model, radius, offset,
'EXP_HSE_A_U', 'EXP_HSE_A_D', 'EXP_CB_PCB_ANGLE')
def _get_cb(self, r1, r2, r3):
"""
Calculate the approximate CA-CB direction for a central
CA atom based on the two flanking CA positions, and the angle
with the real CA-CB vector.
The CA-CB vector is centered at the origin.
@param r1, r2, r3: three consecutive residues
@type r1, r2, r3: L{Residue}
"""
if r1 is None or r3 is None:
return None
try:
ca1=r1['CA'].get_vector()
ca2=r2['CA'].get_vector()
ca3=r3['CA'].get_vector()
except:
return None
# center
d1=ca2-ca1
d3=ca2-ca3
d1.normalize()
d3.normalize()
# bisection
b=(d1+d3)
b.normalize()
# Add to ca_cb_list for drawing
self.ca_cb_list.append((ca2, b+ca2))
if r2.has_id('CB'):
cb=r2['CB'].get_vector()
cb_ca=cb-ca2
cb_ca.normalize()
angle=cb_ca.angle(b)
elif r2.get_resname()=='GLY':
cb_ca=self._get_gly_cb_vector(r2)
if cb_ca is None:
angle=None
else:
angle=cb_ca.angle(b)
else:
angle=None
# vector b is centered at the origin!
return b, angle
def pcb_vectors_pymol(self, filename="hs_exp.py"):
"""
Write a PyMol script that visualizes the pseudo CB-CA directions
at the CA coordinates.
@param filename: the name of the pymol script file
@type filename: string
"""
if len(self.ca_cb_list)==0:
warnings.warn("Nothing to draw.", RuntimeWarning)
return
fp=open(filename, "w")
fp.write("from pymol.cgo import *\n")
fp.write("from pymol import cmd\n")
fp.write("obj=[\n")
fp.write("BEGIN, LINES,\n")
fp.write("COLOR, %.2f, %.2f, %.2f,\n" % (1.0, 1.0, 1.0))
for (ca, cb) in self.ca_cb_list:
x,y,z=ca.get_array()
fp.write("VERTEX, %.2f, %.2f, %.2f,\n" % (x,y,z))
x,y,z=cb.get_array()
fp.write("VERTEX, %.2f, %.2f, %.2f,\n" % (x,y,z))
fp.write("END]\n")
fp.write("cmd.load_cgo(obj, 'HS')\n")
fp.close()
class HSExposureCB(_AbstractHSExposure):
"""
Class to calculate HSE based on the real CA-CB vectors.
"""
def __init__(self, model, radius=12, offset=0):
"""
@param model: the model that contains the residues
@type model: L{Model}
@param radius: radius of the sphere (centred at the CA atom)
@type radius: float
@param offset: number of flanking residues that are ignored in the calculation of the number of neighbors
@type offset: int
"""
_AbstractHSExposure.__init__(self, model, radius, offset,
'EXP_HSE_B_U', 'EXP_HSE_B_D')
def _get_cb(self, r1, r2, r3):
"""
Method to calculate CB-CA vector.
@param r1, r2, r3: three consecutive residues (only r2 is used)
@type r1, r2, r3: L{Residue}
"""
if r2.get_resname()=='GLY':
return self._get_gly_cb_vector(r2), 0.0
else:
if r2.has_id('CB') and r2.has_id('CA'):
vcb=r2['CB'].get_vector()
vca=r2['CA'].get_vector()
return (vcb-vca), 0.0
return None
class ExposureCN(AbstractPropertyMap):
def __init__(self, model, radius=12.0, offset=0):
"""
A residue's exposure is defined as the number of CA atoms around
that residues CA atom. A dictionary is returned that uses a L{Residue}
object as key, and the residue exposure as corresponding value.
@param model: the model that contains the residues
@type model: L{Model}
@param radius: radius of the sphere (centred at the CA atom)
@type radius: float
@param offset: number of flanking residues that are ignored in the calculation of the number of neighbors
@type offset: int
"""
assert(offset>=0)
ppb=CaPPBuilder()
ppl=ppb.build_peptides(model)
fs_map={}
fs_list=[]
fs_keys=[]
for pp1 in ppl:
for i in range(0, len(pp1)):
fs=0
r1=pp1[i]
if not is_aa(r1) or not r1.has_id('CA'):
continue
ca1=r1['CA']
for pp2 in ppl:
for j in range(0, len(pp2)):
if pp1 is pp2 and abs(i-j)<=offset:
continue
r2=pp2[j]
if not is_aa(r2) or not r2.has_id('CA'):
continue
ca2=r2['CA']
d=(ca2-ca1)
if d<radius:
fs+=1
res_id=r1.get_id()
chain_id=r1.get_parent().get_id()
# Fill the 3 data structures
fs_map[(chain_id, res_id)]=fs
fs_list.append((r1, fs))
fs_keys.append((chain_id, res_id))
# Add to xtra
r1.xtra['EXP_CN']=fs
AbstractPropertyMap.__init__(self, fs_map, fs_keys, fs_list)
if __name__=="__main__":
import sys
p=PDBParser()
s=p.get_structure('X', sys.argv[1])
model=s[0]
# Neighbor sphere radius
RADIUS=13.0
OFFSET=0
hse=HSExposureCA(model, radius=RADIUS, offset=OFFSET)
for l in hse:
print l
print
hse=HSExposureCB(model, radius=RADIUS, offset=OFFSET)
for l in hse:
print l
print
hse=ExposureCN(model, radius=RADIUS, offset=OFFSET)
for l in hse:
print l
print
for c in model:
for r in c:
try:
print r.xtra['PCB_CB_ANGLE']
except:
pass
| [
"[email protected]"
] | |
7dfcd4cd24d5ed5f45978adad9a7d5d79e6a8df9 | 691793de7d07b17918d076b319281c706f7275c0 | /signing_today_client/api_client.py | 98948f9c78f3f2cce8197afcb845c184beb78226 | [
"MIT"
] | permissive | signingtoday/signingtoday-sdk-python | 1ddfae5340690c80760c500436631d4a8ff9c87f | ed267279622fb59f2ad8fa289157fc9cdf9d8a5b | refs/heads/master | 2020-12-03T15:32:35.755222 | 2020-03-24T08:27:11 | 2020-03-24T08:27:11 | 231,372,803 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,011 | py | # coding: utf-8
"""
Signing Today Web
*Signing Today* is the perfect Digital Signature Gateway. Whenever in Your workflow You need to add one or more Digital Signatures to Your document, *Signing Today* is the right choice. You prepare Your documents, *Signing Today* takes care of all the rest: send invitations (`signature tickets`) to signers, collects their signatures, send You back the signed document. Integrating *Signing Today* in Your existing applications is very easy. Just follow these API specifications and get inspired by the many examples presented hereafter. # noqa: E501
The version of the OpenAPI document: 2.0.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import atexit
import datetime
from dateutil.parser import parse
import json
import mimetypes
from multiprocessing.pool import ThreadPool
import os
import re
import tempfile
# python 2 and python 3 compatibility library
import six
from six.moves.urllib.parse import quote
from signing_today_client.configuration import Configuration
import signing_today_client.models
from signing_today_client import rest
from signing_today_client.exceptions import ApiValueError
class ApiClient(object):
"""Generic API client for OpenAPI client library builds.
OpenAPI generic API client. This client handles the client-
server communication, and is invariant across implementations. Specifics of
the methods and models for each application are generated from the OpenAPI
templates.
NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
:param configuration: .Configuration object for this client
:param header_name: a header to pass when making calls to the API.
:param header_value: a header value to pass when making calls to
the API.
:param cookie: a cookie to include in the header when making calls
to the API
:param pool_threads: The number of threads to use for async requests
to the API. More threads means more concurrent API requests.
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long, # noqa: F821
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
_pool = None
def __init__(self, configuration=None, header_name=None, header_value=None,
cookie=None, pool_threads=1):
if configuration is None:
configuration = Configuration()
self.configuration = configuration
self.pool_threads = pool_threads
self.rest_client = rest.RESTClientObject(configuration)
self.default_headers = {}
if header_name is not None:
self.default_headers[header_name] = header_value
self.cookie = cookie
# Set default User-Agent.
self.user_agent = 'OpenAPI-Generator/1.0.0/python'
self.client_side_validation = configuration.client_side_validation
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def close(self):
if self._pool:
self._pool.close()
self._pool.join()
self._pool = None
if hasattr(atexit, 'unregister'):
atexit.unregister(self.close)
@property
def pool(self):
"""Create thread pool on first request
avoids instantiating unused threadpool for blocking clients.
"""
if self._pool is None:
atexit.register(self.close)
self._pool = ThreadPool(self.pool_threads)
return self._pool
@property
def user_agent(self):
"""User agent for this API client"""
return self.default_headers['User-Agent']
@user_agent.setter
def user_agent(self, value):
self.default_headers['User-Agent'] = value
def set_default_header(self, header_name, header_value):
self.default_headers[header_name] = header_value
def __call_api(
self, resource_path, method, path_params=None,
query_params=None, header_params=None, body=None, post_params=None,
files=None, response_type=None, auth_settings=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None, _host=None):
config = self.configuration
# header parameters
header_params = header_params or {}
header_params.update(self.default_headers)
if self.cookie:
header_params['Cookie'] = self.cookie
if header_params:
header_params = self.sanitize_for_serialization(header_params)
header_params = dict(self.parameters_to_tuples(header_params,
collection_formats))
# path parameters
if path_params:
path_params = self.sanitize_for_serialization(path_params)
path_params = self.parameters_to_tuples(path_params,
collection_formats)
for k, v in path_params:
# specified safe chars, encode everything
resource_path = resource_path.replace(
'{%s}' % k,
quote(str(v), safe=config.safe_chars_for_path_param)
)
# query parameters
if query_params:
query_params = self.sanitize_for_serialization(query_params)
query_params = self.parameters_to_tuples(query_params,
collection_formats)
# post parameters
if post_params or files:
post_params = post_params if post_params else []
post_params = self.sanitize_for_serialization(post_params)
post_params = self.parameters_to_tuples(post_params,
collection_formats)
post_params.extend(self.files_parameters(files))
# auth setting
self.update_params_for_auth(header_params, query_params, auth_settings)
# body
if body:
body = self.sanitize_for_serialization(body)
# request url
if _host is None:
url = self.configuration.host + resource_path
else:
# use server/host defined in path or operation instead
url = _host + resource_path
# perform request and return response
response_data = self.request(
method, url, query_params=query_params, headers=header_params,
post_params=post_params, body=body,
_preload_content=_preload_content,
_request_timeout=_request_timeout)
self.last_response = response_data
return_data = response_data
if _preload_content:
# deserialize response data
if response_type:
return_data = self.deserialize(response_data, response_type)
else:
return_data = None
if _return_http_data_only:
return (return_data)
else:
return (return_data, response_data.status,
response_data.getheaders())
def sanitize_for_serialization(self, obj):
"""Builds a JSON POST object.
If obj is None, return None.
If obj is str, int, long, float, bool, return directly.
If obj is datetime.datetime, datetime.date
convert to string in iso8601 format.
If obj is list, sanitize each element in the list.
If obj is dict, return the dict.
If obj is OpenAPI model, return the properties dict.
:param obj: The data to serialize.
:return: The serialized form of data.
"""
if obj is None:
return None
elif isinstance(obj, self.PRIMITIVE_TYPES):
return obj
elif isinstance(obj, list):
return [self.sanitize_for_serialization(sub_obj)
for sub_obj in obj]
elif isinstance(obj, tuple):
return tuple(self.sanitize_for_serialization(sub_obj)
for sub_obj in obj)
elif isinstance(obj, (datetime.datetime, datetime.date)):
return obj.isoformat()
if isinstance(obj, dict):
obj_dict = obj
else:
# Convert model obj to dict except
# attributes `openapi_types`, `attribute_map`
# and attributes which value is not None.
# Convert attribute name to json key in
# model definition for request.
obj_dict = {obj.attribute_map[attr]: getattr(obj, attr)
for attr, _ in six.iteritems(obj.openapi_types)
if getattr(obj, attr) is not None}
return {key: self.sanitize_for_serialization(val)
for key, val in six.iteritems(obj_dict)}
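    # Illustrative sketch of the behaviour above (values assumed):
    #     client.sanitize_for_serialization({"when": datetime.date(2020, 1, 2)})
    #     # -> {"when": "2020-01-02"}    (dates/datetimes become ISO-8601 strings)
    #     client.sanitize_for_serialization([1, "a", None])
    #     # -> [1, "a", None]            (primitives and None pass through)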
def deserialize(self, response, response_type):
"""Deserializes response into an object.
:param response: RESTResponse object to be deserialized.
:param response_type: class literal for
deserialized object, or string of class name.
:return: deserialized object.
"""
# handle file downloading
# save response body into a tmp file and return the instance
if response_type == "file":
return self.__deserialize_file(response)
# fetch data from response object
try:
data = json.loads(response.data)
except ValueError:
data = response.data
return self.__deserialize(data, response_type)
def __deserialize(self, data, klass):
"""Deserializes dict, list, str into an object.
:param data: dict, list or str.
:param klass: class literal, or string of class name.
:return: object.
"""
if data is None:
return None
if type(klass) == str:
if klass.startswith('list['):
sub_kls = re.match(r'list\[(.*)\]', klass).group(1)
return [self.__deserialize(sub_data, sub_kls)
for sub_data in data]
if klass.startswith('dict('):
sub_kls = re.match(r'dict\(([^,]*), (.*)\)', klass).group(2)
return {k: self.__deserialize(v, sub_kls)
for k, v in six.iteritems(data)}
# convert str to class
if klass in self.NATIVE_TYPES_MAPPING:
klass = self.NATIVE_TYPES_MAPPING[klass]
else:
klass = getattr(signing_today_client.models, klass)
if klass in self.PRIMITIVE_TYPES:
return self.__deserialize_primitive(data, klass)
elif klass == object:
return self.__deserialize_object(data)
elif klass == datetime.date:
return self.__deserialize_date(data)
elif klass == datetime.datetime:
return self.__deserialize_datetime(data)
else:
return self.__deserialize_model(data, klass)
def call_api(self, resource_path, method,
path_params=None, query_params=None, header_params=None,
body=None, post_params=None, files=None,
response_type=None, auth_settings=None, async_req=None,
_return_http_data_only=None, collection_formats=None,
_preload_content=True, _request_timeout=None, _host=None):
"""Makes the HTTP request (synchronous) and returns deserialized data.
To make an async_req request, set the async_req parameter.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be
placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response: Response data type.
:param files dict: key -> filename, value -> filepath,
for `multipart/form-data`.
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without head status code
and headers
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return:
If async_req parameter is True,
the request will be called asynchronously.
The method will return the request thread.
If parameter async_req is False or missing,
then the method will return the response directly.
"""
if not async_req:
return self.__call_api(resource_path, method,
path_params, query_params, header_params,
body, post_params, files,
response_type, auth_settings,
_return_http_data_only, collection_formats,
_preload_content, _request_timeout, _host)
return self.pool.apply_async(self.__call_api, (resource_path,
method, path_params,
query_params,
header_params, body,
post_params, files,
response_type,
auth_settings,
_return_http_data_only,
collection_formats,
_preload_content,
_request_timeout,
_host))
def request(self, method, url, query_params=None, headers=None,
post_params=None, body=None, _preload_content=True,
_request_timeout=None):
"""Makes the HTTP request using RESTClient."""
if method == "GET":
return self.rest_client.GET(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "HEAD":
return self.rest_client.HEAD(url,
query_params=query_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
headers=headers)
elif method == "OPTIONS":
return self.rest_client.OPTIONS(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout)
elif method == "POST":
return self.rest_client.POST(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PUT":
return self.rest_client.PUT(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "PATCH":
return self.rest_client.PATCH(url,
query_params=query_params,
headers=headers,
post_params=post_params,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
elif method == "DELETE":
return self.rest_client.DELETE(url,
query_params=query_params,
headers=headers,
_preload_content=_preload_content,
_request_timeout=_request_timeout,
body=body)
else:
raise ApiValueError(
"http method must be `GET`, `HEAD`, `OPTIONS`,"
" `POST`, `PATCH`, `PUT` or `DELETE`."
)
def parameters_to_tuples(self, params, collection_formats):
"""Get parameters as list of tuples, formatting collections.
:param params: Parameters as dict or list of two-tuples
:param dict collection_formats: Parameter collection formats
:return: Parameters as list of tuples, collections formatted
"""
new_params = []
if collection_formats is None:
collection_formats = {}
for k, v in six.iteritems(params) if isinstance(params, dict) else params: # noqa: E501
if k in collection_formats:
collection_format = collection_formats[k]
if collection_format == 'multi':
new_params.extend((k, value) for value in v)
else:
if collection_format == 'ssv':
delimiter = ' '
elif collection_format == 'tsv':
delimiter = '\t'
elif collection_format == 'pipes':
delimiter = '|'
else: # csv is the default
delimiter = ','
new_params.append(
(k, delimiter.join(str(value) for value in v)))
else:
new_params.append((k, v))
return new_params
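    # Illustrative behaviour (added comment; the values are hypothetical, not
    # part of the generated client): with collection_formats={'ids': 'csv'} and
    # params={'ids': [1, 2, 3]} this returns [('ids', '1,2,3')]; with the
    # 'multi' format it returns [('ids', 1), ('ids', 2), ('ids', 3)].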
def files_parameters(self, files=None):
"""Builds form parameters.
:param files: File parameters.
:return: Form parameters with files.
"""
params = []
if files:
for k, v in six.iteritems(files):
if not v:
continue
file_names = v if type(v) is list else [v]
for n in file_names:
with open(n, 'rb') as f:
filename = os.path.basename(f.name)
filedata = f.read()
mimetype = (mimetypes.guess_type(filename)[0] or
'application/octet-stream')
params.append(
tuple([k, tuple([filename, filedata, mimetype])]))
return params
def select_header_accept(self, accepts):
"""Returns `Accept` based on an array of accepts provided.
:param accepts: List of headers.
:return: Accept (e.g. application/json).
"""
if not accepts:
return
accepts = [x.lower() for x in accepts]
if 'application/json' in accepts:
return 'application/json'
else:
return ', '.join(accepts)
def select_header_content_type(self, content_types):
"""Returns `Content-Type` based on an array of content_types provided.
:param content_types: List of content-types.
:return: Content-Type (e.g. application/json).
"""
if not content_types:
return 'application/json'
content_types = [x.lower() for x in content_types]
if 'application/json' in content_types or '*/*' in content_types:
return 'application/json'
else:
return content_types[0]
def update_params_for_auth(self, headers, querys, auth_settings):
"""Updates header and query params based on authentication setting.
:param headers: Header parameters dict to be updated.
:param querys: Query parameters tuple list to be updated.
:param auth_settings: Authentication setting identifiers list.
"""
if not auth_settings:
return
for auth in auth_settings:
auth_setting = self.configuration.auth_settings().get(auth)
if auth_setting:
if auth_setting['in'] == 'cookie':
headers['Cookie'] = auth_setting['value']
elif auth_setting['in'] == 'header':
headers[auth_setting['key']] = auth_setting['value']
elif auth_setting['in'] == 'query':
querys.append((auth_setting['key'], auth_setting['value']))
else:
raise ApiValueError(
'Authentication token must be in `query` or `header`'
)
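    # Shape assumed for an auth setting (added comment; the key names are an
    # assumption about Configuration.auth_settings(), not verified here): an
    # API key sent in a header looks roughly like
    # {'in': 'header', 'key': 'X-API-Key', 'value': 'secret'}, which this
    # method copies into the request headers.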
def __deserialize_file(self, response):
"""Deserializes body to file
Saves response body into a file in a temporary folder,
using the filename from the `Content-Disposition` header if provided.
:param response: RESTResponse.
:return: file path.
"""
fd, path = tempfile.mkstemp(dir=self.configuration.temp_folder_path)
os.close(fd)
os.remove(path)
content_disposition = response.getheader("Content-Disposition")
if content_disposition:
filename = re.search(r'filename=[\'"]?([^\'"\s]+)[\'"]?',
content_disposition).group(1)
path = os.path.join(os.path.dirname(path), filename)
with open(path, "wb") as f:
f.write(response.data)
return path
def __deserialize_primitive(self, data, klass):
"""Deserializes string to primitive type.
:param data: str.
:param klass: class literal.
:return: int, long, float, str, bool.
"""
try:
return klass(data)
except UnicodeEncodeError:
return six.text_type(data)
except TypeError:
return data
def __deserialize_object(self, value):
"""Return an original value.
:return: object.
"""
return value
def __deserialize_date(self, string):
"""Deserializes string to date.
:param string: str.
:return: date.
"""
try:
return parse(string).date()
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason="Failed to parse `{0}` as date object".format(string)
)
def __deserialize_datetime(self, string):
"""Deserializes string to datetime.
The string should be in iso8601 datetime format.
:param string: str.
:return: datetime.
"""
try:
return parse(string)
except ImportError:
return string
except ValueError:
raise rest.ApiException(
status=0,
reason=(
"Failed to parse `{0}` as datetime object"
.format(string)
)
)
def __deserialize_model(self, data, klass):
"""Deserializes list or dict to model.
:param data: dict, list.
:param klass: class literal.
:return: model object.
"""
if not klass.openapi_types and not hasattr(klass,
'get_real_child_model'):
return data
kwargs = {}
if (data is not None and
klass.openapi_types is not None and
isinstance(data, (list, dict))):
for attr, attr_type in six.iteritems(klass.openapi_types):
if klass.attribute_map[attr] in data:
value = data[klass.attribute_map[attr]]
kwargs[attr] = self.__deserialize(value, attr_type)
instance = klass(**kwargs)
if hasattr(instance, 'get_real_child_model'):
klass_name = instance.get_real_child_model(data)
if klass_name:
instance = self.__deserialize(data, klass_name)
return instance
| [
"[email protected]"
] | |
93de9a042164784e1ab3c9d2675bfade0049d3b5 | f84c51d8159e973913f5c537f08d285bdb3630e2 | /neural_sp/bin/args_asr.py | 54e0a69463158cfa3a6d65a4044267be461a7bd4 | [
"Apache-2.0"
] | permissive | lahiruts/neural_sp | d302ce0479bcbe813639c531f460d55a6c9c2a65 | 5b314ece12081db8b423d4dc32ce33f4228ff37b | refs/heads/master | 2022-11-20T06:45:34.891542 | 2020-07-22T09:20:55 | 2020-07-22T09:20:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27,139 | py | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# Copyright 2018 Kyoto University (Hirofumi Inaguma)
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""Args option for the ASR task."""
import configargparse
from distutils.util import strtobool
import os
from neural_sp.bin.train_utils import load_config
def parse_args_train(input_args):
parser = build_parser()
user_args, _ = parser.parse_known_args(input_args)
# register module specific arguments
parser = register_args_encoder(parser, user_args)
user_args, _ = parser.parse_known_args(input_args) # to avoid args conflict
parser = register_args_decoder(parser, user_args)
user_args = parser.parse_args()
return user_args
def parse_args_eval(input_args):
parser = build_parser()
user_args, _ = parser.parse_known_args(input_args)
# Load a yaml config file
dir_name = os.path.dirname(user_args.recog_model[0])
conf_train = load_config(os.path.join(dir_name, 'conf.yml'))
# register module specific arguments
user_args.enc_type = conf_train['enc_type']
parser = register_args_encoder(parser, user_args)
user_args, _ = parser.parse_known_args(input_args) # to avoid args conflict
user_args.dec_type = conf_train['dec_type'] # to avoid overlap
parser = register_args_decoder(parser, user_args)
user_args = parser.parse_args()
# NOTE: If new args are registered after training the model, the default value will be set
# Overwrite config
for k, v in conf_train.items():
if 'recog' not in k:
setattr(user_args, k, v)
return user_args, vars(user_args), dir_name
def register_args_encoder(parser, args):
if args.enc_type == 'tds':
from neural_sp.models.seq2seq.encoders.tds import TDSEncoder as module
elif args.enc_type == 'gated_conv':
from neural_sp.models.seq2seq.encoders.gated_conv import GatedConvEncoder as module
elif 'transformer' in args.enc_type:
from neural_sp.models.seq2seq.encoders.transformer import TransformerEncoder as module
elif 'conformer' in args.enc_type:
from neural_sp.models.seq2seq.encoders.conformer import ConformerEncoder as module
else:
from neural_sp.models.seq2seq.encoders.rnn import RNNEncoder as module
if hasattr(module, 'add_args'):
parser = module.add_args(parser, args)
return parser
def register_args_decoder(parser, args):
if args.dec_type in ['transformer', 'transformer_xl']:
from neural_sp.models.seq2seq.decoders.transformer import TransformerDecoder as module
elif args.dec_type in ['lstm_transducer', 'gru_transducer']:
from neural_sp.models.seq2seq.decoders.rnn_transducer import RNNTransducer as module
elif args.dec_type == 'asg':
from neural_sp.models.seq2seq.decoders.asg import ASGDecoder as module
else:
from neural_sp.models.seq2seq.decoders.las import RNNDecoder as module
if hasattr(module, 'add_args'):
parser = module.add_args(parser, args)
return parser
def build_parser():
parser = configargparse.ArgumentParser(
config_file_parser_class=configargparse.YAMLConfigFileParser,
formatter_class=configargparse.ArgumentDefaultsHelpFormatter)
parser.add('--config', is_config_file=True, help='config file path')
parser.add('--config2', is_config_file=True, default=False, nargs='?',
help='another config file path to overwrite --config')
# general
parser.add_argument('--corpus', type=str,
help='corpus name')
parser.add_argument('--n_gpus', type=int, default=1,
help='number of GPUs (0 indicates CPU)')
parser.add_argument('--cudnn_benchmark', type=strtobool, default=True,
help='use CuDNN benchmark mode')
parser.add_argument("--train_dtype", default="float32",
choices=["float16", "float32", "float64", "O0", "O1", "O2", "O3"],
help="Data type for training")
parser.add_argument('--model_save_dir', type=str, default=False,
help='directory to save a model')
parser.add_argument('--resume', type=str, default=False, nargs='?',
help='model path to resume training')
parser.add_argument('--job_name', type=str, default=False,
help='job name')
parser.add_argument('--stdout', type=strtobool, default=False,
help='print to standard output during training')
# dataset
parser.add_argument('--train_set', type=str,
help='tsv file path for the training set')
parser.add_argument('--train_set_sub1', type=str, default=False,
help='tsv file path for the training set for the 1st auxiliary task')
parser.add_argument('--train_set_sub2', type=str, default=False,
help='tsv file path for the training set for the 2nd auxiliary task')
parser.add_argument('--dev_set', type=str,
help='tsv file path for the development set')
parser.add_argument('--dev_set_sub1', type=str, default=False,
help='tsv file path for the development set for the 1st auxiliary task')
parser.add_argument('--dev_set_sub2', type=str, default=False,
help='tsv file path for the development set for the 2nd auxiliary task')
parser.add_argument('--eval_sets', type=str, default=[], nargs='+',
help='tsv file paths for the evaluation sets')
parser.add_argument('--nlsyms', type=str, default=False, nargs='?',
help='non-linguistic symbols file path')
parser.add_argument('--dict', type=str,
help='dictionary file path')
parser.add_argument('--dict_sub1', type=str, default=False,
help='dictionary file path for the 1st auxiliary task')
parser.add_argument('--dict_sub2', type=str, default=False,
help='dictionary file path for the 2nd auxiliary task')
parser.add_argument('--unit', type=str, default='wp',
choices=['word', 'wp', 'char', 'phone', 'word_char', 'char_space'],
help='output unit for the main task')
parser.add_argument('--unit_sub1', type=str, default=False,
choices=['wp', 'char', 'phone'],
help='output unit for the 1st auxiliary task')
parser.add_argument('--unit_sub2', type=str, default=False,
choices=['wp', 'char', 'phone'],
help='output unit for the 2nd auxiliary task')
parser.add_argument('--wp_model', type=str, default=False, nargs='?',
help='wordpiece model path for the main task')
parser.add_argument('--wp_model_sub1', type=str, default=False, nargs='?',
help='wordpiece model path for the 1st auxiliary task')
parser.add_argument('--wp_model_sub2', type=str, default=False, nargs='?',
help='wordpiece model path for the 2nd auxiliary task')
# features
parser.add_argument('--input_type', type=str, default='speech',
choices=['speech', 'text'],
help='type of input features')
parser.add_argument('--n_splices', type=int, default=1,
help='number of input frames to splice (both for left and right frames)')
parser.add_argument('--n_stacks', type=int, default=1,
help='number of input frames to stack (frame stacking)')
parser.add_argument('--n_skips', type=int, default=1,
help='number of input frames to skip')
parser.add_argument('--max_n_frames', type=int, default=2000,
help='maximum number of input frames')
parser.add_argument('--min_n_frames', type=int, default=40,
help='minimum number of input frames')
parser.add_argument('--dynamic_batching', type=strtobool, default=True,
help='')
parser.add_argument('--input_noise_std', type=float, default=0,
help='standard deviation of Gaussian noise to input features')
parser.add_argument('--weight_noise_std', type=float, default=0,
help='standard deviation of Gaussian noise to weight parameters')
parser.add_argument('--sequence_summary_network', type=strtobool, default=False,
help='use sequence summary network')
# topology (encoder)
parser.add_argument('--enc_type', type=str, default='blstm',
choices=['blstm', 'lstm', 'bgru', 'gru',
'conv_blstm', 'conv_lstm', 'conv_bgru', 'conv_gru',
'transformer', 'conv_transformer',
'conformer', 'conv_conformer',
'tds', 'gated_conv'],
help='type of the encoder')
parser.add_argument('--enc_n_layers', type=int, default=5,
help='number of encoder RNN layers')
parser.add_argument('--enc_n_layers_sub1', type=int, default=0,
help='number of encoder RNN layers in the 1st auxiliary task')
parser.add_argument('--enc_n_layers_sub2', type=int, default=0,
help='number of encoder RNN layers in the 2nd auxiliary task')
parser.add_argument('--subsample', type=str, default="1_1_1_1_1",
help='delimited list input')
parser.add_argument('--subsample_type', type=str, default='drop',
choices=['drop', 'concat', 'max_pool', '1dconv'],
help='type of subsampling in the encoder')
# topology (decoder)
parser.add_argument('--dec_type', type=str, default='lstm',
choices=['lstm', 'gru', 'transformer', 'transformer_xl',
'lstm_transducer', 'gru_transducer', 'transformer_transducer',
'asg'],
help='type of the decoder')
parser.add_argument('--dec_n_layers', type=int, default=1,
help='number of decoder RNN layers')
parser.add_argument('--tie_embedding', type=strtobool, default=False, nargs='?',
help='tie weights between an embedding matrix and a linear layer before the softmax layer')
parser.add_argument('--ctc_fc_list', type=str, default="", nargs='?',
help='')
parser.add_argument('--ctc_fc_list_sub1', type=str, default="", nargs='?',
help='')
parser.add_argument('--ctc_fc_list_sub2', type=str, default="", nargs='?',
help='')
# optimization
parser.add_argument('--batch_size', type=int, default=50,
help='mini-batch size')
parser.add_argument('--optimizer', type=str, default='adam',
choices=['adam', 'adadelta', 'adagrad', 'sgd', 'momentum', 'nesterov', 'noam'],
help='type of optimizer')
parser.add_argument('--n_epochs', type=int, default=25,
help='number of epochs to train the model')
parser.add_argument('--convert_to_sgd_epoch', type=int, default=100,
                        help='epoch to convert to SGD fine-tuning')
parser.add_argument('--print_step', type=int, default=200,
help='print log per this value')
parser.add_argument('--metric', type=str, default='edit_distance',
choices=['edit_distance', 'loss', 'accuracy', 'ppl', 'bleu', 'mse'],
help='metric for evaluation during training')
parser.add_argument('--lr', type=float, default=1e-3,
help='initial learning rate')
parser.add_argument('--lr_factor', type=float, default=10.0,
help='factor of learning rate for Transformer')
parser.add_argument('--eps', type=float, default=1e-6,
help='epsilon parameter for Adadelta optimizer')
parser.add_argument('--lr_decay_type', type=str, default='always',
choices=['always', 'metric', 'warmup'],
help='type of learning rate decay')
parser.add_argument('--lr_decay_start_epoch', type=int, default=10,
help='epoch to start to decay learning rate')
parser.add_argument('--lr_decay_rate', type=float, default=0.9,
help='decay rate of learning rate')
parser.add_argument('--lr_decay_patient_n_epochs', type=int, default=0,
                        help='number of epochs to tolerate learning rate decay when validation performance is not improved')
parser.add_argument('--early_stop_patient_n_epochs', type=int, default=5,
                        help='number of epochs to tolerate stopping training when validation performance is not improved')
parser.add_argument('--sort_stop_epoch', type=int, default=10000,
                        help='epoch to stop sorting utterances by length')
parser.add_argument('--sort_short2long', type=strtobool, default=True,
help='sort utterances in the ascending order')
parser.add_argument('--shuffle_bucket', type=strtobool, default=False,
help='gather the similar length of utterances and shuffle them')
parser.add_argument('--eval_start_epoch', type=int, default=1,
                        help='first epoch to start evaluation')
parser.add_argument('--warmup_start_lr', type=float, default=0,
help='initial learning rate for learning rate warm up')
parser.add_argument('--warmup_n_steps', type=int, default=0,
                        help='number of steps to warm up learning rate')
parser.add_argument('--accum_grad_n_steps', type=int, default=1,
help='total number of steps to accumulate gradients')
# initialization
parser.add_argument('--param_init', type=float, default=0.1,
help='')
parser.add_argument('--asr_init', type=str, default=False, nargs='?',
help='pre-trained seq2seq model path')
parser.add_argument('--asr_init_enc_only', type=strtobool, default=False,
help='Initialize the encoder only')
parser.add_argument('--freeze_encoder', type=strtobool, default=False,
help='freeze the encoder parameter')
# regularization
parser.add_argument('--clip_grad_norm', type=float, default=5.0,
help='')
parser.add_argument('--dropout_in', type=float, default=0.0,
help='dropout probability for the input')
parser.add_argument('--dropout_enc', type=float, default=0.0,
help='dropout probability for the encoder')
parser.add_argument('--dropout_dec', type=float, default=0.0,
help='dropout probability for the decoder')
parser.add_argument('--dropout_emb', type=float, default=0.0,
help='dropout probability for the embedding')
parser.add_argument('--dropout_att', type=float, default=0.0,
help='dropout probability for the attention weights')
parser.add_argument('--weight_decay', type=float, default=0,
help='weight decay parameter')
parser.add_argument('--ss_prob', type=float, default=0.0,
help='probability of scheduled sampling')
parser.add_argument('--ss_type', type=str, default='constant',
choices=['constant', 'ramp'],
help='type of scheduled sampling')
parser.add_argument('--lsm_prob', type=float, default=0.0,
help='probability of label smoothing')
parser.add_argument('--ctc_lsm_prob', type=float, default=0.0,
help='probability of label smoothing for CTC')
# SpecAugment
parser.add_argument('--freq_width', type=int, default=27,
help='width of frequency mask for SpecAugment')
parser.add_argument('--n_freq_masks', type=int, default=0,
help='number of frequency masks for SpecAugment')
parser.add_argument('--time_width', type=int, default=100,
help='width of time mask for SpecAugment')
parser.add_argument('--n_time_masks', type=int, default=0,
help='number of time masks for SpecAugment')
parser.add_argument('--time_width_upper', type=float, default=1.0,
help='')
parser.add_argument('--adaptive_number_ratio', type=float, default=0.0,
help='adaptive multiplicity ratio for time masking')
parser.add_argument('--adaptive_size_ratio', type=float, default=0.0,
help='adaptive size ratio for time masking')
parser.add_argument('--max_n_time_masks', type=int, default=20,
help='maximum number of time masking')
# MTL
parser.add_argument('--ctc_weight', type=float, default=0.0,
help='CTC loss weight for the main task')
parser.add_argument('--ctc_weight_sub1', type=float, default=0.0,
help='CTC loss weight for the 1st auxiliary task')
parser.add_argument('--ctc_weight_sub2', type=float, default=0.0,
help='CTC loss weight for the 2nd auxiliary task')
parser.add_argument('--sub1_weight', type=float, default=0.0,
help='total loss weight for the 1st auxiliary task')
parser.add_argument('--sub2_weight', type=float, default=0.0,
help='total loss weight for the 2nd auxiliary task')
parser.add_argument('--mtl_per_batch', type=strtobool, default=False, nargs='?',
help='change mini-batch per task')
parser.add_argument('--task_specific_layer', type=strtobool, default=False, nargs='?',
help='insert a task-specific encoder layer per task')
    # forward-backward
parser.add_argument('--bwd_weight', type=float, default=0.0,
                        help='cross entropy loss weight for the backward decoder in the main task')
# cold fusion, LM initialization
parser.add_argument('--external_lm', type=str, default=False, nargs='?',
help='LM path')
parser.add_argument('--lm_fusion', type=str, default='',
choices=['', 'cold', 'cold_prob', 'deep', 'cold_attention'],
help='type of LM fusion')
parser.add_argument('--lm_init', type=strtobool, default=False,
help='initialize the decoder with the external LM')
# contextualization
parser.add_argument('--discourse_aware', type=strtobool, default=False, nargs='?',
help='carry over the last decoder state to the initial state in the next utterance')
# MBR
parser.add_argument('--mbr_training', type=strtobool, default=False,
help='Minimum Bayes Risk (MBR) training')
parser.add_argument('--mbr_ce_weight', type=float, default=0.01,
help='MBR loss weight for the main task')
parser.add_argument('--mbr_nbest', type=int, default=4,
help='N-best for MBR training')
parser.add_argument('--mbr_softmax_smoothing', type=float, default=0.8,
help='softmax smoothing (beta) for MBR training')
# TransformerXL
parser.add_argument('--bptt', type=int, default=0,
help='number of tokens to truncate in TransformerXL decoder during training')
parser.add_argument('--mem_len', type=int, default=0,
help='number of tokens for memory in TransformerXL decoder during training')
# distillation related
parser.add_argument('--teacher', default=False, nargs='?',
help='Teacher ASR model for knowledge distillation')
parser.add_argument('--teacher_lm', default=False, nargs='?',
help='Teacher LM for knowledge distillation')
parser.add_argument('--distillation_weight', type=float, default=0.1,
help='soft label weight for knowledge distillation')
# special label
parser.add_argument('--replace_sos', type=strtobool, default=False,
help='')
# decoding parameters
parser.add_argument('--recog_stdout', type=strtobool, default=False,
help='print to standard output during evaluation')
parser.add_argument('--recog_n_gpus', type=int, default=0,
help='number of GPUs (0 indicates CPU)')
parser.add_argument('--recog_sets', type=str, default=[], nargs='+',
help='tsv file paths for the evaluation sets')
parser.add_argument('--recog_first_n_utt', type=int, default=-1,
                        help='recognize the first N utterances for quick evaluation')
parser.add_argument('--recog_model', type=str, default=False, nargs='+',
help='model path')
parser.add_argument('--recog_model_bwd', type=str, default=False, nargs='?',
help='model path in the reverse direction')
parser.add_argument('--recog_dir', type=str, default=False,
help='directory to save decoding results')
parser.add_argument('--recog_unit', type=str, default=False, nargs='?',
choices=['word', 'wp', 'char', 'phone', 'word_char', 'char_space'],
help='')
parser.add_argument('--recog_metric', type=str, default='edit_distance',
choices=['edit_distance', 'loss', 'accuracy', 'ppl', 'bleu'],
help='metric for evaluation')
parser.add_argument('--recog_oracle', type=strtobool, default=False,
help='recognize by teacher-forcing')
parser.add_argument('--recog_batch_size', type=int, default=1,
help='size of mini-batch in evaluation')
parser.add_argument('--recog_beam_width', type=int, default=1,
help='size of beam')
parser.add_argument('--recog_max_len_ratio', type=float, default=1.0,
help='')
parser.add_argument('--recog_min_len_ratio', type=float, default=0.0,
help='')
parser.add_argument('--recog_length_penalty', type=float, default=0.0,
help='length penalty')
parser.add_argument('--recog_length_norm', type=strtobool, default=False, nargs='?',
help='normalize score by hypothesis length')
parser.add_argument('--recog_coverage_penalty', type=float, default=0.0,
help='coverage penalty')
parser.add_argument('--recog_coverage_threshold', type=float, default=0.0,
help='coverage threshold')
parser.add_argument('--recog_gnmt_decoding', type=strtobool, default=False, nargs='?',
help='adopt Google NMT beam search decoding')
parser.add_argument('--recog_eos_threshold', type=float, default=1.5,
                        help='threshold for emitting an EOS token')
parser.add_argument('--recog_lm_weight', type=float, default=0.0,
                        help='weight of first-path LM score')
parser.add_argument('--recog_lm_second_weight', type=float, default=0.0,
help='weight of second-path LM score')
parser.add_argument('--recog_lm_bwd_weight', type=float, default=0.0,
                        help='weight of second-path backward LM score. \
First-pass backward LM in case of synchronous bidirectional decoding.')
parser.add_argument('--recog_ctc_weight', type=float, default=0.0,
help='weight of CTC score')
parser.add_argument('--recog_lm', type=str, default=False, nargs='?',
help='path to first path LM for shallow fusion')
parser.add_argument('--recog_lm_second', type=str, default=False, nargs='?',
help='path to second path LM for rescoring')
parser.add_argument('--recog_lm_bwd', type=str, default=False, nargs='?',
help='path to second path LM in the reverse direction for rescoring')
parser.add_argument('--recog_resolving_unk', type=strtobool, default=False,
help='resolving UNK for the word-based model')
parser.add_argument('--recog_fwd_bwd_attention', type=strtobool, default=False,
help='forward-backward attention decoding')
parser.add_argument('--recog_bwd_attention', type=strtobool, default=False,
help='backward attention decoding')
parser.add_argument('--recog_reverse_lm_rescoring', type=strtobool, default=False,
help='rescore with another LM in the reverse direction')
parser.add_argument('--recog_asr_state_carry_over', type=strtobool, default=False,
help='carry over ASR decoder state')
parser.add_argument('--recog_lm_state_carry_over', type=strtobool, default=False,
help='carry over LM state')
parser.add_argument('--recog_softmax_smoothing', type=float, default=1.0,
help='softmax smoothing (beta) for diverse hypothesis generation')
parser.add_argument('--recog_wordlm', type=strtobool, default=False,
help='')
parser.add_argument('--recog_n_average', type=int, default=1,
help='number of models for the model averaging of Transformer')
parser.add_argument('--recog_streaming', type=strtobool, default=False,
help='streaming decoding')
parser.add_argument('--recog_chunk_sync', type=strtobool, default=False,
help='chunk-synchronous beam search decoding for MoChA')
parser.add_argument('--recog_ctc_spike_forced_decoding', type=strtobool, default=False,
help='force MoChA to generate tokens corresponding to CTC spikes')
parser.add_argument('--recog_ctc_vad', type=strtobool, default=True,
help='')
parser.add_argument('--recog_ctc_vad_blank_threshold', type=int, default=40,
help='')
parser.add_argument('--recog_ctc_vad_spike_threshold', type=float, default=0.1,
help='')
parser.add_argument('--recog_ctc_vad_n_accum_frames', type=int, default=4000,
help='')
parser.add_argument('--recog_mma_delay_threshold', type=int, default=-1,
help='delay threshold for MMA decoder')
parser.add_argument('--recog_mem_len', type=int, default=0,
help='number of tokens for memory in TransformerXL decoder during evaluation')
return parser
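# Illustrative invocation (added sketch; the file names below are placeholders,
# not defaults from any recipe):
#   args = parse_args_train(['--train_set', 'train.tsv', '--dev_set', 'dev.tsv',
#                            '--dict', 'dict.txt', '--unit', 'wp',
#                            '--model_save_dir', 'results'])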
| [
"[email protected]"
] | |
b89e6024ba7fcd2978bed43342381eaea6996fb3 | 5ebfced62f59052560c6adf89bfd2f249877cc75 | /webcomics/series/urls.py | 46b8c581e3ef21673277aa776913f4bad5bfbd5c | [] | no_license | lumenwrites/webcomics | 537c9bd0337ebd087dacdee7b72797b658481f8c | 34200eaf19021147c561bf140a685e398156589e | refs/heads/master | 2021-06-10T17:12:50.317113 | 2017-02-19T09:28:57 | 2017-02-19T09:28:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 882 | py | from django.conf.urls import url
from . import views
from posts.views import SeriesFeed
urlpatterns = [
# url(r'^$', views.BrowseView.as_view(), name='post-list'),
url(r'^create-series/$', views.SeriesCreate.as_view(), name='series-create'),
url(r'^series/(?P<slug>[^\.]+)/edit$', views.SeriesEdit.as_view()),
url(r'^series/(?P<slug>[^\.]+)/delete$', views.series_delete),
# url(r'^browse/$', views.BrowseView.as_view(), name='post-list'),
url(r'^series/(?P<slug>[^\.]+)/subscribe', views.subscribe),
url(r'^series/(?P<slug>[^\.]+)/unsubscribe', views.unsubscribe),
url(r'^series/(?P<slug>[^\.]+)/feed/atom/$', SeriesFeed()),
url(r'^series/(?P<slug>[^\.]+)$', views.SeriesView.as_view(), name='series-detail'),
url(r'^orangemind$', views.SeriesView.as_view(), {'slug': 'orangemind'}, name='series-detail'),
]
| [
"[email protected]"
] | |
daae7ab1b7ac6d998eca5a559c61ec45f2d7095e | 25985aeeee54373d26a164e4cc6a014770e3ebf3 | /windows/w3af/w3af/core/data/nltk_wrapper/.svn/text-base/nltk_wrapper.py.svn-base | 3f652ee04b353c653d75d4761f71621362d73520 | [] | no_license | sui84/tools | 4b750dae90940fbe3a226cba72dc071d8fb88b7c | 651cc08eb50199ce1044c684dbf714ea26df6432 | refs/heads/master | 2021-01-22T19:22:26.964580 | 2017-08-20T15:23:38 | 2017-08-20T15:23:38 | 100,774,276 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,047 | '''
nltk_wrapper.py
Copyright 2011 Andres Riancho
This file is part of w3af, w3af.sourceforge.net .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
'''
from nltk.corpus.util import LazyCorpusLoader
from nltk.data import ZipFilePathPointer
from nltk.corpus.reader.wordnet import WordNetCorpusReader
import os
class wordnet_loader(LazyCorpusLoader):
def __init__(self, name, reader_cls, *args, **kwargs):
from nltk.corpus.reader.api import CorpusReader
assert issubclass(reader_cls, CorpusReader)
self.__name = self.__name__ = name
self.__reader_cls = reader_cls
self.__args = args
self.__kwargs = kwargs
def __load(self):
# Find the corpus root directory.
zip_location = os.path.join('plugins', 'discovery', 'wordnet','wordnet.zip')
root = ZipFilePathPointer(zip_location, 'wordnet/')
# Load the corpus.
corpus = self.__reader_cls(root, *self.__args, **self.__kwargs)
# This is where the magic happens! Transform ourselves into
# the corpus by modifying our own __dict__ and __class__ to
# match that of the corpus.
self.__dict__ = corpus.__dict__
self.__class__ = corpus.__class__
def __getattr__(self, attr):
self.__load()
        # This looks circular, but it's not, since __load() changes our
# __class__ to something new:
return getattr(self, attr)
wn = wordnet_loader('wordnet', WordNetCorpusReader)
| [
"[email protected]"
] | ||
214374daa226d99e5073ab7b542cbb0a073ca027 | fce6762c17fc81009af226f71ca32d2dc8227beb | /Section 4 Matrix multiplications.py | 952e60e048ad9f035f59866a9b471ae7989ef640 | [] | no_license | kuangzijian/Linear-Algebra | 3f9599ef282283dfc6bd49c0c97327a8fa31e671 | 94a872502ff570f04d61cb7bf1db653681f403c3 | refs/heads/master | 2022-11-04T07:05:55.272865 | 2019-07-31T05:32:42 | 2019-07-31T05:32:42 | 196,622,918 | 0 | 1 | null | 2022-10-29T19:00:25 | 2019-07-12T17:57:11 | Python | UTF-8 | Python | false | false | 7,048 | py | import numpy as np
import matplotlib.pyplot as plt
import math
from sympy import *
#Standard matrix multiplication, parts 1 & 2
## rules for multiplication validity
m = 4
n = 3
k = 6
# make some matrices
A = np.random.randn(m,n)
B = np.random.randn(n,k)
C = np.random.randn(m,k)
# test which multiplications are valid.
np.matmul(A,B)
#np.matmul(A,A)
np.matmul(np.matrix.transpose(A),C)
np.matmul(B,np.matrix.transpose(B))
np.matmul(np.matrix.transpose(B),B)
#np.matmul(B,C)
#np.matmul(C,B)
#np.matmul(np.matrix.transpose(C),B)
np.matmul(C,np.matrix.transpose(B))
#Code challenge: matrix multiplication by layering
A = np.abs(np.round(5*np.random.randn(4,2)))
B = np.abs(np.round(5*np.random.randn(2,3)))
print(A)
print(B)
r1 = 0
for i in range(0, len(B)):
r1 = r1 + np.outer(A[:,i], B[i])
print(A[:,i])
print(B[i])
print(r1)
print(np.matmul(A, B))
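# sanity check (added): the layered sum of outer products should equal the
# standard matrix product, up to floating-point rounding
print(np.allclose(r1, np.matmul(A, B)))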
#Order-of-operations on matrices
n = 2
L = np.random.randn(n,n)
I = np.random.randn(n,n)
V = np.random.randn(n,n)
E = np.random.randn(n,n)
# result of "forward" multiplication and then transpose
res1 = np.matrix.transpose( L @ I @ V @ E )
# result of "flipped" multiplication of transposed matrices
res2 = np.matrix.transpose(E) @ np.matrix.transpose(V) @ np.matrix.transpose(I) @ np.matrix.transpose(L)
# test equality by subtracting (ignore possible computer rounding errors)
res1-res2
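# equivalent check (added): allclose is easier to read than a matrix of tiny residuals
print(np.allclose(res1, res2))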
#Matrix-vector multiplication
# number of elements
m = 4
# create matrices
N = np.round( 10*np.random.randn(m,m) )
S = np.round( np.matrix.transpose(N)*N/m**2 ) # scaled symmetric
# and vector
w = np.array([-1, 0, 1, 2])
print(S)
print(w)
print(N)
print("with symmetric matrix")
# NOTE: The @ symbol for matrix multiplication is relatively new to Python, a@b is the same as numpy.dot or a.dot(b)
print(S@w) # 1
print(np.matrix.transpose(S@w)) # 2
print(w@S) # 3
print(np.matrix.transpose(w)@np.matrix.transpose(S)) # 4
print(np.matrix.transpose(w)@S) # 5
print("with nonsymmetric matrix")
print(N@w) # 1
print(np.matrix.transpose(N@w)) # 2
print(w@N) # 3
print(np.matrix.transpose(w)@np.matrix.transpose(N)) # 4
print(np.matrix.transpose(w)@N) # 5
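# added check: for the symmetric matrix S the forms above coincide (S@w equals w@S),
# which is generally not the case for the nonsymmetric matrix N
print(np.allclose(S@w, w@S))
print(np.allclose(N@w, w@N))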
#2D transformation matrices
# 2D input vector
v = np.array([ 3, -2 ])
# 2x2 transformation matrix
A = np.array([ [1,-1], [2,1] ])
# output vector is Av (convert v to column)
w = [email protected](v)
# plot them
plt.plot([0,v[0]],[0,v[1]],label='v')
plt.plot([0,w[0]],[0,w[1]],label='Av')
plt.grid()
plt.axis((-6, 6, -6, 6))
plt.legend()
plt.title('Rotation + stretching')
plt.show()
## pure rotation
# 2D input vector
v = np.array([ 3, -2 ])
# 2x2 rotation matrix
th = np.pi/30
A = np.array([ [math.cos(th),-math.sin(th)], [math.sin(th),math.cos(th)] ])
# output vector is Av (convert v to column)
w = [email protected](v)
# plot them
plt.plot([0,v[0]],[0,v[1]],label='v')
plt.plot([0,w[0]],[0,w[1]],label='Av')
plt.grid()
plt.axis((-4, 4, -4, 4))
plt.legend()
plt.title('Pure rotation')
plt.show()
#code challenge: Pure and impure rotation matrices
v = np.array([ 3, -2 ])
# 2x2 rotation matrix
ths = np.linspace(0, 2*np.pi,100)
vecmags = np.zeros([len(ths),2])
for i in range(0, len(ths)):
th = ths[i]
#inpure transformation matrix
A1 = np.array([ [2*math.cos(th),-math.sin(th)], [math.sin(th),math.cos(th)] ])
#pure transformation matrix
A2 = np.array([ [math.cos(th),-math.sin(th)], [math.sin(th),math.cos(th)] ])
# output vector is Av (convert v to column)
vecmags[i, 0] = np.linalg.norm(A1 @ v)
vecmags[i, 1] = np.linalg.norm(A2 @ v)
# plot them
plt.plot(ths,vecmags)
plt.grid()
plt.legend(["inpure transformation","pure transformation matrix"])
plt.title('Pure and impure rotation matrices')
plt.show()
#Additive and multiplicative matrix identities
# size of matrices
n = 4
A = np.round( 10*np.random.randn(n,n) )
I = np.eye(n,n)
Z = np.zeros((n,n))
# test both identities
np.array_equal( A@I , A )
np.array_equal( A , A@I )
np.array_equal( A , A+I )
np.array_equal( A , A+I )
np.array_equal( A+Z , A@I )
#Additive and multiplicative symmetric matrices
## the additive method
# specify sizes
m = 5
n = 5
# create matrices
A = np.random.randn(m,n)
S = ( A + np.matrix.transpose(A) )/2
# A symmetric matrix minus its transpose should be all zeros
print( S-np.matrix.transpose(S) )
## the multiplicative method
# specify sizes
m = 5
n = 3
# create matrices
A = np.random.randn(m,n)
AtA = np.matrix.transpose(A)@A
AAt = [email protected](A)
# first, show that they are square
print( AtA.shape )
print( AAt.shape )
# next, show that they are symmetric
print( AtA - np.matrix.transpose(AtA) )
print( AAt - np.matrix.transpose(AAt) )
#Element-wise (Hadamard) multiplication
# any matrix sizes
m = 13
n = 2
# ...but the two matrices must be the same size
A = np.random.randn(m,n)
B = np.random.randn(m,n)
# note the different syntax compared to @ for matrix multiplication
C = np.multiply( A,B )
print(C)
#code challenge: Symmetry of combined symmetric matrices
print("Create two symmetric matrices")
S = np.round( 2*np.random.randn(3,2) )
S1 = S.dot(np.transpose(S))
print(S1)
S = np.round( 2*np.random.randn(3,2) )
S2 = S.dot(np.transpose(S))
print(S2)
print("compute sum, multiplication, and Hadamard multiplication of the two matrices")
#determine whether the result is still symmetric
print(S1+S2)
print(S1.dot(S2))
print(S1*S2)
#Multiplication of two symmetric matrices
a,b,c,d,e,f,g,h,k,l,m,n,o,p,q,r,s,t,u = symbols('a b c d e f g h k l m n o p q r s t u', real=True)
# symmetric and constant-diagonal matrices
A = Matrix([ [a,b,c,d],
[b,a,e,f],
[c,e,a,h],
[d,f,h,a] ])
B = Matrix([ [l,m,n,o],
[m,l,q,r],
[n,q,l,t],
[o,r,t,l] ])
# confirmation that A and B are symmetric
print( A - A.transpose() )
print( B - B.transpose() )
# ... and constant diagonal
for i in range(0,np.size(A,0)):
print( A[i,i] )
for i in range(0,np.size(B,0)):
print( B[i,i] )
# but AB neq (AB)'
A@B - (A@B).T
# maybe for a submatrix?
n = 3
A1 = A[ 0:n,0:n ]
B1 = B[ 0:n,0:n ]
A1@B1 - (A1*B1).T
#Frobenius dot-product
# any matrix sizes
m = 9
n = 4
# but the two matrices must be the same size
A = np.random.randn(m,n)
B = np.random.randn(m,n)
# first vectorize, then vector-dot-product
Av = np.reshape( A,m*n, order='F' ) # order='F' reshapes by columns instead of by rows
Bv = np.reshape( B,m*n, order='F' )
frob_dp = np.dot( Av,Bv )
# trace method
frob_dp2 = np.trace( np.matrix.transpose(A)@B )
print(frob_dp2)
print(frob_dp)
# matrix norm
Anorm = np.linalg.norm(A,'fro')
Anorm2 = np.sqrt( np.trace( np.matrix.transpose(A)@A ) )
print(Anorm)
print(Anorm2)
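# a third equivalent form (added): the Frobenius norm is the square root of the
# sum of squared entries, i.e. of the Frobenius dot-product of A with itself
Anorm3 = np.sqrt( np.sum(A*A) )
print(Anorm3)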
#Code challenge: standard and Hadamard multiplication for diagonal matrices
#Create two matrices 4x4 full and diagonal
D1 = np.random.randn(4,4)
D2 = np.diag([4,5,6,7])
#multiply each matrix by itself (A*A): standard and Hadamard multiplications
RS1 = D1.dot(D1)
RS2 = D2.dot(D2)
RH1 = D1*D1
RH2 = D2*D2
print(D1)
print(RS1)
print(RH1)
print(D2)
print(RS2)
print(RH2)
| [
"[email protected]"
] | |
3482c862a6405f9d46af7e9c72673545f05201a1 | eb8b5cde971573668800146b3632e43ed6e493d2 | /python/oneflow/test/modules/test_instruction_replay.py | e9fbd188d1ecc88127be665d92a6ea691ab0065a | [
"Apache-2.0"
] | permissive | big-data-ai/oneflow | 16f167f7fb7fca2ce527d6e3383c577a90829e8a | b1c67df42fb9c5ab1335008441b0273272d7128d | refs/heads/master | 2023-07-08T21:21:41.136387 | 2021-08-21T11:31:14 | 2021-08-21T11:31:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,832 | py | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import unittest
from collections import OrderedDict
import numpy as np
from test_util import GenArgList
import oneflow
import oneflow as flow
import oneflow.unittest
def _test_instruction_replay_impl(test_case, device, shape):
x = flow.Tensor(np.random.rand(*shape), device=flow.device(device))
y = flow.Tensor(np.random.rand(*shape), device=flow.device(device))
oneflow._oneflow_internal.debug.start_recording_instructions()
z = x + y
oneflow._oneflow_internal.debug.end_recording_instructions()
test_case.assertTrue(np.allclose(z.numpy(), x.numpy() + y.numpy(), 0.0001, 0.0001))
z.zeros_()
oneflow._oneflow_internal.debug.replay_instructions()
test_case.assertTrue(np.allclose(z.numpy(), x.numpy() + y.numpy(), 0.0001, 0.0001))
oneflow._oneflow_internal.debug.clear_recorded_instructions()
@flow.unittest.skip_unless_1n1d()
class TestIntructionReplay(flow.unittest.TestCase):
def test_instruction_replay(test_case):
arg_dict = OrderedDict()
arg_dict["device"] = ["cpu", "cuda"]
arg_dict["shape"] = [[2, 3], [1, 10]]
for arg in GenArgList(arg_dict):
_test_instruction_replay_impl(test_case, *arg)
if __name__ == "__main__":
unittest.main()
| [
"[email protected]"
] | |
30b31dbb48ee318100dfe52ceb8b3bf19ac84ee9 | 9aab01a48d1af5c4f1889ae9d27940f8bc738d37 | /Mindshare/project_management/cvr/tables.py | 4143730b11b7ed81bf26920c54a9c284e43bd1ea | [] | no_license | raveena17/workout-ex | 274998170a3cfbf42bffe61d49fce8531eddc3f5 | a9c652535f33d05199b3c5d26b72c721a822a2b7 | refs/heads/master | 2021-09-05T10:06:46.399468 | 2018-01-26T08:36:58 | 2018-01-26T08:36:58 | 119,025,925 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 521 | py | # import django_tables2 as tables
# from .models import Cvr
# #class django_tables2.columns.LinkColumn(, urlconf=None, , kwargs=None, current_app=None, attrs=None, **extra)
# class CVRTable(tables.Table):
# id = tables.LinkColumn(viewname='edit_cvr', args=[tables.A('pk')])
# class Meta:
# model = Cvr
# exclude = ('comments', 'reason_for_visit', 'actions_taken_during_the_visit', 'next_plan_of_action',)
# # add class="paleblue" to <table> tag
# attrs = {'class': 'paleblue'}
| [
"[email protected]"
] | |
6c79ae8cc7aed21c5f2b9410bcf90b219dedfe16 | 07af444dafa5bde373b0730e92d67e455d4ff4df | /SFData/StackOverflow/s36972087_ground_truth.py | 79f82ae3f49c2bb32dc969c91d323ecc4f7a516f | [] | no_license | tensfa/tensfa | 9114595b58a2e989780af0c348afb89a2abb04b4 | 415dcfaec589b0b14c5b9864872c912f3851b383 | refs/heads/main | 2023-06-30T14:27:38.217089 | 2021-08-03T01:33:30 | 2021-08-03T01:33:30 | 368,465,614 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,008 | py | import tensorflow as tf
import numpy as np
train_images = np.array(np.random.random((10, 19)), dtype=np.float32)
train_labels = np.random.randint(0, 2, 10, dtype=np.int32)
train_labels = np.eye(2)[train_labels]
sess = tf.InteractiveSession()
x = tf.placeholder(tf.float32, shape=[None, 19])
y_ = tf.placeholder(tf.float32, shape=[None, 2])
W = tf.Variable(tf.zeros([19,2]))
b = tf.Variable(tf.zeros([2]))
sess.run(tf.global_variables_initializer())
y = tf.nn.softmax(tf.matmul(x,W) + b)
cross_entropy = tf.reduce_mean(-tf.reduce_sum(y_ * tf.log(y), reduction_indices=[1]))
train_step = tf.train.GradientDescentOptimizer(0.5).minimize(cross_entropy)
start = 0
batch_1 = 50
end = 100
for i in range(1):
# batch = mnist.train.next_batch(50)
x1 = train_images[start:end]
y1 = train_labels[start:end]
start = start + batch_1
end = end + batch_1
x1 = np.reshape(x1, (-1, 19))
y1 = np.reshape(y1, (-1, 2))
train_step.run(feed_dict={x: np.expand_dims(x1[0], 0), y_: np.expand_dims(y1[0], 0)}) | [
"[email protected]"
] | |
62a13abd4c0147da29cd785233f04f06aca6a23a | 2a8abd5d6acdc260aff3639bce35ca1e688869e9 | /telestream_cloud_qc_sdk/test/test_container_essence_consistency_test.py | a53e951acde1e1e1d545fa4c1388c5f5ecb32225 | [
"MIT"
] | permissive | Telestream/telestream-cloud-python-sdk | 57dd2f0422c83531e213f48d87bc0c71f58b5872 | ce0ad503299661a0f622661359367173c06889fc | refs/heads/master | 2021-01-18T02:17:44.258254 | 2020-04-09T11:36:07 | 2020-04-09T11:36:07 | 49,494,916 | 0 | 0 | MIT | 2018-01-22T10:07:49 | 2016-01-12T11:10:56 | Python | UTF-8 | Python | false | false | 1,600 | py | # coding: utf-8
"""
Qc API
Qc API # noqa: E501
The version of the OpenAPI document: 3.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import telestream_cloud_qc
from telestream_cloud_qc.models.container_essence_consistency_test import ContainerEssenceConsistencyTest # noqa: E501
from telestream_cloud_qc.rest import ApiException
class TestContainerEssenceConsistencyTest(unittest.TestCase):
"""ContainerEssenceConsistencyTest unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test ContainerEssenceConsistencyTest
include_option is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = telestream_cloud_qc.models.container_essence_consistency_test.ContainerEssenceConsistencyTest() # noqa: E501
if include_optional :
return ContainerEssenceConsistencyTest(
reject_on_error = True,
checked = True
)
else :
return ContainerEssenceConsistencyTest(
)
def testContainerEssenceConsistencyTest(self):
"""Test ContainerEssenceConsistencyTest"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
5c90209a2a85242d66565dc74c3d13c88a2f10b7 | e7b7505c084e2c2608cbda472bc193d4a0153248 | /DailyChallenge/LC_126.py | 6a71e599fd82c9936054243d450e4e182fae01a5 | [] | no_license | Taoge123/OptimizedLeetcode | 8e5c1cd07904dfce1248bc3e3f960d2f48057a5d | 3e50f6a936b98ad75c47d7c1719e69163c648235 | refs/heads/master | 2023-02-27T21:13:40.450089 | 2023-02-07T04:11:09 | 2023-02-07T04:11:09 | 170,044,224 | 9 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,181 | py |
import collections
import string
class Solution:
def findLadders(self, beginWord: str, endWord: str, wordList):
        # we don't need a visited set since we remove newLayer's keys from wordList once they are processed
wordList = set(wordList)
res = []
lowercase = string.ascii_lowercase
#layer is similar to queue in 127
layer = collections.defaultdict(list)
layer[beginWord] = [[beginWord]]
while layer:
newLayer = collections.defaultdict(list)
for word in layer:
if word == endWord:
for i in layer[word]:
res.append(i)
else:
for i in range(len(word)):
for char in lowercase:
newWord = word[:i] + char + word[i+1:]
if newWord in wordList:
for valList in layer[word]:
# print(newWord, valList + [newWord])
newLayer[newWord].append(valList + [newWord])
wordList -= set(newLayer.keys())
layer = newLayer
return res
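# Illustrative usage (added; not part of the original solution):
# Solution().findLadders("hit", "cog", ["hot", "dot", "dog", "lot", "log", "cog"])
# returns, in some order,
# [['hit', 'hot', 'dot', 'dog', 'cog'], ['hit', 'hot', 'lot', 'log', 'cog']]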
| [
"[email protected]"
] | |
4e7eb91fe1d09211b9bd1a08ad237e37699b1484 | ac549e553263801bdc6962a10ebbe784dc2631df | /Python/graphs/traversal.py | e3e6b65ebfcfc36492062561afd6ccc02a61bcd2 | [] | no_license | Bishal44/DataStructure | e595890d18bde39e65f02a7ca3a6904c6070c3c8 | 939c47de6dcfe3b2578aaa0610d3cdc5726572c7 | refs/heads/master | 2020-09-10T22:40:46.368607 | 2020-03-28T12:15:08 | 2020-03-28T12:15:08 | 221,854,694 | 0 | 0 | null | 2019-12-10T15:47:45 | 2019-11-15T05:59:40 | Python | UTF-8 | Python | false | false | 1,863 | py | '''
Created on Sat Jan 11 2020
'''
graph = {'A': set(['B', 'C', 'F']),
'B': set(['A', 'D', 'E']),
'C': set(['A', 'F']),
'D': set(['B']),
'E': set(['B', 'F']),
'F': set(['A', 'C', 'E'])}
# dfs and bfs are ultimately the same except that they visit nodes in a
# different order. To simulate this ordering we use a stack for dfs and a
# queue for bfs.
#
def dfs_traverse(graph, start):
visited, stack = set(), [start]
while stack:
node = stack.pop()
if node not in visited:
visited.add(node)
for nextNode in graph[node]:
if nextNode not in visited:
stack.append(nextNode)
return visited
# print(dfs_traverse(graph, 'A'))
def bfs_traverse(graph, start):
visited, queue = set(), [start]
while queue:
node = queue.pop(0)
if node not in visited:
visited.add(node)
for nextNode in graph[node]:
if nextNode not in visited:
queue.append(nextNode)
return visited
# print(bfs_traverse(graph, 'A'))
def dfs_traverse_recursive(graph, start, visited=None):
if visited is None:
visited = set()
visited.add(start)
for nextNode in graph[start]:
if nextNode not in visited:
dfs_traverse_recursive(graph, nextNode, visited)
return visited
# print(dfs_traverse_recursive(graph, 'A'))
# def find_path(graph, start, end, visited=[]):
# # basecase
# visitied = visited + [start]
# if start == end:
# return visited
# if start not in graph:
# return None
# for node in graph[start]:
# if node not in visited:
# new_visited = find_path(graph, node, end, visited)
# return new_visited
# return None
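# A working variant (added sketch; the commented-out attempt above is left as-is).
# It returns one path from start to end, or None if no path exists.
def find_path(graph, start, end, path=None):
    if path is None:
        path = []
    path = path + [start]
    if start == end:
        return path
    for node in graph[start]:
        if node not in path:
            result = find_path(graph, node, end, path)
            if result is not None:
                return result
    return None
# print(find_path(graph, 'A', 'F'))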
# print(find_path(graph, 'A', 'F')) | [
"[email protected]"
] | |
aeb610b09b0c9d2cd3f230690fa462bbab915093 | f125a883dbcc1912dacb3bf13e0f9263a42e57fe | /tsis1/Loop for/3532.py | e1fad768f54887c4a2ca4540e551ec7aadfa2c56 | [] | no_license | AruzhanBazarbai/pp2 | 1f28b9439d1b55499dec4158e8906954b507f04a | 9d7f1203b6735b27bb54dfda73b3d2c6b90524c3 | refs/heads/master | 2023-07-13T05:26:02.154105 | 2021-08-27T10:20:34 | 2021-08-27T10:20:34 | 335,332,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 116 | py | #Задача №3532. Сумма кубов
n=int(input())
cnt=0
for i in range(1,n+1):
cnt+=i**3
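# note (added): after the loop, cnt equals the closed form (n*(n+1)//2)**2,
# since 1**3 + 2**3 + ... + n**3 = (n*(n+1)/2)**2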
print(cnt) | [
"[email protected]"
] | |
b08ad2fefef80365d87004cef4629d3c62aa60b3 | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/layout/legend/_traceorder.py | d5fe177e6cf14ddf521d4e55b0eef9d2d0fa8d2e | [
"MIT"
] | permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 532 | py | import _plotly_utils.basevalidators
class TraceorderValidator(_plotly_utils.basevalidators.FlaglistValidator):
def __init__(self, plotly_name="traceorder", parent_name="layout.legend", **kwargs):
super(TraceorderValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "legend"),
extras=kwargs.pop("extras", ["normal"]),
flags=kwargs.pop("flags", ["reversed", "grouped"]),
**kwargs
)
| [
"[email protected]"
] | |
fb2dc56539cdf51cd1d14fa04f375e98d0178ecc | ea16c6da19fce9a4dff085aaeff3ac12baa21d59 | /tests/test_obvs.py | 5febd213e3768347232d28f1e8c604c5c017648c | [] | no_license | changhoonhahn/specmulator | a31b17aeab1ba1a29118e431fd7558dd8bbc7e5b | 9453e7fcc30d74b732594bfb78f7e4f5d20bc95f | refs/heads/master | 2021-09-10T18:57:21.361837 | 2018-03-31T05:52:33 | 2018-03-31T05:52:33 | 106,511,656 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,710 | py | import numpy as np
import env
import util as UT
import obvs as Obvs
import matplotlib as mpl
import matplotlib.pyplot as plt
mpl.rcParams['text.usetex'] = True
mpl.rcParams['font.family'] = 'serif'
mpl.rcParams['axes.linewidth'] = 1.5
mpl.rcParams['axes.xmargin'] = 1
mpl.rcParams['xtick.labelsize'] = 'x-large'
mpl.rcParams['xtick.major.size'] = 5
mpl.rcParams['xtick.major.width'] = 1.5
mpl.rcParams['ytick.labelsize'] = 'x-large'
mpl.rcParams['ytick.major.size'] = 5
mpl.rcParams['ytick.major.width'] = 1.5
mpl.rcParams['legend.frameon'] = False
def Plk_halo_mneut_ratio(nzbin=4, zspace=False):
''' Plot the ratio of P_l^mneut(k)/P_l^0.0eV
for different neutrino masses
'''
mneuts = [0.0, 0.06, 0.10, 0.15, 0.6] # eV
p0ks_mneut, p2ks_mneut, p4ks_mneut = [], [], []
for mneut in mneuts:
p0ks, p2ks, p4ks = [], [], []
for ireal in range(1, 101):
# read all 100 realizations
plk_i = Obvs.Plk_halo(mneut, ireal, nzbin, zspace=zspace)
if ireal == 1: k = plk_i['k']
p0ks.append(plk_i['p0k'])
p2ks.append(plk_i['p2k'])
p4ks.append(plk_i['p4k'])
# plot the average
p0ks_mneut.append(np.average(np.array(p0ks), axis=0))
p2ks_mneut.append(np.average(np.array(p2ks), axis=0))
p4ks_mneut.append(np.average(np.array(p4ks), axis=0))
plks_mneut = [p0ks_mneut, p2ks_mneut, p4ks_mneut]
fig = plt.figure(figsize=(15, 5))
for i, ell in enumerate([0,2,4]):
sub = fig.add_subplot(1,3,i+1)
for ii in range(len(mneuts)):
sub.plot(k, plks_mneut[i][ii]/plks_mneut[i][0], lw=2, label=r'$\sum m_\nu = $ '+str(mneuts[ii])+'eV')
if i == 0:
sub.legend(loc='lower right', prop={'size': 12})
else:
sub.set_yticks([])
sub.set_xscale('log')
sub.set_xlim([0.01, 0.5])
sub.set_xlabel('k', fontsize=20)
sub.set_ylim([0.9, 1.15])
sub.set_ylabel('$P_{'+str(ell)+'}(k)/P_{'+str(ell)+'}^{0.0\mathrm{eV}}(k)$', fontsize=20)
if zspace: str_space = 'z'
else: str_space = 'r'
fig.savefig(''.join([UT.fig_dir(), 'tests/plk_halo.mneuts_ratio.nzbin', str(nzbin),
'.', str_space, 'space.png']), bbox_inches='tight')
return None
def Plk_halo_mneut(nzbin=4, zspace=False):
''' Plot P_l(k) for different neutrino masses
'''
mneuts = [0.0, 0.06, 0.10, 0.15, 0.6] # eV
p0ks_mneut, p2ks_mneut, p4ks_mneut = [], [], []
for mneut in mneuts:
p0ks, p2ks, p4ks = [], [], []
for ireal in range(1, 101):
# read all 100 realizations
plk_i = Obvs.Plk_halo(mneut, ireal, nzbin, zspace=zspace)
if ireal == 1: k = plk_i['k']
p0ks.append(plk_i['p0k'])
p2ks.append(plk_i['p2k'])
p4ks.append(plk_i['p4k'])
# plot the average
p0ks_mneut.append(np.average(np.array(p0ks), axis=0))
p2ks_mneut.append(np.average(np.array(p2ks), axis=0))
p4ks_mneut.append(np.average(np.array(p4ks), axis=0))
plks_mneut = [p0ks_mneut, p2ks_mneut, p4ks_mneut]
fig = plt.figure(figsize=(15, 5))
for i, ell in enumerate([0,2,4]):
sub = fig.add_subplot(1,3,i+1)
for mneut, plk in zip(mneuts, plks_mneut[i]):
sub.plot(k, plk, lw=2, label=r'$\sum m_\nu = $ '+str(mneut)+'eV')
if i == 0:
sub.legend(loc='lower right', prop={'size': 12})
else:
sub.set_yticks([])
sub.set_xscale('log')
sub.set_xlim([0.01, 0.15])
sub.set_xlabel('k', fontsize=20)
sub.set_ylim([1e3, 1e5])
sub.set_yscale('log')
sub.set_ylabel('$k P_{'+str(ell)+'}(k)$', fontsize=20)
if zspace: str_space = 'z'
else: str_space = 'r'
fig.savefig(''.join([UT.fig_dir(), 'tests/plk_halo.mneuts.nzbin', str(nzbin),
'.', str_space, 'space.png']), bbox_inches='tight')
return None
def Plk_halo(mneut=0.0, nzbin=4, zspace=False):
''' **TESTED --- Nov 7, 2017 **
Test the Plk_halo
'''
p0ks, p2ks, p4ks = [], [], []
for ireal in range(1, 101):
# read all 100 realizations
plk_i = Obvs.Plk_halo(mneut, ireal, nzbin, zspace=zspace)
if ireal == 1: k = plk_i['k']
p0ks.append(plk_i['p0k'])
p2ks.append(plk_i['p2k'])
p4ks.append(plk_i['p4k'])
fig = plt.figure()
sub = fig.add_subplot(111)
for p0k, p2k, p4k in zip(p0ks, p2ks, p4ks):
sub.plot(k, k * p0k, c='k', lw=0.1)
sub.plot(k, k * p2k, c='b', lw=0.1)
sub.plot(k, k * p4k, c='r', lw=0.1)
# plot the average
sub.plot(k, k * np.average(np.array(p0ks), axis=0), c='k', lw=2, ls='--', label='$\ell=0$')
sub.plot(k, k * np.average(np.array(p2ks), axis=0), c='b', lw=2, ls='--', label='$\ell=2$')
sub.plot(k, k * np.average(np.array(p4ks), axis=0), c='r', lw=2, ls='--', label='$\ell=4$')
sub.set_xlim([0.01, 0.15])
sub.set_xlabel('k', fontsize=20)
sub.set_ylim([-2000., 2500.])
sub.set_ylabel('$k P(k)$', fontsize=20)
sub.legend(loc='lower right', prop={'size': 15})
if zspace: str_space = 'z'
else: str_space = 'r'
fig.savefig(''.join([UT.fig_dir(), 'tests/plk_halo.', str(mneut), 'eV.nzbin', str(nzbin),
'.', str_space, 'space.png']), bbox_inches='tight')
return None
if __name__=="__main__":
Plk_halo_mneut_ratio(nzbin=4, zspace=False)
Plk_halo_mneut_ratio(nzbin=4, zspace=True)
#Plk_halo_mneut(nzbin=4, zspace=False)
#Plk_halo_mneut(nzbin=4, zspace=True)
#Plk_halo(mneut=0.6, zspace=False)
#Plk_halo(mneut=0.6, zspace=True)
| [
"[email protected]"
] | |
d7f53e22fde0ca53ee451f3ff3b5e007a16c8a41 | 9c61ec2a55e897e4a3bb9145296081c648d812c4 | /docs/cd/06443007程式碼/ch01/1-8.py | d27b4e821287c1d67dba80b1f5b27da4d527b6e6 | [] | no_license | wildboy2arthur/ML-Class | 47899246251d12972a6d3875160c1cc8d1052202 | 345c86e3f8890919d59a63a79674acbdcd4577c4 | refs/heads/main | 2023-07-16T11:32:07.683652 | 2021-08-24T08:25:04 | 2021-08-24T08:25:04 | 399,388,026 | 0 | 0 | null | 2021-08-24T08:18:36 | 2021-08-24T08:18:35 | null | UTF-8 | Python | false | false | 240 | py | def cal_price_dict(k_cost):
rate = 0.03
nt_cost = k_cost * rate
inc = 0.2
nt_price = nt_cost * (1 + inc)
data = {
'k_cost': k_cost,
'nt_cost': nt_cost,
'nt_price': nt_price
}
return data | [
"[email protected]"
] | |
4da999cb489a900fa165b6cd924ab3776644bd18 | 9973dd9a35333f1b24e4c1e3cd2098391d17e193 | /clones/migrations/0002_auto_20200216_2103.py | d210099b236272054745ccd1c53767889b1d5bc6 | [] | no_license | smilepogz/FinalTrelloClone | 5140f804ceeb02e6969cb5693daa3cad7e296961 | 9affade23a0b911baa5fa11d9d2ce83e3db669e7 | refs/heads/master | 2021-01-04T11:20:08.893932 | 2020-02-17T13:44:11 | 2020-02-17T13:44:11 | 240,524,496 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 699 | py | # Generated by Django 3.0.3 on 2020-02-16 13:03
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('clones', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='boardlist',
name='title',
field=models.CharField(max_length=200),
),
migrations.AlterField(
model_name='card',
name='Attachment',
field=models.FileField(upload_to=''),
),
migrations.AlterField(
model_name='card',
name='description',
field=models.TextField(blank=True, max_length=10),
),
]
| [
"[email protected]"
] | |
020942a036c94976bc69092a9f4d19b9c8c7ad90 | 8f455679fdb8e05c4c78141a8065250696d68d89 | /MultiNetV1.py | f46219308f3cf2135c2153d96f56870b3514b6ff | [
"MIT"
] | permissive | x5g/dogs_vs_cats | 63a17ac914ded5850d6d4e745408d50e4d242f74 | 8a6b992fe9abc6b20b31729eaec79ca8d6ec12e0 | refs/heads/master | 2022-10-20T02:25:51.097115 | 2020-06-09T17:21:52 | 2020-06-09T17:21:52 | 271,065,547 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,326 | py | import plaidml.keras
plaidml.keras.install_backend()
import os
os.environ["KERAS_BACKEND"] = "plaidml.keras.backend"
import keras
import matplotlib.pyplot as plt
import numpy as np
import ssl
ssl._create_default_https_context = ssl._create_unverified_context
ROWS = 299
COLS = 299
CHANNELS = 3
batch_size = 32
epochs = 10
train_dir = './train2'
validation_dir = './validation'
test_dir = './test1'
Inp = keras.layers.Input((ROWS, COLS, CHANNELS))
InceptionV3_model = keras.applications.InceptionV3(weights='imagenet', include_top=False, input_shape=(ROWS, COLS, CHANNELS))
Xception_model = keras.applications.Xception(weights='imagenet', include_top=False, input_shape=(ROWS, COLS, CHANNELS))
InceptionV3_layers = InceptionV3_model(Inp)
InceptionV3_layers = keras.layers.GlobalAveragePooling2D()(InceptionV3_layers)
Xception_layers = Xception_model(Inp)
Xception_layers = keras.layers.GlobalAveragePooling2D()(Xception_layers)
x = keras.layers.Concatenate()([InceptionV3_layers, Xception_layers])
output = keras.layers.Dense(2, activation='softmax')(x)
model = keras.Model(inputs=Inp, outputs=output)
for layer in InceptionV3_model.layers:
layer.trainable = False
for layer in Xception_model.layers:
layer.trainable = False
keras.utils.plot_model(model, show_shapes=True, show_layer_names=True, to_file='MultiNetV1_model.pdf')
train_datagen = keras.preprocessing.image.ImageDataGenerator(
    rotation_range = 40, # random rotation, in degrees
    width_shift_range = 0.2, # random horizontal shift
    height_shift_range = 0.2,# random vertical shift
    rescale = 1/255, # rescale pixel values to [0, 1]
    shear_range = 20, # random shear transform
    zoom_range = 0.2, # random zoom
    horizontal_flip = True, # random horizontal flip
    fill_mode = 'nearest', # how newly created pixels are filled
)
test_datagen = keras.preprocessing.image.ImageDataGenerator(
    rescale = 1/255, # rescale pixel values to [0, 1]
)
# training data generator
train_generator = train_datagen.flow_from_directory(
train_dir,
target_size=(ROWS,COLS),
batch_size=batch_size,
)
# validation data generator
validation_generator = test_datagen.flow_from_directory(
validation_dir,
target_size=(ROWS,COLS),
batch_size=batch_size,
)
model.summary()
# define the optimizer and loss, and track accuracy during training
model.compile(optimizer=keras.optimizers.SGD(lr=1e-4, momentum=0.9), loss=keras.losses.binary_crossentropy, metrics=['accuracy'])
## Callback for loss logging per epoch
class LossHistory(keras.callbacks.Callback):
def on_train_begin(self, logs={}):
self.losses = []
self.val_losses = []
def on_epoch_end(self, batch, logs={}):
self.losses.append(logs.get('loss'))
self.val_losses.append(logs.get('val_loss'))
early_stopping = keras.callbacks.EarlyStopping(monitor='val_loss', patience=3, verbose=1, mode='auto')
lossHistory = LossHistory()
history = model.fit_generator(
generator = train_generator,
steps_per_epoch=len(train_generator),
epochs = epochs,
validation_data=validation_generator,
validation_steps=len(validation_generator),
callbacks = [lossHistory, early_stopping])
model.save('MultiNetV1.h5')
acc = history.history['acc']
val_acc = history.history['val_acc']
loss = history.history['loss']
val_loss = history.history['val_loss']
# acc = [
# 0.9014070402083021,
# 0.9552851634870563,
# 0.9575885033298283,
# 0.9616944569640881,
# 0.9623454008312052,
# 0.9634469981488059,
# 0.963747433781964,
# 0.9642982324370337,
# 0.9672024435431376,
# 0.9662009914375845]
# val_acc = [
# 0.9805572257894484,
# 0.9821607535505228,
# 0.98296251743106,
# 0.9831629585087192,
# 0.9825616355983163,
# 0.9841651633593906,
# 0.984365604222,
# 0.9845660452996593,
# 0.9851673683414814,
# 0.9851673681025372]
# loss = [
# 0.34548001789042687,
# 0.1829768680474425,
# 0.15205100328394244,
# 0.1336793582993715,
# 0.12181056393720338,
# 0.11529702214687088,
# 0.1095373861976298,
# 0.10428516739372867,
# 0.10034206073545955,
# 0.09901416560581902]
# val_loss = [
# 0.16728722282750116,
# 0.11115399416999794,
# 0.0901722999804482,
# 0.07770438194887197,
# 0.07115493825619816,
# 0.06525685261254752,
# 0.0611271229343917,
# 0.058128020974982354,
# 0.05485415271406638,
# 0.05218703313500113]
from matplotlib.backends.backend_pdf import PdfPages
pdf = PdfPages('MultiNetV1_result.pdf')
from matplotlib.ticker import MultipleLocator
# plot training & validation accuracy values
fig = plt.figure()
ax = fig.add_subplot(111)
lns1 = ax.plot(acc, color='blue', linestyle='-', label='Train accuracy')
lns2 = ax.plot(val_acc, color='orange', linestyle='-', label='Validation accuracy')
ax2 = ax.twinx()
lns3 = ax2.plot(loss, color='red', linestyle='-', label='Train loss')
lns4 = ax2.plot(val_loss, color='green', linestyle='-', label='Validation loss')
lns = lns1 + lns2 + lns3 + lns4
labs = [l.get_label() for l in lns]
ax.legend(lns, labs, loc='right')
# ax.legend(lns, labs, loc=0)
ax.grid()
ax.set_xlabel("Epoch")
ax.set_ylabel("Accuracy")
x_major_locator = MultipleLocator(1)
y_major_locator = MultipleLocator(0.01)
ax.xaxis.set_major_locator(x_major_locator)
ax.set_xlim(0, 9)
ax.set_ylim(0.90, 0.99)
ax.yaxis.set_major_locator(y_major_locator)
ax2.yaxis.set_major_locator(MultipleLocator(0.05))
ax2.set_ylabel("Loss")
ax2.set_ylim(0.05, 0.35)
# ax2.legend(loc=0)
plt.title('Training and validation accuracy and loss')
# plt.show()
# plt.savefig('MultiNetV1_result.png')
plt.tight_layout()
print('savefig...')
pdf.savefig()
plt.close()
pdf.close()
with open("MultiNetV1.txt", 'a+') as f:
f.write('acc\n')
for item in acc:
f.write("{}\n".format(item))
f.write('val_acc\n')
for item in val_acc:
f.write("{}\n".format(item))
f.write('loss\n')
for item in loss:
f.write("{}\n".format(item))
f.write('val_loss\n')
for item in val_loss:
f.write("{}\n".format(item))
def read_image(file_path):
from PIL import Image
img = Image.open(file_path)
if img.mode != 'RGB':
img = img.convert('RGB')
return img.resize((ROWS, COLS), Image.NEAREST)
def predict():
result = []
model = keras.models.load_model('MultiNetV1.h5')
test_images = [test_dir + '/' + str(i) + '.jpg' for i in range(1, 12501)]
count = len(test_images)
data = np.ndarray((count, ROWS, COLS, CHANNELS), dtype=np.float32)
for i, image_file in enumerate(test_images):
image = read_image(image_file)
data[i] = np.asarray(image) / 255.0
        if i % 250 == 0: print('processing {} of {}'.format(i, count))
test = data
predictions = model.predict(test, verbose=1)
print(predictions)
for i in range(len(predictions)):
dog_pre = predictions[i, 1]
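        # Clip predictions away from 0 and 1 (presumably to cap the log-loss
        # penalty that a single confidently wrong prediction would incur).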
if dog_pre <= 0.005:
result.append(0.005)
elif dog_pre >=0.995:
result.append(0.995)
else:
result.append(dog_pre)
# if predictions[i, 0] >= 0.5:
# result.append(0.005)
# else:
# result.append(0.995)
return result
result = predict()
print(result)
import pandas as pd
# the dict keys become the CSV column names
dataframe = pd.DataFrame({'id': [i for i in range(1, 12501)], 'label': result})
# save the DataFrame as CSV; index controls whether row labels are written (default True)
dataframe.to_csv("MultiNetV1_result.csv", index=False, sep=',')
| [
"[email protected]"
] | |
8e3f054d598f85623ae2376aac935bda04e154d6 | afbae26b958b5ef20548402a65002dcc8e55b66a | /ironstubs/process_stubs.py | 570dd3fd93b8d1d96302c69f2f0d497a9dd5adf3 | [
"MIT"
] | permissive | gtalarico/ironpython-stubs | d875cb8932c7644f807dc6fde9dd513d159e4f5c | c7f6a6cb197e3949e40a4880a0b2a44e72d0a940 | refs/heads/master | 2023-07-12T01:43:47.295560 | 2022-05-23T18:12:06 | 2022-05-23T18:12:06 | 95,340,553 | 235 | 88 | NOASSERTION | 2023-07-05T06:36:28 | 2017-06-25T05:30:46 | Python | UTF-8 | Python | false | false | 6,253 | py | """ Stub Generator for IronPython
Extended script based on script developed by Gary Edwards at:
gitlab.com/reje/revit-python-stubs
This uses a slightly modified version of generator3,
github.com/JetBrains/intellij-community/blob/master/python/helpers/generator3.py
Iterates through a list of targeted assemblies and generates stub directories
for the namespaces using pycharm's generator3.
Note:
Some files ended up too large for Jedi to handle and would cause
memory errors and crashes - 1mb+ in a single file was enough to
cause problems. To fix this, there is a separate module that creates
a compressed version of the stubs, but it also splits large files
into separate files to deal with Jedi.
These directories will show up in the stubs as (X_parts)
MIT LICENSE
https://github.com/gtalarico/ironpython-stubs
Gui Talarico
--------------------------------------------------------------------------
Large files, such as `System/__init__.py` or `Revit/DB/__init__.py`
can exceed memory limits and crash the system.
These files need to be optimized so Jedi won't misbehave and crash your system
when parsing these files to index autocomplete options.
The primary strategies are:
1. Remove unnecessary characters (empty lines, extra spaces, etc)
2. Split large files into parts to improve Jedi performance and avoid crashes
#1 is very straightforward. Use a few regexes.
#2 is more complex. Some of the stubs created by generator3 such as DB/__init__.py
had nearly 2mb. Doesn't seem like much, but for a raw .py file, that's more than
120K lines. System.Windows.Forms had over 7mb.
The strategy here was simple. Take all the classes inside these monster files,
create separate files for each one, and import them back into the original file.
For an example, compare:
`\stubs\Autodesk\Revit\DB\__init__.py`
and
``\stubs.min\Autodesk\Revit\DB\__init__.py`
"""
import re
import os
import sys
import subprocess
from collections import defaultdict
import json
from pprint import pprint
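# --- Illustration only (added; not used by the pipeline below) ---------------
# A rough sketch of strategy #1 from the notes above: the same kind of regexes
# that appear later in `replacements`, applied to a source string. The helper
# name is hypothetical.
def _demo_minify(source):
    import re
    minified = re.sub(r' {4}', ' ', source)                  # collapse 4-space indents
    minified = re.sub(r':\r\n( )+pass', r':pass', minified)  # keep a bare `pass` on one line
    return minified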
#############################################################################
#TODO: Integrate with CLI
#TODO: FIX Vars
#TODO: FIX Character Replacement + Optimize
#############################################################################
##########
# CONFIG #
##########
join = os.path.join
project_dir = os.getcwd() # Must execute from project dir
SAVE_PATH = os.path.join(project_dir, 'release', 'stubs')
LIMIT_IN_KB = 200
FILESIZE_LIMITE = LIMIT_IN_KB * 1024
def file_is_too_damn_big(filepath):
return os.path.getsize(filepath) > FILESIZE_LIMITE
def read_source(filepath):
with open(filepath) as fp:
source = fp.read()
return source
def write_source(filepath, source):
folderpath = os.path.dirname(filepath)
if not os.path.exists(folderpath):
os.makedirs(folderpath)
with open(filepath, 'w') as fp:
source = fp.write(source)
print('File Written: {}'.format(filepath))
target_files = []
TESTING = False
# TESTING = True
print('Starting...')
print(SAVE_PATH)
for root, subfolders, files in os.walk(SAVE_PATH):
py_files = [f for f in files if f.endswith('.py')]
for filename in py_files:
filepath = join(root, filename)
filesize = os.path.getsize(filepath)
filedir = os.path.dirname(filepath)
        new_filedir = filedir.replace(r'\stubs', r'\stubs.min')
new_filepath = os.path.join(new_filedir, filename)
source = read_source(filepath)
print("Processing File detected: {}".format(filepath))
if TESTING:
if not filepath.endswith('DB\\__init__.py'):
continue
# SOME OF THESE WORK IN TESTS BUT ARE NOT WORKING ON BATCH REPLACEMENT
replacements = [
(r' {4}', ' '), # Convert 4 spaces into single
(r':\r\n( )+pass', r':pass'), # Put pass in one line
(r'"""\r\n( )+pass', r'"""'), # If has doc string, not need to keep pass
(r'pass\n', r'pass'), # Remove Extra Line after pass
(r' = ', '='),
(r', ', ','),
(r' # known case of __new__', ''), # Pycharm Note
(r' #cannot find CLR method', ''), # Pycharm Note
(r' # default', ''), # Pycharm Note
]
new_source = source
for old, new in replacements:
new_source = re.sub(old, new, new_source)
write_source(new_filepath, new_source)
print('='*30)
#####################################
# SEPARATE FILE INTO SEPARATE FILES #
#####################################
if file_is_too_damn_big(new_filepath):
print('='*30)
print('WARNING: file above breaking max: {}'.format(new_filepath))
module_name = os.path.basename(filepath).replace('.py', '_parts')
chunks_dir = join(new_filedir, module_name)
# Create Blank Init File
write_source(join(chunks_dir, '__init__.py'), '')
# Split File into Classes
chunks = re.split(r'(?:\n)class ', new_source)
header = chunks.pop(0)
clean_source = header
write_source(new_filepath, clean_source)
for chunk in chunks:
# Find Class Name and body
class_source = 'class ' + chunk
re_class_name = re.search('(class )(\w+)', class_source)
class_name = re_class_name.group(2)
if not os.path.exists(chunks_dir):
os.mkdir(chunks_dir)
# Write individual class files
with open(join(chunks_dir, class_name + '.py'), 'w') as fp:
fp.write(class_source)
# New class file import to __init__
with open(new_filepath, 'a') as fp:
fp.write('from {0}.{1} import {1}\n'.format(module_name, class_name))
| [
"[email protected]"
] | |
9fabaf664d6dbaf4dd42fc7eb23fb3b411cfd395 | 845d8e6816e91474e673b6cda452254d40c65e5c | /django_mailbox/transports/mmdf.py | ad462849609331fa0f5cdc9bf69e107179dd2cb7 | [] | no_license | redtoad/django-mailbox | d0847f7f29f4e4459045e8d9d3d5d1406968175b | 6da17053d495bee58ea78d4fb394d7618aeaab1a | refs/heads/master | 2021-01-01T15:36:55.409316 | 2013-06-12T06:50:25 | 2013-06-12T06:50:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152 | py | from mailbox import MMDF
from django_mailbox.transports.generic import GenericFileMailbox
class MMDFTransport(GenericFileMailbox):
_variant = MMDF
| [
"[email protected]"
] | |
02405e0001cf5846244e9d69773d9a9e7158254b | 2e682fd72e3feaa70e3f7bf2a3b83c50d783ec02 | /PyTorch/built-in/cv/detection/SSD_for_PyTorch/configs/paa/paa_r50_fpn_1.5x_coco.py | 816c773695c011d9bf568083b9cd4e991e0abf1e | [
"Apache-2.0",
"GPL-1.0-or-later",
"BSD-2-Clause",
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-generic-cla",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Ascend/ModelZoo-PyTorch | 4c89414b9e2582cef9926d4670108a090c839d2d | 92acc188d3a0f634de58463b6676e70df83ef808 | refs/heads/master | 2023-07-19T12:40:00.512853 | 2023-07-17T02:48:18 | 2023-07-17T02:48:18 | 483,502,469 | 23 | 6 | Apache-2.0 | 2022-10-15T09:29:12 | 2022-04-20T04:11:18 | Python | UTF-8 | Python | false | false | 713 | py | # Copyright 2022 Huawei Technologies Co., Ltd.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
_base_ = './paa_r50_fpn_1x_coco.py'
lr_config = dict(step=[12, 16])
runner = dict(type='EpochBasedRunner', max_epochs=18)
| [
"[email protected]"
] | |
6204778bccce5acd82eee6997003e783a16005fd | a939e018333a9ecd26ddc618f99835b7eb381686 | /.svn/tmp/tempfile.2.tmp | 509885ba67010786fd018501957f1787d480a5c8 | [] | no_license | cash2one/crawl_youtube | bff5ba254001c2f31f770e55a4aca39bc54e45ee | 0dc40186a1d89da2b00f29d4f4edfdc5470eb4fc | refs/heads/master | 2021-01-16T22:30:17.800282 | 2016-02-18T11:50:09 | 2016-02-18T11:50:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,117 | tmp | #!/usr/bin/python
# coding=utf8
# Copyright 2015 LeTV Inc. All Rights Reserved.
# author: [email protected] (Qiang Gao)
import os
import signal
from le_crawler.common.logutil import Log
thrift_logger = Log('thrift.server.TServer', 'log/thrift_filter.error').log
from optparse import OptionParser
from thrift.transport import TSocket
from thrift.transport import TTransport
from thrift.protocol import TCompactProtocol
from thrift.server import TServer
from pybloom import ScalableBloomFilter
from le_crawler.proto.filter import UrlFilterService
class FilterHandler(object):
def __init__(self, logger):
self.logger_ = logger
self._load_from_file()
def url_seen(self, url):
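    # pybloom's ScalableBloomFilter.add() returns True when the element was
    # (probably) already present, so a True here marks the URL as a duplicate.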
if self.deduper_.add(url):
self.logger_.info('url duplicated: %s', url)
return True
return False
def _load_from_file(self):
self.logger_.info('loading data from cache file...')
if not os.path.isfile('data/bloom.data'):
self.logger_.error('bloom cache file not found, create one instead.')
self.deduper_ = ScalableBloomFilter(100000000, 0.0001, 4)
else:
with open('data/bloom.data', 'r') as f:
self.deduper_ = ScalableBloomFilter.fromfile(f)
def _dump_to_file(self):
self.logger_.info('dumping data...')
if not os.path.isdir('data'):
os.mkdir('data')
with open('data/bloom.data', 'w') as f:
self.deduper_.tofile(f)
self.logger_.info('dump data finished.')
def close(self):
self._dump_to_file()
class FilterServiceMain(object):
def __init__(self):
self.logger_ = Log('filter_log', 'log/filter.log').log
self.exit_ = False
def close(self, num, fram):
self.exit_ = True
try:
self.socket_.close()
self.handler_.close()
self.logger_.info('close transport')
except:
self.logger_.exception('failed to close transport.')
def run(self, host, port):
    # the daemon flag is set to True so the service can be stopped by an outside signal
self.socket_ = TSocket.TServerSocket(host, port)
self.handler_ = FilterHandler(self.logger_)
self.service = TServer.TThreadedServer(UrlFilterService.Processor(self.handler_),
self.socket_,
TTransport.TBufferedTransportFactory(),
TCompactProtocol.TCompactProtocolFactory(),
daemon=True)
self.logger_.info('begin server on %s, %s' % (host, port))
print 'begin server on %s, %s' % (host, port)
self.service.serve()
scheduler = FilterServiceMain()
signal.signal(signal.SIGINT, scheduler.close)
signal.signal(signal.SIGTERM, scheduler.close)
if __name__ == '__main__':
option_parser = OptionParser()
option_parser.add_option('-H', '--host', type='string', dest='host',
default='10.150.140.84', help="service host")
option_parser.add_option('-p', '--port', type='int', dest='port',
default=8089, help="service port")
options, _ = option_parser.parse_args()
scheduler.run(options.host, options.port)
| [
"[email protected]"
] | |
6951735b5119448cb7a86cf403b941f92733e4b0 | f46966a5e49a6138182635a4850738a18eec01e5 | /scripts/utils/bcbio_prep_cwl_genomes.py | d704120ef6fa0e7407cca8ec06c5c6a3272e0319 | [
"MIT"
] | permissive | jchenpku/bcbio-nextgen | 44a9247a0e1314aaba66d1f9941540ddb2993bde | 9ddbfcc6f2595298ae8aad3adfa6a568a2a4c62f | refs/heads/master | 2020-08-01T03:06:30.695158 | 2019-10-07T00:21:32 | 2019-10-07T00:21:32 | 73,585,332 | 1 | 0 | MIT | 2019-10-07T00:21:33 | 2016-11-12T23:49:31 | Python | UTF-8 | Python | false | false | 2,642 | py | #!/usr/bin/env python
"""Clean and prepare a set of genomes for CWL usage and upload.
bcbio with CWL can read directly from a reference genome folder
without using Galaxy location files. This allows both local and
remote usage on object stores (Arvados, DNAnexus, SevenBridges, Synapse, S3).
This copies from an existing bcbio genome installation, cleaning
and packing directories to be ready for CWL usage and upload.
Usage:
bcbio_prep_cwl_genomes.py <genome_dir>
"""
import glob
import os
import shutil
import subprocess
import sys
import tarfile
from bcbio import utils
def main(base_dir):
for genome_dir in sorted(glob.glob(os.path.join(base_dir, "*", "*"))):
if os.path.isdir(genome_dir):
genome_name = os.path.basename(genome_dir)
genome_out_dir = utils.safe_makedir(os.path.join(os.path.join(os.getcwd(), "genomes", genome_name)))
copy_genome(genome_dir, genome_out_dir)
def copy_genome(orig_dir, out_dir):
print(orig_dir, out_dir)
to_copy = ["versions.csv", "bwa", "config", "coverage", "rnaseq", "rtg", "seq", "snpeff",
"ucsc", "validation", "variation", "viral"]
excludes = {"seq": ["*.fa.gz*", "*.old*", "perl"],
"rnaseq": ["ericscript", "tophat", "kallisto"],
"snpeff": ["transcripts"],
"variation": ["genesplicer", "dbNSFP*"]}
to_tar = ["bwa", "rtg", "snpeff"]
for copy in to_copy:
if os.path.isfile(os.path.join(orig_dir, copy)):
shutil.copy(os.path.join(orig_dir, copy), out_dir)
elif copy in to_tar and len(glob.glob(os.path.join(out_dir, "%s*-wf.tar.gz" % copy))) == 1:
print("already prepped: %s" % glob.glob(os.path.join(out_dir, "%s*-wf.tar.gz" % copy)))
else:
cmd = ["rsync", "-avz"]
for e in excludes.get(copy, []):
cmd += ["--exclude", e]
cmd += ["%s/%s/" % (orig_dir, copy), "%s/%s/" % (out_dir, copy)]
            print(" ".join(cmd))
subprocess.check_call(cmd)
if copy in to_tar:
with utils.chdir(out_dir):
out_file = copy
dir_files = os.listdir(copy)
if len(dir_files) == 1 and os.path.isdir(os.path.join(copy, dir_files[0])):
out_file += "--%s" % (dir_files[0])
out_file += "-wf.tar.gz"
print("tarball", out_file)
with tarfile.open(out_file, "w:gz") as tar:
tar.add(copy)
shutil.rmtree(copy)
if __name__ == "__main__":
main(*sys.argv[1:])
| [
"[email protected]"
] | |
000ad2bfe0221337ebe78b33b4c1046aed21085d | 46b432cd3557038c454601367b878f889c9b6a8f | /kiyuna/tutorial04/test_hmm.py | b2b0fc5a973faf6fbfb2ad7d8772238651f39b66 | [] | no_license | tmu-nlp/NLPtutorial2019 | 84ceec06568fd9d899a686658fb8851466133375 | d77d199c50cd37d70e462209a7bfcd4dee9140a1 | refs/heads/master | 2020-05-14T13:34:05.336594 | 2019-09-25T02:25:41 | 2019-09-25T02:25:41 | 181,814,723 | 1 | 0 | null | 2019-08-01T18:53:54 | 2019-04-17T04:04:06 | Python | UTF-8 | Python | false | false | 3,896 | py | '''
POS tagging with a hidden Markov model
'''
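# The model file is expected to contain whitespace-separated lines of the form
#   "T <prev_tag> <next_tag> <prob>"  for transition probabilities, or
#   "E <tag> <word> <prob>"           for emission probabilities
# (any leading tag other than 'T' is treated as an emission by load_model below).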
import os
import sys
import subprocess
from collections import defaultdict
from math import log2
os.chdir(os.path.dirname(os.path.abspath(__file__))) # cd .
def message(text):
print("\33[92m" + text + "\33[0m")
def load_model(model_file):
possible_tags = defaultdict(int)
emission = defaultdict(float)
transition = defaultdict(float)
with open(model_file) as f:
for line in f:
type, context, word, prob = line.split()
possible_tags[context] += 1
if type == 'T':
transition[f"{context} {word}"] = float(prob)
else:
emission[f"{context} {word}"] = float(prob)
return possible_tags, emission, transition
def test_hmm(model_path, test_path, output_path):
λ_1 = 0.90
λ_unk = 1 - λ_1
V = 1e6
possible_tags, emission, transition = load_model(model_path)
res = []
with open(test_path) as f:
for line in f:
words = line.split()
            # cost-minimizing DP (Viterbi search)
best_score = defaultdict(lambda: float('inf'))
best_edge = defaultdict(str)
best_score["0 <s>"] = 0
best_edge["0 <s>"] = None
for i, word in enumerate(words):
for prev in possible_tags:
for next in possible_tags:
if f"{i} {prev}" not in best_score:
continue
if f"{prev} {next}" not in transition:
continue
score = best_score[f"{i} {prev}"]
Pt = transition[f"{prev} {next}"]
score += -log2(Pt)
Pe = λ_1 * emission[f"{next} {word}"] + λ_unk / V
score += -log2(Pe)
if best_score[f"{i+1} {next}"] > score:
best_score[f"{i+1} {next}"] = score
best_edge[f"{i+1} {next}"] = f"{i} {prev}"
l = len(words)
for tag in possible_tags:
if f"{l} {tag}" not in best_score:
continue
if f"{tag} </s>" not in transition:
continue
Pt = transition[f"{tag} </s>"]
score = best_score[f"{l} {tag}"] + -log2(Pt)
if best_score[f"{l+1} </s>"] > score:
best_score[f"{l+1} </s>"] = score
best_edge[f"{l+1} </s>"] = f"{l} {tag}"
tags = []
next_edge = best_edge[f"{l+1} </s>"]
while next_edge != "0 <s>":
pos, tag = next_edge.split()
tags.append(tag)
next_edge = best_edge[next_edge]
tags.reverse()
res.append(" ".join(tags) + '\n')
with open(output_path, 'w') as f:
f.writelines(res)
if __name__ == '__main__':
is_test = sys.argv[1:] == ["test"]
if is_test:
message("[*] test")
model = './model_test.txt'
test = '../../test/05-test-input.txt'
res = './result_test.pos'
ans = '../../test/05-test-answer.txt'
else:
message("[*] wiki")
model = './model_wiki.txt'
test = '../../data/wiki-en-test.norm'
res = './result_wiki.pos'
ans = '../../data/wiki-en-test.pos'
test_hmm(model, test, res)
if is_test:
subprocess.run(f'diff -s {res} {ans}'.split())
else:
subprocess.run(f'perl ../../script/gradepos.pl {ans} {res}'.split())
message("[+] Done!")
'''
Accuracy: 90.82% (4144/4563)
Most common mistakes:
NNS --> NN 45
NN --> JJ 27
JJ --> DT 22
NNP --> NN 22
VBN --> NN 12
JJ --> NN 12
NN --> IN 11
NN --> DT 10
NNP --> JJ 8
VBP --> VB 7
'''
| [
"[email protected]"
] | |
c697740729c72361e89fa3f8b66eec1705d07e84 | 09e57dd1374713f06b70d7b37a580130d9bbab0d | /data/p4VQE/R3/benchmark/startPyquil348.py | c41069924278a31fe96eac76877e55e4208814cf | [
"BSD-3-Clause"
] | permissive | UCLA-SEAL/QDiff | ad53650034897abb5941e74539e3aee8edb600ab | d968cbc47fe926b7f88b4adf10490f1edd6f8819 | refs/heads/main | 2023-08-05T04:52:24.961998 | 2021-09-19T02:56:16 | 2021-09-19T02:56:16 | 405,159,939 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,196 | py | # qubit number=4
# total number=13
import pyquil
from pyquil.api import local_forest_runtime, QVMConnection
from pyquil import Program, get_qc
from pyquil.gates import *
import numpy as np
conn = QVMConnection()
def make_circuit()-> Program:
prog = Program() # circuit begin
prog += H(1) # number=2
prog += H(2) # number=3
prog += H(3) # number=4
prog += Y(3) # number=5
prog += SWAP(1,0) # number=6
prog += SWAP(1,0) # number=7
prog += CNOT(1,0) # number=10
prog += X(0) # number=11
prog += CNOT(1,0) # number=12
prog += X(0) # number=9
# circuit end
return prog
def summrise_results(bitstrings) -> dict:
d = {}
for l in bitstrings:
if d.get(l) is None:
d[l] = 1
else:
d[l] = d[l] + 1
return d
if __name__ == '__main__':
prog = make_circuit()
qvm = get_qc('4q-qvm')
results = qvm.run_and_measure(prog,1024)
bitstrings = np.vstack([results[i] for i in qvm.qubits()]).T
bitstrings = [''.join(map(str, l)) for l in bitstrings]
writefile = open("../data/startPyquil348.csv","w")
print(summrise_results(bitstrings),file=writefile)
writefile.close()
| [
"[email protected]"
] | |
1af9746ec4cafd840ab09d82afe8460f8f91246c | 27ece9ab880a0bdba4b2c053eccda94602c716d5 | /.history/tf_regression_logistic_20181130085723.py | 63a5da989d271aad51d7c4b5ba99da863f646db0 | [] | no_license | Symfomany/keras | 85e3ad0530837c00f63e14cee044b6a7d85c37b2 | 6cdb6e93dee86014346515a2017652c615bf9804 | refs/heads/master | 2020-04-08T20:21:35.991753 | 2018-11-30T08:23:36 | 2018-11-30T08:23:36 | 159,695,807 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,658 | py | import matplotlib.pyplot as plt
import tensorflow as tf
import numpy as np
import os, argparse
"""
Any interaction with your filesystem to save persistent data in TF needs a Saver object and a Session object.
The Saver constructor allows you to control many things, among which one is important:
The var_list: defaults to None; this is the list of variables you want to persist to your filesystem.
You can either choose to save all the variables, some variables or even a dictionary to give custom names to your variables.
The Session constructor allows you to control 3 things:
+ The var_list: This is used in case of a distributed architecture to handle computation. You can specify which TF server or ‘target’ you want to compute on.
+ The graph: the graph you want the Session to handle. The tricky thing for beginners is the fact that there is always a default Graph in TF where all operations are set by default, so you are always in a “default Graph scope”.
+ The config: You can use ConfigProto to configure TF. Check the linked source for more details.
The Saver can handle the saving and loading (called restoring) of your Graph metadata and your Variables data.
To do that, it adds operations inside the current Graph that will be evaluated within a session.
By default, the Saver will handle the default Graph and all its included Variables,
but you can create as many Savers as you want to control any graph or subgraph and their variables.
If you look at your folder, it actually creates 3 files per save call and a checkpoint file,
I’ll go into more details about this in the annexe.
You can go on just by understanding that weights are saved into .data files and your graph
and metadata are saved into the .meta file.
Note: You must be careful to use a Saver with a Session linked to the Graph containing all the variables the Saver is handling.😨
To restore a meta checkpoint, use the TF helper import_meta_graph:
import tensorflow as tf
# This function returns a Saver
saver = tf.train.import_meta_graph('results/model.ckpt-1000.meta')
graph = tf.get_default_graph()
# Finally we can retrieve tensors, operations, collections, etc.
global_step_tensor = graph.get_tensor_by_name('loss/global_step:0')
train_op = graph.get_operation_by_name('loss/train_op')
hyperparameters = tf.get_collection('hyperparameters')
Restoring the weights:
with tf.Session() as sess:
# To initialize values with saved data
saver.restore(sess, 'results/model.ckpt.data-1000-00000-of-00001')
print(sess.run(global_step_tensor)) # returns 1000
Using a pre-trained graph in a new graph:
Now that you know how to save and load, you can probably figure out how to do it. Yet, there might be some tricks that could help you go faster.
The good point is that this method simplifies everything: you can load a pre-trained VGG-16,
access any nodes in the graph, plug your own operations and train the whole thing!
If you only want to fine-tune your own nodes, you can stop the gradients anywhere you want,
to avoid training the whole graph.
Files architecture
Getting back to TF, when you save your data the usual way, you end up with 5 different types of files:
+ A “checkpoint” file
+ Some “data” files
+ A “meta” file
+ An “index” file
+ If you use Tensorboard, an “events” file
+ If you dump the human-friendly version: a “textual Protobufs” file
+ The checkpoint file is just a bookkeeping file that you can use in combination with high-level helpers for loading chkp files saved at different times.
+ The .meta file holds the compressed Protobufs graph of your model and all the metadata associated (collections, learning rate, operations, etc.)
+ The .index file holds an immutable key-value table linking a serialised tensor name and where to find its data in the chkp.data files
+ The .data files hold the data (weights) itself (this one is usually quite big in size). There can be many data files because they can be sharded and/or created on multiple timesteps while training.
I provide a slightly different version which is simpler and that I found handy. The original freeze_graph function provided by TF is installed in your bin dir and can be called directly if you used PIP to install TF. If not you can call it directly from its folder (see the commented import in the gist).
https://www.tensorflow.org/guide/saved_model
How to use the frozen model
Naturally, after knowing how to freeze a model, one might wonder how to use it.
We need to:
+ Import a graph_def ProtoBuf first
+ Load this graph_def into an actual Graph
"""
dir = os.path.dirname(os.path.realpath(__file__))
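# Sketch (added for illustration): how a frozen_model.pb written by
# freeze_graph() below could be loaded back, following the two steps listed in
# the notes above -- read the serialized GraphDef, then import it into a fresh
# Graph. The "prefix" name scope is an arbitrary choice.
def load_frozen_graph(frozen_graph_filename):
    with tf.gfile.GFile(frozen_graph_filename, "rb") as f:
        graph_def = tf.GraphDef()
        graph_def.ParseFromString(f.read())
    with tf.Graph().as_default() as graph:
        tf.import_graph_def(graph_def, name="prefix")
    return graph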
def freeze_graph(model_dir, output_node_names):
"""Extract the sub graph defined by the output nodes and convert
all its variables into constant
Args:
model_dir: the root folder containing the checkpoint state file
output_node_names: a string, containing all the output node's names,
comma separated
"""
if not tf.gfile.Exists(model_dir):
        raise AssertionError(
            "Export directory doesn't exist. Please specify an export "
            "directory: %s" % model_dir)
if not output_node_names:
print("You need to supply the name of a node to --output_node_names.")
return -1
# We retrieve our checkpoint fullpath
checkpoint = tf.train.get_checkpoint_state(model_dir)
input_checkpoint = checkpoint.model_checkpoint_path
    # We specify the full filename of our frozen graph
absolute_model_dir = "/".join(input_checkpoint.split('/')[:-1])
print(absolute_model_dir)
output_graph = absolute_model_dir + "/models/frozen_model.pb"
# We clear devices to allow TensorFlow to control on which device it will load operations
clear_devices = True
with tf.Session(graph=tf.Graph()) as sess:
# We import the meta graph in the current default Graph
saver = tf.train.import_meta_graph(input_checkpoint + '.meta', clear_devices=clear_devices)
# We restore the weights
saver.restore(sess, input_checkpoint)
# We use a built-in TF helper to export variables to constants
output_graph_def = tf.graph_util.convert_variables_to_constants(
sess, # The session is used to retrieve the weights
tf.get_default_graph().as_graph_def(), # The graph_def is used to retrieve the nodes
            output_node_names.split(",") # The output node names are used to select the useful nodes
)
# Finally we serialize and dump the output graph to the filesystem
with tf.gfile.GFile(output_graph, "wb") as f:
f.write(output_graph_def.SerializeToString())
return output_graph_def
def get_dataset():
"""
Method used to generate the dataset
"""
# Numbers of row per class
row_per_class = 100
# Generate rows
sick = np.random.randn(row_per_class, 2) + np.array([-2, -2])
sick_2 = np.random.randn(row_per_class, 2) + np.array([2, 2])
healthy = np.random.randn(row_per_class, 2) + np.array([-2, 2])
healthy_2 = np.random.randn(row_per_class, 2) + np.array([2, -2])
features = np.vstack([sick, sick_2, healthy, healthy_2])
targets = np.concatenate((np.zeros(row_per_class * 2), np.zeros(row_per_class * 2) + 1))
targets = targets.reshape(-1, 1)
return features, targets
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument("--model_dir", type=str, default="models", help="Model folder to export")
parser.add_argument("--output_node_names", type=str, default="frozen_model", help="The name of the output nodes, comma separated.")
args = parser.parse_args()
features, targets = get_dataset()
# Plot points
#plt.scatter(features[:, 0], features[:, 1], s=40, c=targets, cmap=plt.cm.Spectral)
#plt.show()
tf_features = tf.placeholder(tf.float32, shape=[None, 2])
tf_targets = tf.placeholder(tf.float32, shape=[None, 1])
# First
w1 = tf.Variable(tf.random_normal([2, 3]))
b1 = tf.Variable(tf.zeros([3]))
# Operations
z1 = tf.matmul(tf_features, w1) + b1
a1 = tf.nn.sigmoid(z1)
# Output neuron
w2 = tf.Variable(tf.random_normal([3, 1]))
b2 = tf.Variable(tf.zeros([1]))
# Operations
z2 = tf.matmul(a1, w2) + b2
py = tf.nn.sigmoid(z2)
cost = tf.reduce_mean(tf.square(py - tf_targets))
correct_prediction = tf.equal(tf.round(py), tf_targets)
accuracy = tf.reduce_mean(tf.cast(correct_prediction, tf.float32))
optimizer = tf.train.GradientDescentOptimizer(learning_rate=0.1)
train = optimizer.minimize(cost)
sess = tf.Session()
sess.run(tf.global_variables_initializer())
for e in range(100):
sess.run(train, feed_dict={
tf_features: features,
tf_targets: targets
})
print("accuracy =", sess.run(accuracy, feed_dict={
tf_features: features,
tf_targets: targets
}))
# We can check easily that we are indeed in the default graph
print(z1.graph == tf.get_default_graph())
    # By default, the Saver handles every Variable related to the default graph
all_saver = tf.train.Saver()
all_saver.save(sess, args.model_dir + '/models')
#save a checkpoint file, which will store the above assignment
tf.saved_model.simple_save(sess,"models/model.ckpt",
inputs={
"features_data": tf_features,
}, outputs={
"targets_data": tf_targets
})
#freeze_graph(args.model_dir, args.output_node_names)
| [
"[email protected]"
] | |
66c35ef831aaa59121f0b9b48d719fee7b050b34 | 078686dd88ff399cb3f9f773d237a7b18adf513a | /fund_crawl.py | 2e11bb1c2315571f53e2f78a3e04f58a7555f55c | [] | no_license | kh7160/lotto | b1995bb9488a02f9c0656779cb6bb118aa1d66b0 | 9c6b764bcc7244729d8ad39637de3d029f8f4b26 | refs/heads/master | 2023-02-28T00:12:27.295284 | 2021-02-01T10:49:20 | 2021-02-01T10:49:20 | 334,917,528 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 842 | py | import requests
from bs4 import BeautifulSoup
import fund_parse
url = 'https://dhlottery.co.kr/common.do?method=main'
resp = requests.get(url)
soup = BeautifulSoup(resp.text, 'html.parser')
# 7 num crawling
group = soup.select('.group .num span')
group = group[0].text
num = []
num.append(int(soup.find_all('span', {'class' : 'num al720_color1'})[0].text))
num.append(int(soup.find_all('span', {'class' : 'num al720_color2'})[0].text))
num.append(int(soup.find_all('span', {'class' : 'num al720_color3'})[0].text))
num.append(int(soup.find_all('span', {'class' : 'num al720_color4'})[0].text))
num.append(int(soup.find_all('span', {'class' : 'num al720_color5'})[0].text))
num.append(int(soup.find_all('span', {'class' : 'num al720_color6'})[0].text))
# mysql update
fund_parse.fund_update_group(group)
fund_parse.fund_update_number(num) | [
"[email protected]"
] | |
ddff08d9864dfe1076ecf400d73e63b3b20a37df | 1a663b69c47ac56c38aed5704fc403df82b48491 | /teafacto/scripts/theanowrap.py | 331e4da792e0cef33af941df2b7e907443d1db42 | [
"MIT"
] | permissive | lukovnikov/teafacto | 9c0dda1dbb1abbcff795097a3522178ad5395852 | 5e863df8d061106ad705c0837f2d2ca4e08db0e4 | refs/heads/master | 2020-04-04T05:53:56.616520 | 2017-02-08T21:03:17 | 2017-02-08T21:03:17 | 46,288,607 | 2 | 5 | null | 2016-04-13T12:25:47 | 2015-11-16T16:52:23 | Python | UTF-8 | Python | false | false | 710 | py | from teafacto.core.base import tensorops as T, Val, param
import numpy as np
import sys
x = Val(np.random.random((10,10)))
#y = Val(np.random.random((10,10)))
y = param((10, 10), name="y").uniform()
w = param((10, 10), name="w").uniform()
#z = T.dot(x, y)
z = (x + y)
u = z * w
s = T.nnet.sigmoid
s2 = T.nnet.sigmoid
print s == s2
sys.exit()
print z.allparams
print T.dot
print z.ndim
print z.dimswap
zd = z.dimswap(1,0)
print z.dimswap(0, 1).allparams
print y.dimswap(0, 1).allparams
print T.nnet.conv.conv2d
print u.norm(2).allparams
print u.dimswap(0, 1).allparams
print T.nnet.softmax(z).allparams
zs = T.nnet.sigmoid(z)
zs = zs + x
zs.autobuild()
zs.autobuild()
us = T.nnet.sigmoid(u)
print us.allparams | [
"[email protected]"
] | |
59c6f29c6c88c672ad008ad803c796881d0de0c6 | 938a089e9b5e876a3b48932274171da7a4e7aa42 | /bench/genesys2.py | 2332f797a6bd9cebe7f8ad88338e320f41377567 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | rprinz08/liteeth | aa94e0eb790ba571ea59e98697d11300a57b3d03 | dc10f82753efd236e1811a72c4be2c27cefd2c68 | refs/heads/master | 2023-07-18T17:17:06.441779 | 2021-09-10T08:06:47 | 2021-09-10T08:06:47 | 260,763,015 | 0 | 0 | NOASSERTION | 2020-05-02T19:47:32 | 2020-05-02T19:47:32 | null | UTF-8 | Python | false | false | 2,680 | py | #!/usr/bin/env python3
#
# This file is part of LiteEth.
#
# Copyright (c) 2020 Florent Kermarrec <[email protected]>
# SPDX-License-Identifier: BSD-2-Clause
import os
import argparse
from migen import *
from litex_boards.platforms import genesys2
from litex_boards.targets.genesys2 import _CRG
from litex.soc.cores.clock import *
from litex.soc.interconnect.csr import *
from litex.soc.integration.soc_core import *
from litex.soc.integration.builder import *
from liteeth.phy.s7rgmii import LiteEthPHYRGMII
# Bench SoC ----------------------------------------------------------------------------------------
class BenchSoC(SoCCore):
def __init__(self, sys_clk_freq=int(50e6)):
platform = genesys2.Platform()
# SoCMini ----------------------------------------------------------------------------------
SoCMini.__init__(self, platform, clk_freq=sys_clk_freq,
ident = "LiteEth bench on Genesys2",
ident_version = True
)
# CRG --------------------------------------------------------------------------------------
self.submodules.crg = _CRG(platform, sys_clk_freq)
# Etherbone --------------------------------------------------------------------------------
self.submodules.ethphy = LiteEthPHYRGMII(
clock_pads = self.platform.request("eth_clocks"),
pads = self.platform.request("eth"),
with_hw_init_reset = False)
self.add_etherbone(phy=self.ethphy, buffer_depth=255)
# SRAM -------------------------------------------------------------------------------------
self.add_ram("sram", 0x20000000, 0x1000)
# Leds -------------------------------------------------------------------------------------
from litex.soc.cores.led import LedChaser
self.submodules.leds = LedChaser(
pads = platform.request_all("user_led"),
sys_clk_freq = sys_clk_freq)
# Main ---------------------------------------------------------------------------------------------
def main():
parser = argparse.ArgumentParser(description="LiteEth Bench on Genesys2")
parser.add_argument("--build", action="store_true", help="Build bitstream")
parser.add_argument("--load", action="store_true", help="Load bitstream")
args = parser.parse_args()
soc = BenchSoC()
builder = Builder(soc, csr_csv="csr.csv")
builder.build(run=args.build)
if args.load:
prog = soc.platform.create_programmer()
prog.load_bitstream(os.path.join(builder.gateware_dir, soc.build_name + ".bit"))
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
6387f24c6cee7a4d44c898fadc2886bc1358fc85 | cb3d1b072391b07ef0e9596df7f223f37683e970 | /[0333]_Largest_BST_Subtree/Largest_BST_Subtree.py | 20ac486fdae272035ca2cdb53f05e32e45ab550b | [] | no_license | kotori233/LeetCode | 99620255a64c898457901602de5db150bc35aabb | 996f9fcd26326db9b8f49078d9454fffb908cafe | refs/heads/master | 2021-09-10T18:00:56.968949 | 2018-03-30T14:38:27 | 2018-03-30T14:38:27 | 103,036,334 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 835 | py | # Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
def largestBSTSubtree(self, root):
"""
:type root: TreeNode
:rtype: int
"""
self.res = 0
def dfs(root):
if root is None:
return (0, float('-inf'), float('inf'))
left = dfs(root.left)
right = dfs(root.right)
if root.val > left[1] and root.val < right[2]:
temp = left[0] + right[0] + 1
self.res = max(temp, self.res)
return (temp, max(root.val, right[1]), min(root.val, left[2]))
            # Not a BST here: return sentinels that can never satisfy the parent's
            # ordering check, so an invalid subtree is never absorbed into a larger BST.
            return (0, float('inf'), float('-inf'))
dfs(root)
return self.res
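# Quick local check (added; not part of the LeetCode submission). TreeNode
# mirrors the commented-out definition above.
if __name__ == '__main__':
    class TreeNode(object):
        def __init__(self, x):
            self.val = x
            self.left = None
            self.right = None
    root = TreeNode(10)
    root.left, root.right = TreeNode(5), TreeNode(15)
    root.left.left, root.left.right = TreeNode(1), TreeNode(8)
    root.right.right = TreeNode(7)
    print(Solution().largestBSTSubtree(root))  # expected 3 (the subtree rooted at 5)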
| [
"[email protected]"
] | |
dbc0c27afd8670f2879a4311628eb29b3134a236 | b0ba1585b2f65b2ba0ce98d4186d1d33b91f27cb | /src/containerapp/azext_containerapp/containerapp_decorator.py | 243d3be45515bfeec28eb8c29d18137123008b91 | [
"LicenseRef-scancode-generic-cla",
"MIT"
] | permissive | anagg929/azure-cli-extensions | dce128c9a5105a2c5f510081ec0f521cf5720b55 | ec02d4c83bd8d5ece829abd75b3030142c67aa3a | refs/heads/main | 2023-09-03T19:43:24.099198 | 2023-08-29T12:57:13 | 2023-08-29T12:57:13 | 299,980,394 | 0 | 0 | MIT | 2020-09-30T16:21:59 | 2020-09-30T16:21:58 | null | UTF-8 | Python | false | false | 77,574 | py | # --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long, consider-using-f-string, no-else-return, duplicate-string-formatting-argument, expression-not-assigned, too-many-locals, logging-fstring-interpolation, broad-except, pointless-statement, bare-except
from typing import Dict, Any
from urllib.parse import urlparse
from azure.cli.core.commands import AzCliCommand
import time
from azure.cli.core.azclierror import (
RequiredArgumentMissingError,
ValidationError,
ArgumentUsageError,
ResourceNotFoundError)
from azure.cli.core.commands.client_factory import get_subscription_id
from knack.log import get_logger
from knack.util import CLIError
from msrestazure.tools import parse_resource_id, is_valid_resource_id
from msrest.exceptions import DeserializationError
from .base_resource import BaseResource
from ._clients import ManagedEnvironmentClient, ConnectedEnvironmentClient, ManagedEnvironmentPreviewClient
from ._client_factory import handle_raw_exception, handle_non_404_status_code_exception
from ._models import (
Ingress as IngressModel,
Configuration as ConfigurationModel,
Template as TemplateModel,
RegistryCredentials as RegistryCredentialsModel,
ContainerApp as ContainerAppModel,
Dapr as DaprModel,
ContainerResources as ContainerResourcesModel,
Scale as ScaleModel,
Service as ServiceModel,
Container as ContainerModel,
ManagedServiceIdentity as ManagedServiceIdentityModel,
ScaleRule as ScaleRuleModel,
Volume as VolumeModel,
VolumeMount as VolumeMountModel)
from ._decorator_utils import (create_deserializer,
process_loaded_yaml,
load_yaml_file)
from ._utils import (_ensure_location_allowed,
parse_secret_flags, store_as_secret_and_return_secret_ref, parse_env_var_flags,
_convert_object_from_snake_to_camel_case,
_object_to_dict, _remove_additional_attributes,
_remove_readonly_attributes,
_infer_acr_credentials,
_ensure_identity_resource_id,
validate_container_app_name,
set_managed_identity,
create_acrpull_role_assignment, is_registry_msi_system,
safe_set, parse_metadata_flags, parse_auth_flags,
get_default_workload_profile_name_from_env,
ensure_workload_profile_supported, _generate_secret_volume_name,
parse_service_bindings, check_unique_bindings, AppType, get_linker_client,
safe_get, _update_revision_env_secretrefs, _add_or_update_tags, _populate_secret_values,
clean_null_values, _add_or_update_env_vars, _remove_env_vars, _get_existing_secrets, _get_acr_cred)
from ._validators import validate_create, validate_revision_suffix
from ._constants import (CONTAINER_APPS_RP,
HELLO_WORLD_IMAGE,
CONNECTED_ENVIRONMENT_TYPE,
CONNECTED_ENVIRONMENT_RESOURCE_TYPE,
MANAGED_ENVIRONMENT_TYPE,
MANAGED_ENVIRONMENT_RESOURCE_TYPE, ACR_IMAGE_SUFFIX)
logger = get_logger(__name__)
class BaseContainerAppDecorator(BaseResource):
def __init__(self, cmd: AzCliCommand, client: Any, raw_parameters: Dict, models: str):
super().__init__(cmd, client, raw_parameters, models)
def list(self):
containerapps = super().list()
managed_env = self.get_argument_managed_env()
if managed_env:
env_name = parse_resource_id(managed_env)["name"].lower()
if "resource_group" in parse_resource_id(managed_env):
self.get_environment_client().show(self.cmd, parse_resource_id(managed_env)["resource_group"],
parse_resource_id(managed_env)["name"])
containerapps = [c for c in containerapps if
c["properties"]["environmentId"].lower() == managed_env.lower()]
else:
containerapps = [c for c in containerapps if
parse_resource_id(c["properties"]["environmentId"])["name"].lower() == env_name]
return containerapps
def show(self):
try:
r = super().show()
if self.get_param("show_secrets"):
self.set_up_get_existing_secrets(r)
return r
except CLIError as e:
handle_raw_exception(e)
def list_secrets(self):
containerapp_def = None
try:
containerapp_def = self.client.show(cmd=self.cmd, resource_group_name=self.get_argument_resource_group_name(), name=self.get_argument_name())
except Exception as e:
handle_non_404_status_code_exception(e)
if not containerapp_def:
raise ResourceNotFoundError("The containerapp '{}' does not exist".format(self.get_argument_name()))
if not self.get_argument_show_values():
return safe_get(containerapp_def, "properties", "configuration", "secrets", default=[])
try:
return self.client.list_secrets(cmd=self.cmd, resource_group_name=self.get_argument_resource_group_name(), name=self.get_argument_name())["value"]
except Exception as e:
handle_non_404_status_code_exception(e)
def get_environment_client(self):
return ManagedEnvironmentClient
def set_up_get_existing_secrets(self, containerapp_def):
if "secrets" not in containerapp_def["properties"]["configuration"]:
containerapp_def["properties"]["configuration"]["secrets"] = []
else:
secrets = None
try:
secrets = self.client.list_secrets(cmd=self.cmd, resource_group_name=self.get_argument_resource_group_name(), name=self.get_argument_name())
except Exception as e: # pylint: disable=broad-except
handle_non_404_status_code_exception(e)
containerapp_def["properties"]["configuration"]["secrets"] = secrets["value"]
safe_set(containerapp_def, "properties", "configuration", "secrets", value=secrets["value"])
def get_param(self, key) -> Any:
return self.raw_param.get(key)
def set_param(self, key, value):
self.raw_param[key] = value
def get_argument_name(self):
return self.get_param("name")
def get_argument_resource_group_name(self):
return self.get_param("resource_group_name")
def get_argument_no_wait(self):
return self.get_param("no_wait")
def get_argument_yaml(self):
return self.get_param("yaml")
def get_argument_image(self):
return self.get_param("image")
def set_argument_image(self, image):
self.set_param("image", image)
def get_argument_container_name(self):
return self.get_param("container_name")
def set_argument_container_name(self, container_name):
self.set_param("container_name", container_name)
def get_argument_managed_env(self):
return self.get_param("managed_env")
def set_argument_managed_env(self, managed_env):
self.set_param("managed_env", managed_env)
def get_argument_min_replicas(self):
return self.get_param("min_replicas")
def get_argument_max_replicas(self):
return self.get_param("max_replicas")
def get_argument_scale_rule_name(self):
return self.get_param("scale_rule_name")
def get_argument_scale_rule_type(self):
return self.get_param("scale_rule_type")
def set_argument_scale_rule_type(self, scale_rule_type):
self.set_param("scale_rule_type", scale_rule_type)
def get_argument_scale_rule_http_concurrency(self):
return self.get_param("scale_rule_http_concurrency")
def get_argument_scale_rule_metadata(self):
return self.get_param("scale_rule_metadata")
def get_argument_scale_rule_auth(self):
return self.get_param("scale_rule_auth")
def get_argument_target_port(self):
return self.get_param("target_port")
def get_argument_exposed_port(self):
return self.get_param("exposed_port")
def get_argument_transport(self):
return self.get_param("transport")
def get_argument_ingress(self):
return self.get_param("ingress")
def get_argument_allow_insecure(self):
return self.get_param("allow_insecure")
def get_argument_revisions_mode(self):
return self.get_param("revisions_mode")
def get_argument_secrets(self):
return self.get_param("secrets")
def get_argument_env_vars(self):
return self.get_param("env_vars")
def get_argument_cpu(self):
return self.get_param("cpu")
def get_argument_memory(self):
return self.get_param("memory")
def get_argument_registry_server(self):
return self.get_param("registry_server")
def get_argument_registry_user(self):
return self.get_param("registry_user")
def set_argument_registry_user(self, registry_user):
self.set_param("registry_user", registry_user)
def get_argument_registry_pass(self):
return self.get_param("registry_pass")
def set_argument_registry_pass(self, registry_pass):
self.set_param("registry_pass", registry_pass)
def get_argument_dapr_enabled(self):
return self.get_param("dapr_enabled")
def get_argument_dapr_app_port(self):
return self.get_param("dapr_app_port")
def get_argument_dapr_app_id(self):
return self.get_param("dapr_app_id")
def get_argument_dapr_app_protocol(self):
return self.get_param("dapr_app_protocol")
def get_argument_dapr_http_read_buffer_size(self):
return self.get_param("dapr_http_read_buffer_size")
def get_argument_dapr_http_max_request_size(self):
return self.get_param("dapr_http_max_request_size")
def get_argument_dapr_log_level(self):
return self.get_param("dapr_log_level")
def get_argument_dapr_enable_api_logging(self):
return self.get_param("dapr_enable_api_logging")
def get_argument_service_type(self):
return self.get_param("service_type")
def get_argument_service_bindings(self):
return self.get_param("service_bindings")
def get_argument_revision_suffix(self):
return self.get_param("revision_suffix")
def get_argument_startup_command(self):
return self.get_param("startup_command")
def get_argument_args(self):
return self.get_param("args")
def get_argument_tags(self):
return self.get_param("tags")
def get_argument_system_assigned(self):
return self.get_param("system_assigned")
def get_argument_disable_warnings(self):
return self.get_param("disable_warnings")
def get_argument_user_assigned(self):
return self.get_param("user_assigned")
def get_argument_registry_identity(self):
return self.get_param("registry_identity")
def get_argument_workload_profile_name(self):
return self.get_param("workload_profile_name")
def set_argument_workload_profile_name(self, workload_profile_name):
self.set_param("workload_profile_name", workload_profile_name)
def get_argument_secret_volume_mount(self):
return self.get_param("secret_volume_mount")
def get_argument_service_connectors_def_list(self):
return self.get_param("service_connectors_def_list")
def set_argument_service_connectors_def_list(self, service_connectors_def_list):
self.set_param("service_connectors_def_list", service_connectors_def_list)
def get_argument_termination_grace_period(self):
return self.get_param("termination_grace_period")
def get_argument_show_values(self):
return self.get_param("show_values")
def get_argument_set_env_vars(self):
return self.get_param("set_env_vars")
def get_argument_remove_env_vars(self):
return self.get_param("remove_env_vars")
def get_argument_replace_env_vars(self):
return self.get_param("replace_env_vars")
def get_argument_remove_all_env_vars(self):
return self.get_param("remove_all_env_vars")
def get_argument_from_revision(self):
return self.get_param("from_revision")
def get_argument_unbind_service_bindings(self):
return self.get_param("unbind_service_bindings")
class ContainerAppCreateDecorator(BaseContainerAppDecorator):
def __init__(
self, cmd: AzCliCommand, client: Any, raw_parameters: Dict, models: str
):
super().__init__(cmd, client, raw_parameters, models)
self.containerapp_def = ContainerAppModel
def validate_arguments(self):
validate_container_app_name(self.get_argument_name(), AppType.ContainerApp.name)
validate_create(self.get_argument_registry_identity(), self.get_argument_registry_pass(), self.get_argument_registry_user(), self.get_argument_registry_server(), self.get_argument_no_wait())
validate_revision_suffix(self.get_argument_revision_suffix())
def construct_payload(self):
if self.get_argument_registry_identity() and not is_registry_msi_system(self.get_argument_registry_identity()):
logger.info("Creating an acrpull role assignment for the registry identity")
create_acrpull_role_assignment(self.cmd, self.get_argument_registry_server(), self.get_argument_registry_identity(), skip_error=True)
if self.get_argument_yaml():
return self.set_up_create_containerapp_yaml(name=self.get_argument_name(), file_name=self.get_argument_yaml())
if not self.get_argument_image():
self.set_argument_image(HELLO_WORLD_IMAGE)
if self.get_argument_managed_env() is None:
raise RequiredArgumentMissingError('Usage error: --environment is required if not using --yaml')
# Validate managed environment
parsed_managed_env = parse_resource_id(self.get_argument_managed_env())
managed_env_name = parsed_managed_env['name']
managed_env_rg = parsed_managed_env['resource_group']
managed_env_info = None
try:
managed_env_info = self.get_environment_client().show(cmd=self.cmd, resource_group_name=managed_env_rg, name=managed_env_name)
except Exception as e:
handle_non_404_status_code_exception(e)
if not managed_env_info:
raise ValidationError("The environment '{}' does not exist. Specify a valid environment".format(self.get_argument_managed_env()))
while not self.get_argument_no_wait() and safe_get(managed_env_info, "properties", "provisioningState", default="").lower() in ["inprogress", "updating"]:
logger.info("Waiting for environment provisioning to finish before creating container app")
time.sleep(5)
managed_env_info = self.get_environment_client().show(cmd=self.cmd, resource_group_name=managed_env_rg, name=managed_env_name)
location = managed_env_info["location"]
_ensure_location_allowed(self.cmd, location, CONTAINER_APPS_RP, "containerApps")
if not self.get_argument_workload_profile_name() and "workloadProfiles" in managed_env_info:
workload_profile_name = get_default_workload_profile_name_from_env(self.cmd, managed_env_info, managed_env_rg)
self.set_argument_workload_profile_name(workload_profile_name)
external_ingress = None
if self.get_argument_ingress() is not None:
if self.get_argument_ingress().lower() == "internal":
external_ingress = False
elif self.get_argument_ingress().lower() == "external":
external_ingress = True
ingress_def = None
if self.get_argument_target_port() is not None and self.get_argument_ingress() is not None:
ingress_def = IngressModel
ingress_def["external"] = external_ingress
ingress_def["targetPort"] = self.get_argument_target_port()
ingress_def["transport"] = self.get_argument_transport()
ingress_def["exposedPort"] = self.get_argument_exposed_port() if self.get_argument_transport() == "tcp" else None
ingress_def["allowInsecure"] = self.get_argument_allow_insecure()
secrets_def = None
if self.get_argument_secrets() is not None:
secrets_def = parse_secret_flags(self.get_argument_secrets())
registries_def = None
if self.get_argument_registry_server() is not None and not is_registry_msi_system(self.get_argument_registry_identity()):
registries_def = RegistryCredentialsModel
registries_def["server"] = self.get_argument_registry_server()
# Infer credentials if not supplied and its azurecr
if (self.get_argument_registry_user() is None or self.get_argument_registry_pass() is None) and self.get_argument_registry_identity() is None:
registry_user, registry_pass = _infer_acr_credentials(self.cmd, self.get_argument_registry_server(), self.get_argument_disable_warnings())
self.set_argument_registry_user(registry_user)
self.set_argument_registry_pass(registry_pass)
if not self.get_argument_registry_identity():
registries_def["username"] = self.get_argument_registry_user()
if secrets_def is None:
secrets_def = []
registries_def["passwordSecretRef"] = store_as_secret_and_return_secret_ref(secrets_def, self.get_argument_registry_user(),
self.get_argument_registry_server(),
self.get_argument_registry_pass(),
disable_warnings=self.get_argument_disable_warnings())
else:
registries_def["identity"] = self.get_argument_registry_identity()
dapr_def = None
if self.get_argument_dapr_enabled():
dapr_def = DaprModel
dapr_def["enabled"] = True
dapr_def["appId"] = self.get_argument_dapr_app_id()
dapr_def["appPort"] = self.get_argument_dapr_app_port()
dapr_def["appProtocol"] = self.get_argument_dapr_app_protocol()
dapr_def["httpReadBufferSize"] = self.get_argument_dapr_http_read_buffer_size()
dapr_def["httpMaxRequestSize"] = self.get_argument_dapr_http_max_request_size()
dapr_def["logLevel"] = self.get_argument_dapr_log_level()
dapr_def["enableApiLogging"] = self.get_argument_dapr_enable_api_logging()
service_def = None
if self.get_argument_service_type():
service_def = ServiceModel
service_def["type"] = self.get_argument_service_type()
config_def = ConfigurationModel
config_def["secrets"] = secrets_def
config_def["activeRevisionsMode"] = self.get_argument_revisions_mode()
config_def["ingress"] = ingress_def
config_def["registries"] = [registries_def] if registries_def is not None else None
config_def["dapr"] = dapr_def
config_def["service"] = service_def if service_def is not None else None
# Identity actions
identity_def = ManagedServiceIdentityModel
identity_def["type"] = "None"
assign_system_identity = self.get_argument_system_assigned()
if self.get_argument_user_assigned():
assign_user_identities = [x.lower() for x in self.get_argument_user_assigned()]
else:
assign_user_identities = []
if assign_system_identity and assign_user_identities:
identity_def["type"] = "SystemAssigned, UserAssigned"
elif assign_system_identity:
identity_def["type"] = "SystemAssigned"
elif assign_user_identities:
identity_def["type"] = "UserAssigned"
if assign_user_identities:
identity_def["userAssignedIdentities"] = {}
subscription_id = get_subscription_id(self.cmd.cli_ctx)
for r in assign_user_identities:
r = _ensure_identity_resource_id(subscription_id, self.get_argument_resource_group_name(), r)
identity_def["userAssignedIdentities"][r] = {} # pylint: disable=unsupported-assignment-operation
scale_def = self.set_up_scale_rule()
resources_def = None
if self.get_argument_cpu() is not None or self.get_argument_memory() is not None:
resources_def = ContainerResourcesModel
resources_def["cpu"] = self.get_argument_cpu()
resources_def["memory"] = self.get_argument_memory()
container_def = ContainerModel
container_def["name"] = self.get_argument_container_name() if self.get_argument_container_name() else self.get_argument_name()
container_def["image"] = self.get_argument_image() if not is_registry_msi_system(self.get_argument_registry_identity()) else HELLO_WORLD_IMAGE
if self.get_argument_env_vars() is not None:
container_def["env"] = parse_env_var_flags(self.get_argument_env_vars())
if self.get_argument_startup_command() is not None:
container_def["command"] = self.get_argument_startup_command()
if self.get_argument_args() is not None:
container_def["args"] = self.get_argument_args()
if resources_def is not None:
container_def["resources"] = resources_def
template_def = TemplateModel
template_def["containers"] = [container_def]
template_def["scale"] = scale_def
if self.get_argument_secret_volume_mount() is not None:
volume_def = VolumeModel
volume_mount_def = VolumeMountModel
# generate a volume name
volume_def["name"] = _generate_secret_volume_name()
volume_def["storageType"] = "Secret"
# mount the volume to the container
volume_mount_def["volumeName"] = volume_def["name"]
volume_mount_def["mountPath"] = self.get_argument_secret_volume_mount()
container_def["volumeMounts"] = [volume_mount_def]
template_def["volumes"] = [volume_def]
if self.get_argument_revision_suffix() is not None and not is_registry_msi_system(self.get_argument_registry_identity()):
template_def["revisionSuffix"] = self.get_argument_revision_suffix()
if self.get_argument_termination_grace_period() is not None:
template_def["terminationGracePeriodSeconds"] = self.get_argument_termination_grace_period()
self.containerapp_def["location"] = location
self.containerapp_def["identity"] = identity_def
self.containerapp_def["properties"]["environmentId"] = self.get_argument_managed_env()
self.containerapp_def["properties"]["configuration"] = config_def
self.containerapp_def["properties"]["template"] = template_def
self.containerapp_def["tags"] = self.get_argument_tags()
if self.get_argument_workload_profile_name():
self.containerapp_def["properties"]["workloadProfileName"] = self.get_argument_workload_profile_name()
ensure_workload_profile_supported(self.cmd, managed_env_name, managed_env_rg, self.get_argument_workload_profile_name(),
managed_env_info)
if self.get_argument_registry_identity():
if is_registry_msi_system(self.get_argument_registry_identity()):
set_managed_identity(self.cmd, self.get_argument_resource_group_name(), self.containerapp_def, system_assigned=True)
else:
set_managed_identity(self.cmd, self.get_argument_resource_group_name(), self.containerapp_def, user_assigned=[self.get_argument_registry_identity()])
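    # Illustrative sketch only (flag spellings inferred from the argument getters above, not
    # confirmed against the CLI help): a call such as
    #   az containerapp create -n myapp -g myrg --environment myenv \
    #       --image myregistry.azurecr.io/app:latest --target-port 80 --ingress external
    # leaves construct_payload() with a containerapp_def shaped roughly like
    #   {
    #       "location": <location of myenv>,
    #       "identity": {"type": "None"},
    #       "properties": {
    #           "environmentId": <resource id of myenv>,
    #           "configuration": {"ingress": {"external": True, "targetPort": 80, ...},
    #                             "secrets": None, "registries": None, ...},
    #           "template": {"containers": [{"name": "myapp",
    #                                        "image": "myregistry.azurecr.io/app:latest"}],
    #                        "scale": None}
    #       },
    #       "tags": None
    #   }
    # which create() below sends to the create_or_update API.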
def create(self):
try:
r = self.client.create_or_update(
cmd=self.cmd, resource_group_name=self.get_argument_resource_group_name(), name=self.get_argument_name(), container_app_envelope=self.containerapp_def,
no_wait=self.get_argument_no_wait())
return r
except Exception as e:
handle_raw_exception(e)
def construct_for_post_process(self, r):
if is_registry_msi_system(self.get_argument_registry_identity()):
while r["properties"]["provisioningState"] == "InProgress":
r = self.client.show(self.cmd, self.get_argument_resource_group_name(), self.get_argument_name())
time.sleep(10)
logger.info("Creating an acrpull role assignment for the system identity")
system_sp = r["identity"]["principalId"]
create_acrpull_role_assignment(self.cmd, self.get_argument_registry_server(), registry_identity=None, service_principal=system_sp)
containers_def = safe_get(self.containerapp_def, "properties", "template", "containers")
containers_def[0]["image"] = self.get_argument_image()
safe_set(self.containerapp_def, "properties", "template", "revisionSuffix", value=self.get_argument_revision_suffix())
registries_def = RegistryCredentialsModel
registries_def["server"] = self.get_argument_registry_server()
registries_def["identity"] = self.get_argument_registry_identity()
safe_set(self.containerapp_def, "properties", "configuration", "registries", value=[registries_def])
def post_process(self, r):
if is_registry_msi_system(self.get_argument_registry_identity()):
r = self.create()
if "properties" in r and "provisioningState" in r["properties"] and r["properties"]["provisioningState"].lower() == "waiting" and not self.get_argument_no_wait():
not self.get_argument_disable_warnings() and logger.warning('Containerapp creation in progress. Please monitor the creation using `az containerapp show -n {} -g {}`'.format(self.get_argument_name(), self.get_argument_resource_group_name()))
if "configuration" in r["properties"] and "ingress" in r["properties"]["configuration"] and r["properties"]["configuration"]["ingress"] and "fqdn" in r["properties"]["configuration"]["ingress"]:
not self.get_argument_disable_warnings() and logger.warning("\nContainer app created. Access your app at https://{}/\n".format(r["properties"]["configuration"]["ingress"]["fqdn"]))
else:
target_port = self.get_argument_target_port() or "<port>"
not self.get_argument_disable_warnings() and logger.warning("\nContainer app created. To access it over HTTPS, enable ingress: "
"az containerapp ingress enable -n %s -g %s --type external --target-port %s"
" --transport auto\n", self.get_argument_name(), self.get_argument_resource_group_name(), target_port)
return r
def set_up_create_containerapp_yaml(self, name, file_name):
if self.get_argument_image() or self.get_argument_min_replicas() or self.get_argument_max_replicas() or self.get_argument_target_port() or self.get_argument_ingress() or \
self.get_argument_revisions_mode() or self.get_argument_secrets() or self.get_argument_env_vars() or self.get_argument_cpu() or self.get_argument_memory() or self.get_argument_registry_server() or \
self.get_argument_registry_user() or self.get_argument_registry_pass() or self.get_argument_dapr_enabled() or self.get_argument_dapr_app_port() or self.get_argument_dapr_app_id() or \
self.get_argument_startup_command() or self.get_argument_args() or self.get_argument_tags():
not self.get_argument_disable_warnings() and logger.warning(
'Additional flags were passed along with --yaml. These flags will be ignored, and the configuration defined in the yaml will be used instead')
yaml_containerapp = process_loaded_yaml(load_yaml_file(file_name))
if type(yaml_containerapp) != dict: # pylint: disable=unidiomatic-typecheck
raise ValidationError(
'Invalid YAML provided. Please see https://aka.ms/azure-container-apps-yaml for a valid containerapps YAML spec.')
if not yaml_containerapp.get('name'):
yaml_containerapp['name'] = name
elif yaml_containerapp.get('name').lower() != name.lower():
logger.warning(
'The app name provided in the --yaml file "{}" does not match the one provided in the --name flag "{}". The one provided in the --yaml file will be used.'.format(
yaml_containerapp.get('name'), name))
name = yaml_containerapp.get('name')
if not yaml_containerapp.get('type'):
yaml_containerapp['type'] = 'Microsoft.App/containerApps'
elif yaml_containerapp.get('type').lower() != "microsoft.app/containerapps":
raise ValidationError('Containerapp type must be \"Microsoft.App/ContainerApps\"')
# Deserialize the yaml into a ContainerApp object. Need this since we're not using SDK
try:
deserializer = create_deserializer(self.models)
self.containerapp_def = deserializer('ContainerApp', yaml_containerapp)
except DeserializationError as ex:
raise ValidationError(
'Invalid YAML provided. Please see https://aka.ms/azure-container-apps-yaml for a valid containerapps YAML spec.') from ex
# Remove tags before converting from snake case to camel case, then re-add tags. We don't want to change the case of the tags. Need this since we're not using SDK
tags = None
if yaml_containerapp.get('tags'):
tags = yaml_containerapp.get('tags')
del yaml_containerapp['tags']
self.containerapp_def = _convert_object_from_snake_to_camel_case(_object_to_dict(self.containerapp_def))
self.containerapp_def['tags'] = tags
# After deserializing, some properties may need to be moved under the "properties" attribute. Need this since we're not using SDK
self.containerapp_def = process_loaded_yaml(self.containerapp_def)
# Remove "additionalProperties" and read-only attributes that are introduced in the deserialization. Need this since we're not using SDK
_remove_additional_attributes(self.containerapp_def)
_remove_readonly_attributes(self.containerapp_def)
# Remove extra workloadProfileName introduced in deserialization
if "workloadProfileName" in self.containerapp_def:
del self.containerapp_def["workloadProfileName"]
# Validate managed environment
env_id = self.containerapp_def["properties"]['environmentId']
env_info = None
if self.get_argument_managed_env():
if not self.get_argument_disable_warnings() and env_id is not None and env_id != self.get_argument_managed_env():
logger.warning('The environmentId was passed along with --yaml. The value entered with --environment will be ignored, and the configuration defined in the yaml will be used instead')
if env_id is None:
env_id = self.get_argument_managed_env()
safe_set(self.containerapp_def, "properties", "environmentId", value=env_id)
if not self.containerapp_def["properties"].get('environmentId'):
raise RequiredArgumentMissingError(
'environmentId is required. This can be retrieved using the `az containerapp env show -g MyResourceGroup -n MyContainerappEnvironment --query id` command. Please see https://aka.ms/azure-container-apps-yaml for a valid containerapps YAML spec.')
if is_valid_resource_id(env_id):
parsed_managed_env = parse_resource_id(env_id)
env_name = parsed_managed_env['name']
env_rg = parsed_managed_env['resource_group']
else:
raise ValidationError('Invalid environmentId specified. Environment not found')
try:
env_info = self.get_environment_client().show(cmd=self.cmd, resource_group_name=env_rg, name=env_name)
except Exception as e:
handle_non_404_status_code_exception(e)
if not env_info:
raise ValidationError("The environment '{}' in resource group '{}' was not found".format(env_name, env_rg))
# Validate location
if not self.containerapp_def.get('location'):
self.containerapp_def['location'] = env_info['location']
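    # Illustrative sketch of a --yaml input this method accepts (field names follow what the code
    # above reads after process_loaded_yaml(); values are placeholders, not a complete spec --
    # see https://aka.ms/azure-container-apps-yaml):
    #   name: myapp
    #   type: Microsoft.App/containerApps
    #   properties:
    #     environmentId: /subscriptions/<sub>/resourceGroups/<rg>/providers/Microsoft.App/managedEnvironments/<env>
    #     configuration:
    #       ingress:
    #         external: true
    #         targetPort: 80
    #     template:
    #       containers:
    #         - name: myapp
    #           image: myregistry.azurecr.io/app:latest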
def set_up_scale_rule(self):
scale_def = None
if self.get_argument_min_replicas() is not None or self.get_argument_max_replicas() is not None:
scale_def = ScaleModel
scale_def["minReplicas"] = self.get_argument_min_replicas()
scale_def["maxReplicas"] = self.get_argument_max_replicas()
scale_rule_type = self.get_argument_scale_rule_type()
scale_rule_name = self.get_argument_scale_rule_name()
scale_rule_auth = self.get_argument_scale_rule_auth()
scale_rule_metadata = self.get_argument_scale_rule_metadata()
scale_rule_http_concurrency = self.get_argument_scale_rule_http_concurrency()
if self.get_argument_scale_rule_name():
if not scale_rule_type:
scale_rule_type = "http"
scale_rule_type = scale_rule_type.lower()
scale_rule_def = ScaleRuleModel
curr_metadata = {}
if self.get_argument_scale_rule_http_concurrency():
if scale_rule_type in ('http', 'tcp'):
curr_metadata["concurrentRequests"] = str(scale_rule_http_concurrency)
metadata_def = parse_metadata_flags(scale_rule_metadata, curr_metadata)
auth_def = parse_auth_flags(scale_rule_auth)
if scale_rule_type == "http":
scale_rule_def["name"] = scale_rule_name
scale_rule_def["custom"] = None
scale_rule_def["http"] = {}
scale_rule_def["http"]["metadata"] = metadata_def
scale_rule_def["http"]["auth"] = auth_def
else:
scale_rule_def["name"] = scale_rule_name
scale_rule_def["http"] = None
scale_rule_def["custom"] = {}
scale_rule_def["custom"]["type"] = scale_rule_type
scale_rule_def["custom"]["metadata"] = metadata_def
scale_rule_def["custom"]["auth"] = auth_def
if not scale_def:
scale_def = ScaleModel
scale_def["rules"] = [scale_rule_def]
return scale_def
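    # Illustrative sketch (assumed flag spellings, mirroring the getters used above): with
    #   --scale-rule-name myrule --scale-rule-type http --scale-rule-http-concurrency 50
    # set_up_scale_rule() returns roughly
    #   {"rules": [{"name": "myrule", "custom": None,
    #               "http": {"metadata": {"concurrentRequests": "50"},
    #                        "auth": <result of parse_auth_flags(--scale-rule-auth)>}}]}
    # plus "minReplicas"/"maxReplicas" when --min-replicas/--max-replicas are also given.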
class ContainerAppUpdateDecorator(BaseContainerAppDecorator):
def __init__(
self, cmd: AzCliCommand, client: Any, raw_parameters: Dict, models: str
):
super().__init__(cmd, client, raw_parameters, models)
self.containerapp_def = {}
self.new_containerapp = {}
def validate_arguments(self):
validate_revision_suffix(self.get_argument_revision_suffix())
        # Validate that max_replicas is set to 1-1000
if self.get_argument_max_replicas() is not None:
if self.get_argument_max_replicas() < 1 or self.get_argument_max_replicas() > 1000:
raise ArgumentUsageError('--max-replicas must be in the range [1,1000]')
def update(self):
try:
r = self.client.update(
cmd=self.cmd, resource_group_name=self.get_argument_resource_group_name(), name=self.get_argument_name(), container_app_envelope=self.new_containerapp,
no_wait=self.get_argument_no_wait())
if not self.get_argument_no_wait() and "properties" in r and "provisioningState" in r["properties"] and r["properties"]["provisioningState"].lower() == "waiting":
logger.warning('Containerapp update in progress. Please monitor the update using `az containerapp show -n {} -g {}`'.format(self.get_argument_name(), self.get_argument_resource_group_name()))
return r
except Exception as e:
handle_raw_exception(e)
def set_up_from_revision(self):
if self.get_argument_from_revision():
r = None
try:
r = self.client.show_revision(cmd=self.cmd, resource_group_name=self.get_argument_resource_group_name(), container_app_name=self.get_argument_name(), name=self.get_argument_from_revision())
except CLIError as e:
handle_non_404_status_code_exception(e)
_update_revision_env_secretrefs(r["properties"]["template"]["containers"], self.get_argument_name())
safe_set(self.new_containerapp, "properties", "template", value=r["properties"]["template"])
def _need_update_container(self):
return self.get_argument_image() or self.get_argument_container_name() or self.get_argument_set_env_vars() is not None or self.get_argument_remove_env_vars() is not None or self.get_argument_replace_env_vars() is not None or self.get_argument_remove_all_env_vars() or self.get_argument_cpu() or self.get_argument_memory() or self.get_argument_startup_command() is not None or self.get_argument_args() is not None or self.get_argument_secret_volume_mount() is not None
def construct_payload(self):
# construct from yaml
if self.get_argument_yaml():
return self.set_up_update_containerapp_yaml(name=self.get_argument_name(), file_name=self.get_argument_yaml())
self.containerapp_def = None
try:
self.containerapp_def = self.client.show(cmd=self.cmd, resource_group_name=self.get_argument_resource_group_name(), name=self.get_argument_name())
except Exception as e:
handle_non_404_status_code_exception(e)
if not self.containerapp_def:
raise ResourceNotFoundError("The containerapp '{}' does not exist".format(self.get_argument_name()))
self.new_containerapp["properties"] = {}
self.set_up_from_revision()
# Doing this while API has bug. If env var is an empty string, API doesn't return "value" even though the "value" should be an empty string
for container in safe_get(self.containerapp_def, "properties", "template", "containers", default=[]):
if "env" in container:
for e in container["env"]:
if "value" not in e:
e["value"] = ""
update_map = {}
update_map['scale'] = self.get_argument_min_replicas() or self.get_argument_max_replicas() or self.get_argument_scale_rule_name()
update_map['container'] = self._need_update_container()
update_map['ingress'] = self.get_argument_ingress() or self.get_argument_target_port()
update_map['registry'] = self.get_argument_registry_server() or self.get_argument_registry_user() or self.get_argument_registry_pass()
if self.get_argument_tags():
_add_or_update_tags(self.new_containerapp, self.get_argument_tags())
if self.get_argument_revision_suffix() is not None:
self.new_containerapp["properties"]["template"] = {} if "template" not in self.new_containerapp["properties"] else self.new_containerapp["properties"]["template"]
self.new_containerapp["properties"]["template"]["revisionSuffix"] = self.get_argument_revision_suffix()
if self.get_argument_termination_grace_period() is not None:
safe_set(self.new_containerapp, "properties", "template", "terminationGracePeriodSeconds",
value=self.get_argument_termination_grace_period())
if self.get_argument_workload_profile_name():
self.new_containerapp["properties"]["workloadProfileName"] = self.get_argument_workload_profile_name()
parsed_managed_env = parse_resource_id(self.containerapp_def["properties"]["environmentId"])
managed_env_name = parsed_managed_env['name']
managed_env_rg = parsed_managed_env['resource_group']
managed_env_info = None
try:
managed_env_info = self.get_environment_client().show(cmd=self.cmd, resource_group_name=managed_env_rg, name=managed_env_name)
except Exception as e:
handle_non_404_status_code_exception(e)
if not managed_env_info:
raise ValidationError(
"Error parsing the managed environment '{}' from the specified containerapp".format(
managed_env_name))
ensure_workload_profile_supported(self.cmd, managed_env_name, managed_env_rg, self.get_argument_workload_profile_name(),
managed_env_info)
# Containers
if update_map["container"]:
self.new_containerapp["properties"]["template"] = {} if "template" not in self.new_containerapp["properties"] else self.new_containerapp["properties"]["template"]
self.new_containerapp["properties"]["template"]["containers"] = self.containerapp_def["properties"]["template"]["containers"]
if not self.get_argument_container_name():
if len(self.new_containerapp["properties"]["template"]["containers"]) == 1:
container_name = self.new_containerapp["properties"]["template"]["containers"][0]["name"]
self.set_argument_container_name(container_name)
else:
raise ValidationError(
"Usage error: --container-name is required when adding or updating a container")
# Check if updating existing container
updating_existing_container = False
for c in self.new_containerapp["properties"]["template"]["containers"]:
if c["name"].lower() == self.get_argument_container_name().lower():
updating_existing_container = True
if self.get_argument_image() is not None:
c["image"] = self.get_argument_image()
if self.get_argument_set_env_vars() is not None:
if "env" not in c or not c["env"]:
c["env"] = []
# env vars
_add_or_update_env_vars(c["env"], parse_env_var_flags(self.get_argument_set_env_vars()))
if self.get_argument_replace_env_vars() is not None:
# Remove other existing env_vars, then add them
c["env"] = []
_add_or_update_env_vars(c["env"], parse_env_var_flags(self.get_argument_replace_env_vars()))
if self.get_argument_remove_env_vars() is not None:
if "env" not in c or not c["env"]:
c["env"] = []
# env vars
_remove_env_vars(c["env"], self.get_argument_remove_env_vars())
if self.get_argument_remove_all_env_vars():
c["env"] = []
if self.get_argument_startup_command() is not None:
if isinstance(self.get_argument_startup_command(), list) and not self.get_argument_startup_command():
c["command"] = None
else:
c["command"] = self.get_argument_startup_command()
if self.get_argument_args() is not None:
if isinstance(self.get_argument_args(), list) and not self.get_argument_args():
c["args"] = None
else:
c["args"] = self.get_argument_args()
if self.get_argument_cpu() is not None or self.get_argument_memory() is not None:
if "resources" in c and c["resources"]:
if self.get_argument_cpu() is not None:
c["resources"]["cpu"] = self.get_argument_cpu()
if self.get_argument_memory() is not None:
c["resources"]["memory"] = self.get_argument_memory()
else:
c["resources"] = {
"cpu": self.get_argument_cpu(),
"memory": self.get_argument_memory()
}
if self.get_argument_secret_volume_mount() is not None:
self.new_containerapp["properties"]["template"]["volumes"] = self.containerapp_def["properties"]["template"]["volumes"]
if "volumeMounts" not in c or not c["volumeMounts"]:
# if no volume mount exists, create a new volume and then mount
volume_def = VolumeModel
volume_mount_def = VolumeMountModel
volume_def["name"] = _generate_secret_volume_name()
volume_def["storageType"] = "Secret"
volume_mount_def["volumeName"] = volume_def["name"]
volume_mount_def["mountPath"] = self.get_argument_secret_volume_mount()
if "volumes" not in self.new_containerapp["properties"]["template"]:
self.new_containerapp["properties"]["template"]["volumes"] = [volume_def]
else:
self.new_containerapp["properties"]["template"]["volumes"].append(volume_def)
                            c["volumeMounts"] = [volume_mount_def]
else:
if len(c["volumeMounts"]) > 1:
raise ValidationError(
"Usage error: --secret-volume-mount can only be used with a container that has a single volume mount, to define multiple volumes and mounts please use --yaml")
else:
# check that the only volume is of type secret
volume_name = c["volumeMounts"][0]["volumeName"]
for v in self.new_containerapp["properties"]["template"]["volumes"]:
if v["name"].lower() == volume_name.lower():
if v["storageType"] != "Secret":
raise ValidationError(
"Usage error: --secret-volume-mount can only be used to update volume mounts with volumes of type secret. To update other types of volumes please use --yaml")
break
c["volumeMounts"][0]["mountPath"] = self.get_argument_secret_volume_mount()
# If not updating existing container, add as new container
if not updating_existing_container:
if self.get_argument_image() is None:
raise ValidationError("Usage error: --image is required when adding a new container")
resources_def = None
if self.get_argument_cpu() is not None or self.get_argument_memory() is not None:
resources_def = ContainerResourcesModel
resources_def["cpu"] = self.get_argument_cpu()
resources_def["memory"] = self.get_argument_memory()
container_def = ContainerModel
container_def["name"] = self.get_argument_container_name()
container_def["image"] = self.get_argument_image()
container_def["env"] = []
if self.get_argument_set_env_vars() is not None:
# env vars
_add_or_update_env_vars(container_def["env"], parse_env_var_flags(self.get_argument_set_env_vars()))
if self.get_argument_replace_env_vars() is not None:
# env vars
_add_or_update_env_vars(container_def["env"], parse_env_var_flags(self.get_argument_replace_env_vars()))
if self.get_argument_remove_env_vars() is not None:
# env vars
_remove_env_vars(container_def["env"], self.get_argument_remove_env_vars())
if self.get_argument_remove_all_env_vars():
container_def["env"] = []
if self.get_argument_startup_command() is not None:
if isinstance(self.get_argument_startup_command(), list) and not self.get_argument_startup_command():
container_def["command"] = None
else:
container_def["command"] = self.get_argument_startup_command()
if self.get_argument_args() is not None:
if isinstance(self.get_argument_args(), list) and not self.get_argument_args():
container_def["args"] = None
else:
container_def["args"] = self.get_argument_args()
if resources_def is not None:
container_def["resources"] = resources_def
if self.get_argument_secret_volume_mount() is not None:
self.new_containerapp["properties"]["template"]["volumes"] = self.containerapp_def["properties"]["template"]["volumes"]
# generate a new volume name
volume_def = VolumeModel
volume_mount_def = VolumeMountModel
volume_def["name"] = _generate_secret_volume_name()
volume_def["storageType"] = "Secret"
# mount the volume to the container
volume_mount_def["volumeName"] = volume_def["name"]
volume_mount_def["mountPath"] = self.get_argument_secret_volume_mount()
container_def["volumeMounts"] = [volume_mount_def]
if "volumes" not in self.new_containerapp["properties"]["template"]:
self.new_containerapp["properties"]["template"]["volumes"] = [volume_def]
else:
self.new_containerapp["properties"]["template"]["volumes"].append(volume_def)
self.new_containerapp["properties"]["template"]["containers"].append(container_def)
# Scale
if update_map["scale"]:
self.new_containerapp["properties"]["template"] = {} if "template" not in self.new_containerapp["properties"] else self.new_containerapp["properties"]["template"]
if "scale" not in self.new_containerapp["properties"]["template"]:
self.new_containerapp["properties"]["template"]["scale"] = {}
if self.get_argument_min_replicas() is not None:
self.new_containerapp["properties"]["template"]["scale"]["minReplicas"] = self.get_argument_min_replicas()
if self.get_argument_max_replicas() is not None:
self.new_containerapp["properties"]["template"]["scale"]["maxReplicas"] = self.get_argument_max_replicas()
scale_def = None
if self.get_argument_min_replicas() is not None or self.get_argument_max_replicas() is not None:
scale_def = ScaleModel
scale_def["minReplicas"] = self.get_argument_min_replicas()
scale_def["maxReplicas"] = self.get_argument_max_replicas()
# so we don't overwrite rules
if safe_get(self.new_containerapp, "properties", "template", "scale", "rules"):
                self.new_containerapp["properties"]["template"]["scale"].pop("rules")
scale_rule_type = self.get_argument_scale_rule_type()
if self.get_argument_scale_rule_name():
if not scale_rule_type:
scale_rule_type = "http"
scale_rule_type = scale_rule_type.lower()
scale_rule_def = ScaleRuleModel
curr_metadata = {}
if self.get_argument_scale_rule_http_concurrency():
if scale_rule_type in ('http', 'tcp'):
curr_metadata["concurrentRequests"] = str(self.get_argument_scale_rule_http_concurrency())
metadata_def = parse_metadata_flags(self.get_argument_scale_rule_metadata(), curr_metadata)
auth_def = parse_auth_flags(self.get_argument_scale_rule_auth())
if scale_rule_type == "http":
scale_rule_def["name"] = self.get_argument_scale_rule_name()
scale_rule_def["custom"] = None
scale_rule_def["http"] = {}
scale_rule_def["http"]["metadata"] = metadata_def
scale_rule_def["http"]["auth"] = auth_def
else:
scale_rule_def["name"] = self.get_argument_scale_rule_name()
scale_rule_def["http"] = None
scale_rule_def["custom"] = {}
scale_rule_def["custom"]["type"] = scale_rule_type
scale_rule_def["custom"]["metadata"] = metadata_def
scale_rule_def["custom"]["auth"] = auth_def
if not scale_def:
scale_def = ScaleModel
scale_def["rules"] = [scale_rule_def]
self.new_containerapp["properties"]["template"]["scale"]["rules"] = scale_def["rules"]
# Ingress
if update_map["ingress"]:
self.new_containerapp["properties"]["configuration"] = {} if "configuration" not in self.new_containerapp[
"properties"] else self.new_containerapp["properties"]["configuration"]
if self.get_argument_target_port() is not None or self.get_argument_ingress() is not None:
self.new_containerapp["properties"]["configuration"]["ingress"] = {}
if self.get_argument_ingress():
self.new_containerapp["properties"]["configuration"]["ingress"][
"external"] = self.get_argument_ingress().lower() == "external"
if self.get_argument_target_port():
self.new_containerapp["properties"]["configuration"]["ingress"]["targetPort"] = self.get_argument_target_port()
# Registry
if update_map["registry"]:
self.new_containerapp["properties"]["configuration"] = {} if "configuration" not in self.new_containerapp[
"properties"] else self.new_containerapp["properties"]["configuration"]
if "registries" in self.containerapp_def["properties"]["configuration"]:
self.new_containerapp["properties"]["configuration"]["registries"] = self.containerapp_def["properties"]["configuration"]["registries"]
if "registries" not in self.containerapp_def["properties"]["configuration"] or \
self.containerapp_def["properties"]["configuration"]["registries"] is None:
self.new_containerapp["properties"]["configuration"]["registries"] = []
registries_def = self.new_containerapp["properties"]["configuration"]["registries"]
self.set_up_get_existing_secrets(self.containerapp_def)
if "secrets" in self.containerapp_def["properties"]["configuration"] and self.containerapp_def["properties"]["configuration"]["secrets"]:
self.new_containerapp["properties"]["configuration"]["secrets"] = self.containerapp_def["properties"]["configuration"]["secrets"]
else:
self.new_containerapp["properties"]["configuration"]["secrets"] = []
if self.get_argument_registry_server():
if not self.get_argument_registry_pass() or not self.get_argument_registry_user():
if ACR_IMAGE_SUFFIX not in self.get_argument_registry_server():
raise RequiredArgumentMissingError(
'Registry url is required if using Azure Container Registry, otherwise Registry username and password are required if using Dockerhub')
logger.warning(
'No credential was provided to access Azure Container Registry. Trying to look up...')
parsed = urlparse(self.get_argument_registry_server())
registry_name = (parsed.netloc if parsed.scheme else parsed.path).split('.')[0]
registry_user, registry_pass, _ = _get_acr_cred(self.cmd.cli_ctx, registry_name)
self.set_argument_registry_user(registry_user)
self.set_argument_registry_pass(registry_pass)
# Check if updating existing registry
updating_existing_registry = False
for r in registries_def:
if r['server'].lower() == self.get_argument_registry_server().lower():
updating_existing_registry = True
if self.get_argument_registry_user():
r["username"] = self.get_argument_registry_user()
if self.get_argument_registry_pass():
r["passwordSecretRef"] = store_as_secret_and_return_secret_ref(
self.new_containerapp["properties"]["configuration"]["secrets"],
r["username"],
r["server"],
self.get_argument_registry_pass(),
update_existing_secret=True,
disable_warnings=True)
# If not updating existing registry, add as new registry
if not updating_existing_registry:
registry = RegistryCredentialsModel
registry["server"] = self.get_argument_registry_server()
registry["username"] = self.get_argument_registry_user()
registry["passwordSecretRef"] = store_as_secret_and_return_secret_ref(
self.new_containerapp["properties"]["configuration"]["secrets"],
self.get_argument_registry_user(),
self.get_argument_registry_server(),
self.get_argument_registry_pass(),
update_existing_secret=True,
disable_warnings=True)
registries_def.append(registry)
if not self.get_argument_revision_suffix():
self.new_containerapp["properties"]["template"] = {} if "template" not in self.new_containerapp["properties"] else self.new_containerapp["properties"]["template"]
self.new_containerapp["properties"]["template"]["revisionSuffix"] = None
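    # Illustrative sketch: for a single-container app, `az containerapp update -n myapp -g myrg
    # --image myregistry.azurecr.io/app:v2` (flag names assumed) ends up with a sparse patch like
    #   {"properties": {"template": {"revisionSuffix": None,
    #                                "containers": [<existing container with "image" replaced>]}}}
    # Only the sections touched by the supplied flags are added to new_containerapp; everything
    # else is left for the service to keep unchanged.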
def set_up_update_containerapp_yaml(self, name, file_name):
if self.get_argument_image() or self.get_argument_min_replicas() or self.get_argument_max_replicas() or \
self.get_argument_set_env_vars() or self.get_argument_remove_env_vars() or self.get_argument_replace_env_vars() or self.get_argument_remove_all_env_vars() or self.get_argument_cpu() or self.get_argument_memory() or \
self.get_argument_startup_command() or self.get_argument_args() or self.get_argument_tags():
logger.warning(
'Additional flags were passed along with --yaml. These flags will be ignored, and the configuration defined in the yaml will be used instead')
yaml_containerapp = process_loaded_yaml(load_yaml_file(file_name))
if type(yaml_containerapp) != dict: # pylint: disable=unidiomatic-typecheck
raise ValidationError(
'Invalid YAML provided. Please see https://aka.ms/azure-container-apps-yaml for a valid containerapps YAML spec.')
if not yaml_containerapp.get('name'):
yaml_containerapp['name'] = name
elif yaml_containerapp.get('name').lower() != name.lower():
logger.warning(
'The app name provided in the --yaml file "{}" does not match the one provided in the --name flag "{}". The one provided in the --yaml file will be used.'.format(
yaml_containerapp.get('name'), name))
name = yaml_containerapp.get('name')
if not yaml_containerapp.get('type'):
yaml_containerapp['type'] = 'Microsoft.App/containerApps'
elif yaml_containerapp.get('type').lower() != "microsoft.app/containerapps":
raise ValidationError('Containerapp type must be \"Microsoft.App/ContainerApps\"')
# Check if containerapp exists
try:
self.new_containerapp = self.client.show(cmd=self.cmd, resource_group_name=self.get_argument_resource_group_name(), name=self.get_argument_name())
except Exception as e:
handle_non_404_status_code_exception(e)
if not self.new_containerapp:
raise ValidationError("The containerapp '{}' does not exist".format(name))
existed_environment_id = self.new_containerapp['properties']['environmentId']
self.new_containerapp = None
# Deserialize the yaml into a ContainerApp object. Need this since we're not using SDK
try:
deserializer = create_deserializer(self.models)
self.new_containerapp = deserializer('ContainerApp', yaml_containerapp)
except DeserializationError as ex:
raise ValidationError(
'Invalid YAML provided. Please see https://aka.ms/azure-container-apps-yaml for a valid containerapps YAML spec.') from ex
# Remove tags before converting from snake case to camel case, then re-add tags. We don't want to change the case of the tags. Need this since we're not using SDK
tags = None
if yaml_containerapp.get('tags'):
tags = yaml_containerapp.get('tags')
del yaml_containerapp['tags']
self.new_containerapp = _convert_object_from_snake_to_camel_case(_object_to_dict(self.new_containerapp))
self.new_containerapp['tags'] = tags
# After deserializing, some properties may need to be moved under the "properties" attribute. Need this since we're not using SDK
self.new_containerapp = process_loaded_yaml(self.new_containerapp)
# Change which revision we update from
if self.get_argument_from_revision():
r = self.client.show_revision(cmd=self.cmd, resource_group_name=self.get_argument_resource_group_name(), container_app_name=name, name=self.get_argument_from_revision())
_update_revision_env_secretrefs(r["properties"]["template"]["containers"], name)
self.new_containerapp["properties"]["template"] = r["properties"]["template"]
# Remove "additionalProperties" and read-only attributes that are introduced in the deserialization. Need this since we're not using SDK
_remove_additional_attributes(self.new_containerapp)
_remove_readonly_attributes(self.new_containerapp)
secret_values = self.list_secrets()
_populate_secret_values(self.new_containerapp, secret_values)
# Clean null values since this is an update
self.new_containerapp = clean_null_values(self.new_containerapp)
# Fix bug with revisionSuffix when containers are added
if not safe_get(self.new_containerapp, "properties", "template", "revisionSuffix"):
if "properties" not in self.new_containerapp:
self.new_containerapp["properties"] = {}
if "template" not in self.new_containerapp["properties"]:
self.new_containerapp["properties"]["template"] = {}
self.new_containerapp["properties"]["template"]["revisionSuffix"] = None
# Remove the environmentId in the PATCH payload if it has not been changed
if safe_get(self.new_containerapp, "properties", "environmentId") and safe_get(self.new_containerapp, "properties", "environmentId").lower() == existed_environment_id.lower():
del self.new_containerapp["properties"]['environmentId']
# decorator for preview create
class ContainerAppPreviewCreateDecorator(ContainerAppCreateDecorator):
def __init__(
self, cmd: AzCliCommand, client: Any, raw_parameters: Dict, models: str
):
super().__init__(cmd, client, raw_parameters, models)
def construct_payload(self):
super().construct_payload()
self.set_up_service_binds()
self.set_up_extended_location()
def post_process(self, r):
if is_registry_msi_system(self.get_argument_registry_identity()):
r = self.create()
if "properties" in r and "provisioningState" in r["properties"] and r["properties"]["provisioningState"].lower() == "waiting" and not self.get_argument_no_wait():
not self.get_argument_disable_warnings() and logger.warning('Containerapp creation in progress. Please monitor the creation using `az containerapp show -n {} -g {}`'.format(self.get_argument_name(), self.get_argument_resource_group_name()))
if "configuration" in r["properties"] and "ingress" in r["properties"]["configuration"] and \
r["properties"]["configuration"]["ingress"] and "fqdn" in r["properties"]["configuration"]["ingress"]:
not self.get_argument_disable_warnings() and logger.warning("\nContainer app created. Access your app at https://{}/\n".format(r["properties"]["configuration"]["ingress"]["fqdn"]))
else:
target_port = self.get_argument_target_port() or "<port>"
not self.get_argument_disable_warnings() and logger.warning(
"\nContainer app created. To access it over HTTPS, enable ingress: "
"az containerapp ingress enable -n %s -g %s --type external --target-port %s"
" --transport auto\n", self.get_argument_name(), self.get_argument_resource_group_name(), target_port)
if self.get_argument_service_connectors_def_list() is not None:
linker_client = get_linker_client(self.cmd)
for item in self.get_argument_service_connectors_def_list():
while r is not None and r["properties"]["provisioningState"].lower() == "inprogress":
r = self.client.show(self.cmd, self.get_argument_resource_group_name(), self.get_argument_name())
time.sleep(1)
linker_client.linker.begin_create_or_update(resource_uri=r["id"],
parameters=item["parameters"],
linker_name=item["linker_name"]).result()
return r
def set_up_extended_location(self):
if self.get_argument_environment_type() == CONNECTED_ENVIRONMENT_TYPE:
if not self.containerapp_def.get('extendedLocation'):
env_id = safe_get(self.containerapp_def, "properties", 'environmentId') or self.get_argument_managed_env()
parsed_env = parse_resource_id(env_id)
env_name = parsed_env['name']
env_rg = parsed_env['resource_group']
env_info = self.get_environment_client().show(cmd=self.cmd, resource_group_name=env_rg, name=env_name)
self.containerapp_def["extendedLocation"] = env_info["extendedLocation"]
def set_up_service_binds(self):
if self.get_argument_service_bindings() is not None:
service_connectors_def_list, service_bindings_def_list = parse_service_bindings(self.cmd,
self.get_argument_service_bindings(),
self.get_argument_resource_group_name(),
self.get_argument_name())
self.set_argument_service_connectors_def_list(service_connectors_def_list)
unique_bindings = check_unique_bindings(self.cmd, service_connectors_def_list, service_bindings_def_list,
self.get_argument_resource_group_name(), self.get_argument_name())
if not unique_bindings:
raise ValidationError("Binding names across managed and dev services should be unique.")
safe_set(self.containerapp_def, "properties", "template", "serviceBinds", value=service_bindings_def_list)
def get_environment_client(self):
if self.get_argument_yaml():
env = safe_get(self.containerapp_def, "properties", "environmentId")
else:
env = self.get_argument_managed_env()
environment_type = self.get_argument_environment_type()
if not env and not environment_type:
return ManagedEnvironmentClient
parsed_env = parse_resource_id(env)
# Validate environment type
if parsed_env.get('resource_type').lower() == CONNECTED_ENVIRONMENT_RESOURCE_TYPE.lower():
if environment_type == MANAGED_ENVIRONMENT_TYPE:
logger.warning("User passed a connectedEnvironment resource id but did not specify --environment-type connected. Using environment type connected.")
environment_type = CONNECTED_ENVIRONMENT_TYPE
else:
if environment_type == CONNECTED_ENVIRONMENT_TYPE:
logger.warning("User passed a managedEnvironment resource id but specified --environment-type connected. Using environment type managed.")
environment_type = MANAGED_ENVIRONMENT_TYPE
self.set_argument_environment_type(environment_type)
self.set_argument_managed_env(env)
if environment_type == CONNECTED_ENVIRONMENT_TYPE:
return ConnectedEnvironmentClient
else:
return ManagedEnvironmentPreviewClient
def get_argument_environment_type(self):
return self.get_param("environment_type")
def set_argument_environment_type(self, environment_type):
self.set_param("environment_type", environment_type)
# decorator for preview update
class ContainerAppPreviewUpdateDecorator(ContainerAppUpdateDecorator):
def construct_payload(self):
super().construct_payload()
self.set_up_service_bindings()
self.set_up_unbind_service_bindings()
def post_process(self, r):
# Delete managed bindings
linker_client = None
if self.get_argument_unbind_service_bindings():
linker_client = get_linker_client(self.cmd)
for item in self.get_argument_unbind_service_bindings():
while r["properties"]["provisioningState"].lower() == "inprogress":
r = self.client.show(self.cmd, self.get_argument_resource_group_name(), self.get_argument_name())
time.sleep(1)
linker_client.linker.begin_delete(resource_uri=r["id"], linker_name=item).result()
# Update managed bindings
if self.get_argument_service_connectors_def_list() is not None:
linker_client = get_linker_client(self.cmd) if linker_client is None else linker_client
for item in self.get_argument_service_connectors_def_list():
while r["properties"]["provisioningState"].lower() == "inprogress":
r = self.client.show(self.cmd, self.get_argument_resource_group_name(), self.get_argument_name())
time.sleep(1)
linker_client.linker.begin_create_or_update(resource_uri=r["id"],
parameters=item["parameters"],
linker_name=item["linker_name"]).result()
return r
def set_up_service_bindings(self):
if self.get_argument_service_bindings() is not None:
linker_client = get_linker_client(self.cmd)
service_connectors_def_list, service_bindings_def_list = parse_service_bindings(self.cmd, self.get_argument_service_bindings(), self.get_argument_resource_group_name(), self.get_argument_name())
self.set_argument_service_connectors_def_list(service_connectors_def_list)
service_bindings_used_map = {update_item["name"]: False for update_item in service_bindings_def_list}
safe_set(self.new_containerapp, "properties", "template", "serviceBinds", value=self.containerapp_def["properties"]["template"]["serviceBinds"])
if self.new_containerapp["properties"]["template"]["serviceBinds"] is None:
self.new_containerapp["properties"]["template"]["serviceBinds"] = []
for item in self.new_containerapp["properties"]["template"]["serviceBinds"]:
for update_item in service_bindings_def_list:
if update_item["name"] in item.values():
item["serviceId"] = update_item["serviceId"]
service_bindings_used_map[update_item["name"]] = True
for update_item in service_bindings_def_list:
if service_bindings_used_map[update_item["name"]] is False:
# Check if it doesn't exist in existing service linkers
managed_bindings = linker_client.linker.list(resource_uri=self.containerapp_def["id"])
if managed_bindings:
managed_bindings_list = [item.name for item in managed_bindings]
if update_item["name"] in managed_bindings_list:
raise ValidationError("Binding names across managed and dev services should be unique.")
self.new_containerapp["properties"]["template"]["serviceBinds"].append(update_item)
if service_connectors_def_list is not None:
for item in service_connectors_def_list:
# Check if it doesn't exist in existing service bindings
service_bindings_list = []
for binds in self.new_containerapp["properties"]["template"]["serviceBinds"]:
service_bindings_list.append(binds["name"])
if item["linker_name"] in service_bindings_list:
raise ValidationError("Binding names across managed and dev services should be unique.")
def set_up_unbind_service_bindings(self):
if self.get_argument_unbind_service_bindings():
new_template = self.new_containerapp.setdefault("properties", {}).setdefault("template", {})
existing_template = self.containerapp_def["properties"]["template"]
if not self.get_argument_service_bindings():
new_template["serviceBinds"] = existing_template.get("serviceBinds", [])
service_bindings_dict = {}
if new_template["serviceBinds"]:
service_bindings_dict = {service_binding["name"]: index for index, service_binding in
enumerate(new_template.get("serviceBinds", []))}
for item in self.get_argument_unbind_service_bindings():
if item in service_bindings_dict:
new_template["serviceBinds"] = [binding for binding in new_template["serviceBinds"] if
binding["name"] != item]
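    # Illustrative sketch: each entry of properties.template.serviceBinds handled above is assumed
    # to be a dict of roughly the form {"serviceId": <resource id of the bound service>,
    # "name": <binding name>}, which is why binding/unbinding matches on "name" and rewrites "serviceId".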
# decorator for preview list
class ContainerAppPreviewListDecorator(BaseContainerAppDecorator):
def __init__(
self, cmd: AzCliCommand, client: Any, raw_parameters: Dict, models: str
):
super().__init__(cmd, client, raw_parameters, models)
def list(self):
containerapps = super().list()
if self.get_argument_environment_type() == CONNECTED_ENVIRONMENT_TYPE:
containerapps = [c for c in containerapps if CONNECTED_ENVIRONMENT_RESOURCE_TYPE in c["properties"]["environmentId"]]
if self.get_argument_environment_type() == MANAGED_ENVIRONMENT_TYPE:
containerapps = [c for c in containerapps if MANAGED_ENVIRONMENT_RESOURCE_TYPE in c["properties"]["environmentId"]]
return containerapps
def get_environment_client(self):
env = self.get_argument_managed_env()
if is_valid_resource_id(env):
parsed_env = parse_resource_id(env)
if parsed_env.get('resource_type').lower() == CONNECTED_ENVIRONMENT_RESOURCE_TYPE.lower():
return ConnectedEnvironmentClient
else:
return ManagedEnvironmentPreviewClient
if self.get_argument_environment_type() == CONNECTED_ENVIRONMENT_TYPE:
return ConnectedEnvironmentClient
else:
return ManagedEnvironmentPreviewClient
def get_argument_environment_type(self):
return self.get_param("environment_type")
| [
"[email protected]"
] | |
4fccc4958d08996a263601b37e9b8b1a85416c19 | 8997a0bf1e3b6efe5dd9d5f307e1459f15501f5a | /stackoverflow_site__parsing/print__datetime_utc_and_reputation_change__with_missing_dates.py | 00e9926585256ee0385b09771cb3e87d0bf1e62c | [
"CC-BY-4.0"
] | permissive | stepik/SimplePyScripts | 01092eb1b2c1c33756427abb2debbd0c0abf533f | 3259d88cb58b650549080d6f63b15910ae7e4779 | refs/heads/master | 2023-05-15T17:35:55.743164 | 2021-06-11T22:59:07 | 2021-06-11T22:59:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,007 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
__author__ = 'ipetrash'
import datetime as DT
def generate_range_dates(start_date, end_date) -> list:
date_1 = min(start_date, end_date)
date_2 = max(start_date, end_date)
    # Add the start date right away
items = [date_1]
while date_1 < date_2:
date_1 += DT.timedelta(days=1)
items.append(date_1)
return items
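# Example (illustrative): generate_range_dates(DT.date(2021, 1, 1), DT.date(2021, 1, 3))
# returns [date(2021, 1, 1), date(2021, 1, 2), date(2021, 1, 3)] -- both endpoints are included
# and the order of the two arguments does not matter.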
if __name__ == '__main__':
url = 'https://ru.stackoverflow.com/users/201445/gil9red?tab=reputation'
from print__datetime_utc_and_reputation_change import get_day_by_rep
day_by_rep = get_day_by_rep(url)
start_date, end_date = min(day_by_rep), max(day_by_rep)
print('Start: {}, end: {}'.format(start_date, end_date))
print()
    # Generate the range of dates
dates = generate_range_dates(start_date, end_date)
# Print
for day in reversed(dates):
print('{:%d/%m/%Y} : {}'.format(day, day_by_rep.get(day, 0)))
| [ "[email protected]" ] | |
4bdba1ed302a07e95891189723cb8e02be46a173 | 8806a17d66d7abb8434c879215dc09cbfc3b5a25 | /bin/log.py | 02e6764c09facc7e70ec062e7792b50d468208ef | [] | no_license | chenrun666/JW_purchase | f23d1719f447be669134c8fc02b1b8fd9d82cba8 | 9552920259f4014a08b38db88d0d48f0864822d3 | refs/heads/master | 2020-04-25T20:44:06.403805 | 2019-03-09T10:05:19 | 2019-03-09T10:05:19 | 173,057,863 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,963 | py |
# coding:utf-8
import logging
from logging.handlers import RotatingFileHandler # 按文件大小滚动备份
import colorlog # 控制台日志输入颜色
import time
import datetime
import os
cur_path = os.path.dirname(os.path.realpath(__file__)) # log_path是存放日志的路径
log_path = os.path.join(os.path.dirname(cur_path), 'logs')
if not os.path.exists(log_path): os.mkdir(log_path) # 如果不存在这个logs文件夹,就自动创建一个
logName = os.path.join(log_path, '%s.log' % time.strftime('%Y-%m-%d')) # 文件的命名
log_colors_config = {
'DEBUG': 'cyan',
'INFO': 'green',
'WARNING': 'yellow',
'ERROR': 'red',
'CRITICAL': 'red',
}
class Log:
def __init__(self, logName=logName):
self.logName = logName
self.logger = logging.getLogger()
self.logger.setLevel(logging.DEBUG)
self.formatter = colorlog.ColoredFormatter(
'%(log_color)s[%(asctime)s] [%(filename)s:%(lineno)d] [%(module)s:%(funcName)s] [%(levelname)s]- %(message)s',
            log_colors=log_colors_config)  # log output format
self.handle_logs()
    def get_file_sorted(self, file_path):
        """Return files sorted by last-modified time, ascending; os.path.getmtime() -> file's last modification time"""
dir_list = os.listdir(file_path)
if not dir_list:
return
else:
dir_list = sorted(dir_list, key=lambda x: os.path.getmtime(os.path.join(file_path, x)))
return dir_list
    def TimeStampToTime(self, timestamp):
        """Format a timestamp as 'YYYY-MM-DD'."""
timeStruct = time.localtime(timestamp)
return str(time.strftime('%Y-%m-%d', timeStruct))
    def handle_logs(self):
        """Enforce log expiry (in days) and limit the number of kept files."""
        dir_list = ['report']  # names of the directories to clean up
for dir in dir_list:
            dirPath = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + '/' + dir  # full path of the directory to clean
            file_list = self.get_file_sorted(dirPath)  # files sorted by modification time
            if file_list:  # only proceed if the directory contains log files
for i in file_list:
                    file_path = os.path.join(dirPath, i)  # full path of the file
t_list = self.TimeStampToTime(os.path.getctime(file_path)).split('-')
now_list = self.TimeStampToTime(time.time()).split('-')
                    t = datetime.datetime(int(t_list[0]), int(t_list[1]),
                                          int(t_list[2]))  # convert to a datetime.datetime object
now = datetime.datetime(int(now_list[0]), int(now_list[1]), int(now_list[2]))
                    if (now - t).days > 7:  # delete files created more than 7 days ago
self.delete_logs(file_path)
                if len(file_list) > 10:  # limit how many log files are kept in the directory
file_list = file_list[0:-4]
for i in file_list:
file_path = os.path.join(dirPath, i)
print(file_path)
self.delete_logs(file_path)
def delete_logs(self, file_path):
try:
os.remove(file_path)
except PermissionError as e:
            Log().warning('Failed to delete log file: {}'.format(e))
def __console(self, level, message):
# 创建一个FileHandler,用于写到本地
fh = RotatingFileHandler(filename=self.logName, mode='a', maxBytes=1024 * 1024 * 10, backupCount=10,
encoding='utf-8') # 使用RotatingFileHandler类,滚动备份日志
fh.suffix = "%Y%m%d.log"
fh.setLevel(logging.DEBUG)
fh.setFormatter(self.formatter)
self.logger.addHandler(fh)
        # Create a StreamHandler for console output
ch = colorlog.StreamHandler()
ch.setLevel(logging.DEBUG)
ch.setFormatter(self.formatter)
self.logger.addHandler(ch)
if level == 'info':
self.logger.info(message)
elif level == 'debug':
self.logger.debug(message)
elif level == 'warning':
self.logger.warning(message)
elif level == 'error':
self.logger.error(message)
        # These two lines avoid duplicated log output
self.logger.removeHandler(ch)
self.logger.removeHandler(fh)
        fh.close()  # close the opened file
def debug(self, message):
self.__console('debug', message)
def info(self, message):
self.__console('info', message)
def warning(self, message):
self.__console('warning', message)
def error(self, message):
self.__console('error', message)
logger = Log()
if __name__ == "__main__":
log = Log()
    log.debug("---- test start ----")
    log.info("operation step")
    log.warning("---- test end ----")
    log.error("---- test error ----")
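    # Illustrative output (format per the ColoredFormatter pattern above; timestamp and line
    # number are placeholders):
    #   [2019-03-09 18:05:19,123] [log.py:<lineno>] [log:__console] [INFO]- operation step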
| [ "[email protected]" ] | |
76f0db3ff3eb3950c75953ea5619bbcd4e1ee88c | 113bfeda578324908963307670718c5545f30e8b | /booksite/booksite/book/migrations/0011_auto_20171205_1611.py | c4e55389b90cee0a19f1960233318db14ed070c8 | [ "Apache-2.0" ] | permissive | tkliuxing/bookspider | f0989814716e38fa081cc300f92fc975ff8ac67d | bc7ba487f0ab6ea7782f5093bb1d074eac662bdf | refs/heads/master | 2021-01-18T23:31:26.566892 | 2020-03-14T04:04:48 | 2020-03-14T04:04:48 | 21,845,464 | 40 | 36 | null | 2015-11-06T03:58:04 | 2014-07-15T03:51:01 | CSS | UTF-8 | Python | false | false | 426 | py |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.8 on 2017-12-05 08:11
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('book', '0010_auto_20171205_1436'),
]
operations = [
migrations.AlterUniqueTogether(
name='bookpage',
unique_together=set([('page_number', 'site')]),
),
]
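# Roughly equivalent DDL (illustrative; the exact statement depends on the database backend and
# Django's autogenerated constraint name):
#   ALTER TABLE book_bookpage ADD CONSTRAINT <name> UNIQUE (page_number, site_id);
# AlterUniqueTogether replaces the model's previous unique_together set with this single pair.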
| [ "[email protected]" ] | |
67a841ac1879780cf4fb03786cd5b1d6924639eb | 93713f46f16f1e29b725f263da164fed24ebf8a8 | /Library/lib/python3.7/site-packages/sympy/physics/mechanics/system.py | b82288bed1b78b4314a676d5a08789aa26293f22 | [ "BSD-3-Clause" ] | permissive | holzschu/Carnets | b83d15136d25db640cea023abb5c280b26a9620e | 1ad7ec05fb1e3676ac879585296c513c3ee50ef9 | refs/heads/master | 2023-02-20T12:05:14.980685 | 2023-02-13T15:59:23 | 2023-02-13T15:59:23 | 167,671,526 | 541 | 36 | BSD-3-Clause | 2022-11-29T03:08:22 | 2019-01-26T09:26:46 | Python | UTF-8 | Python | false | false | 18,663 | py |
from sympy.core.backend import eye, Matrix, zeros
from sympy.physics.mechanics import dynamicsymbols
from sympy.physics.mechanics.functions import find_dynamicsymbols
__all__ = ['SymbolicSystem']
class SymbolicSystem(object):
"""SymbolicSystem is a class that contains all the information about a
system in a symbolic format such as the equations of motions and the bodies
and loads in the system.
There are three ways that the equations of motion can be described for
Symbolic System:
[1] Explicit form where the kinematics and dynamics are combined
x' = F_1(x, t, r, p)
[2] Implicit form where the kinematics and dynamics are combined
M_2(x, p) x' = F_2(x, t, r, p)
[3] Implicit form where the kinematics and dynamics are separate
M_3(q, p) u' = F_3(q, u, t, r, p)
q' = G(q, u, t, r, p)
where
x : states, e.g. [q, u]
t : time
r : specified (exogenous) inputs
p : constants
q : generalized coordinates
u : generalized speeds
F_1 : right hand side of the combined equations in explicit form
F_2 : right hand side of the combined equations in implicit form
F_3 : right hand side of the dynamical equations in implicit form
M_2 : mass matrix of the combined equations in implicit form
M_3 : mass matrix of the dynamical equations in implicit form
G : right hand side of the kinematical differential equations
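    For example, a single mass-spring-damper with generalized coordinate q,
    generalized speed u = q' and constants m, c, k can be written in each of
    the three forms above as
        [1] x' = [u, (-k*q - c*u)/m]    (with x = [q, u])
        [2] M_2 = diag(1, m),  F_2 = [u, -k*q - c*u]
        [3] M_3 = [m],  F_3 = [-k*q - c*u],  G = [u]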
Parameters
==========
coord_states : ordered iterable of functions of time
This input will either be a collection of the coordinates or states
of the system depending on whether or not the speeds are also
given. If speeds are specified this input will be assumed to
be the coordinates otherwise this input will be assumed to
be the states.
right_hand_side : Matrix
This variable is the right hand side of the equations of motion in
any of the forms. The specific form will be assumed depending on
whether a mass matrix or coordinate derivatives are given.
speeds : ordered iterable of functions of time, optional
This is a collection of the generalized speeds of the system. If
given it will be assumed that the first argument (coord_states)
will represent the generalized coordinates of the system.
mass_matrix : Matrix, optional
The matrix of the implicit forms of the equations of motion (forms
[2] and [3]). The distinction between the forms is determined by
whether or not the coordinate derivatives are passed in. If
they are given form [3] will be assumed otherwise form [2] is
assumed.
coordinate_derivatives : Matrix, optional
The right hand side of the kinematical equations in explicit form.
If given it will be assumed that the equations of motion are being
entered in form [3].
alg_con : Iterable, optional
The indexes of the rows in the equations of motion that contain
algebraic constraints instead of differential equations. If the
equations are input in form [3], it will be assumed the indexes are
referencing the mass_matrix/right_hand_side combination and not the
coordinate_derivatives.
output_eqns : Dictionary, optional
Any output equations that are desired to be tracked are stored in a
dictionary where the key corresponds to the name given for the
specific equation and the value is the equation itself in symbolic
form
coord_idxs : Iterable, optional
If coord_states corresponds to the states rather than the
coordinates this variable will tell SymbolicSystem which indexes of
the states correspond to generalized coordinates.
speed_idxs : Iterable, optional
If coord_states corresponds to the states rather than the
coordinates this variable will tell SymbolicSystem which indexes of
the states correspond to generalized speeds.
bodies : iterable of Body/Rigidbody objects, optional
Iterable containing the bodies of the system
loads : iterable of load instances (described below), optional
Iterable containing the loads of the system where forces are given
by (point of application, force vector) and torques are given by
(reference frame acting upon, torque vector). Ex [(point, force),
(ref_frame, torque)]
Attributes
==========
coordinates : Matrix, shape(n, 1)
This is a matrix containing the generalized coordinates of the system
speeds : Matrix, shape(m, 1)
This is a matrix containing the generalized speeds of the system
states : Matrix, shape(o, 1)
This is a matrix containing the state variables of the system
alg_con : List
This list contains the indices of the algebraic constraints in the
combined equations of motion. The presence of these constraints
requires that a DAE solver be used instead of an ODE solver.
        If the system is given in form [3], the alg_con variable will be
        adjusted so that it refers to the combined kinematics and dynamics;
        make sure it always matches the mass matrix that was entered.
dyn_implicit_mat : Matrix, shape(m, m)
This is the M matrix in form [3] of the equations of motion (the mass
matrix or generalized inertia matrix of the dynamical equations of
motion in implicit form).
dyn_implicit_rhs : Matrix, shape(m, 1)
This is the F vector in form [3] of the equations of motion (the right
hand side of the dynamical equations of motion in implicit form).
comb_implicit_mat : Matrix, shape(o, o)
This is the M matrix in form [2] of the equations of motion.
This matrix contains a block diagonal structure where the top
left block (the first rows) represent the matrix in the
implicit form of the kinematical equations and the bottom right
block (the last rows) represent the matrix in the implicit form
of the dynamical equations.
comb_implicit_rhs : Matrix, shape(o, 1)
This is the F vector in form [2] of the equations of motion. The top
part of the vector represents the right hand side of the implicit form
        of the kinematical equations and the bottom of the vector represents the
right hand side of the implicit form of the dynamical equations of
motion.
comb_explicit_rhs : Matrix, shape(o, 1)
This vector represents the right hand side of the combined equations of
motion in explicit form (form [1] from above).
kin_explicit_rhs : Matrix, shape(m, 1)
This is the right hand side of the explicit form of the kinematical
equations of motion as can be seen in form [3] (the G matrix).
output_eqns : Dictionary
If output equations were given they are stored in a dictionary where
the key corresponds to the name given for the specific equation and
the value is the equation itself in symbolic form
bodies : Tuple
If the bodies in the system were given they are stored in a tuple for
future access
loads : Tuple
If the loads in the system were given they are stored in a tuple for
future access. This includes forces and torques where forces are given
by (point of application, force vector) and torques are given by
(reference frame acted upon, torque vector).
Example
=======
As a simple example, the dynamics of a simple pendulum will be input into a
SymbolicSystem object manually. First some imports will be needed and then
symbols will be set up for the length of the pendulum (l), mass at the end
of the pendulum (m), and a constant for gravity (g). ::
>>> from sympy import Matrix, sin, symbols
>>> from sympy.physics.mechanics import dynamicsymbols, SymbolicSystem
>>> l, m, g = symbols('l m g')
    The system will be defined by an angle theta measured from the vertical,
    and a generalized speed omega will be used, where omega = theta_dot. ::
>>> theta, omega = dynamicsymbols('theta omega')
Now the equations of motion are ready to be formed and passed to the
SymbolicSystem object. ::
>>> kin_explicit_rhs = Matrix([omega])
>>> dyn_implicit_mat = Matrix([l**2 * m])
>>> dyn_implicit_rhs = Matrix([-g * l * m * sin(theta)])
>>> symsystem = SymbolicSystem([theta], dyn_implicit_rhs, [omega],
... dyn_implicit_mat)
Notes
=====
m : number of generalized speeds
n : number of generalized coordinates
o : number of states
"""
def __init__(self, coord_states, right_hand_side, speeds=None,
mass_matrix=None, coordinate_derivatives=None, alg_con=None,
output_eqns={}, coord_idxs=None, speed_idxs=None, bodies=None,
loads=None):
"""Initializes a SymbolicSystem object"""
# Extract information on speeds, coordinates and states
if speeds is None:
self._states = Matrix(coord_states)
if coord_idxs is None:
self._coordinates = None
else:
coords = [coord_states[i] for i in coord_idxs]
self._coordinates = Matrix(coords)
if speed_idxs is None:
self._speeds = None
else:
speeds_inter = [coord_states[i] for i in speed_idxs]
self._speeds = Matrix(speeds_inter)
else:
self._coordinates = Matrix(coord_states)
self._speeds = Matrix(speeds)
self._states = self._coordinates.col_join(self._speeds)
# Extract equations of motion form
if coordinate_derivatives is not None:
self._kin_explicit_rhs = coordinate_derivatives
self._dyn_implicit_rhs = right_hand_side
self._dyn_implicit_mat = mass_matrix
self._comb_implicit_rhs = None
self._comb_implicit_mat = None
self._comb_explicit_rhs = None
elif mass_matrix is not None:
self._kin_explicit_rhs = None
self._dyn_implicit_rhs = None
self._dyn_implicit_mat = None
self._comb_implicit_rhs = right_hand_side
self._comb_implicit_mat = mass_matrix
self._comb_explicit_rhs = None
else:
self._kin_explicit_rhs = None
self._dyn_implicit_rhs = None
self._dyn_implicit_mat = None
self._comb_implicit_rhs = None
self._comb_implicit_mat = None
self._comb_explicit_rhs = right_hand_side
# Set the remainder of the inputs as instance attributes
if alg_con is not None and coordinate_derivatives is not None:
alg_con = [i + len(coordinate_derivatives) for i in alg_con]
self._alg_con = alg_con
self.output_eqns = output_eqns
# Change the body and loads iterables to tuples if they are not tuples
# already
if type(bodies) != tuple and bodies is not None:
bodies = tuple(bodies)
if type(loads) != tuple and loads is not None:
loads = tuple(loads)
self._bodies = bodies
self._loads = loads
@property
def coordinates(self):
"""Returns the column matrix of the generalized coordinates"""
if self._coordinates is None:
raise AttributeError("The coordinates were not specified.")
else:
return self._coordinates
@property
def speeds(self):
"""Returns the column matrix of generalized speeds"""
if self._speeds is None:
raise AttributeError("The speeds were not specified.")
else:
return self._speeds
@property
def states(self):
"""Returns the column matrix of the state variables"""
return self._states
@property
def alg_con(self):
"""Returns a list with the indices of the rows containing algebraic
constraints in the combined form of the equations of motion"""
return self._alg_con
@property
def dyn_implicit_mat(self):
"""Returns the matrix, M, corresponding to the dynamic equations in
implicit form, M x' = F, where the kinematical equations are not
included"""
if self._dyn_implicit_mat is None:
raise AttributeError("dyn_implicit_mat is not specified for "
"equations of motion form [1] or [2].")
else:
return self._dyn_implicit_mat
@property
def dyn_implicit_rhs(self):
"""Returns the column matrix, F, corresponding to the dynamic equations
in implicit form, M x' = F, where the kinematical equations are not
included"""
if self._dyn_implicit_rhs is None:
raise AttributeError("dyn_implicit_rhs is not specified for "
"equations of motion form [1] or [2].")
else:
return self._dyn_implicit_rhs
@property
def comb_implicit_mat(self):
"""Returns the matrix, M, corresponding to the equations of motion in
implicit form (form [2]), M x' = F, where the kinematical equations are
included"""
if self._comb_implicit_mat is None:
if self._dyn_implicit_mat is not None:
num_kin_eqns = len(self._kin_explicit_rhs)
num_dyn_eqns = len(self._dyn_implicit_rhs)
zeros1 = zeros(num_kin_eqns, num_dyn_eqns)
zeros2 = zeros(num_dyn_eqns, num_kin_eqns)
inter1 = eye(num_kin_eqns).row_join(zeros1)
inter2 = zeros2.row_join(self._dyn_implicit_mat)
self._comb_implicit_mat = inter1.col_join(inter2)
return self._comb_implicit_mat
else:
raise AttributeError("comb_implicit_mat is not specified for "
"equations of motion form [1].")
else:
return self._comb_implicit_mat
@property
def comb_implicit_rhs(self):
"""Returns the column matrix, F, corresponding to the equations of
motion in implicit form (form [2]), M x' = F, where the kinematical
equations are included"""
if self._comb_implicit_rhs is None:
if self._dyn_implicit_rhs is not None:
kin_inter = self._kin_explicit_rhs
dyn_inter = self._dyn_implicit_rhs
self._comb_implicit_rhs = kin_inter.col_join(dyn_inter)
return self._comb_implicit_rhs
else:
raise AttributeError("comb_implicit_mat is not specified for "
"equations of motion in form [1].")
else:
return self._comb_implicit_rhs
def compute_explicit_form(self):
"""If the explicit right hand side of the combined equations of motion
is to provided upon initialization, this method will calculate it. This
calculation can potentially take awhile to compute."""
if self._comb_explicit_rhs is not None:
raise AttributeError("comb_explicit_rhs is already formed.")
inter1 = getattr(self, 'kin_explicit_rhs', None)
if inter1 is not None:
inter2 = self._dyn_implicit_mat.LUsolve(self._dyn_implicit_rhs)
out = inter1.col_join(inter2)
else:
out = self._comb_implicit_mat.LUsolve(self._comb_implicit_rhs)
self._comb_explicit_rhs = out
@property
def comb_explicit_rhs(self):
"""Returns the right hand side of the equations of motion in explicit
form, x' = F, where the kinematical equations are included"""
if self._comb_explicit_rhs is None:
raise AttributeError("Please run .combute_explicit_form before "
"attempting to access comb_explicit_rhs.")
else:
return self._comb_explicit_rhs
@property
def kin_explicit_rhs(self):
"""Returns the right hand side of the kinematical equations in explicit
form, q' = G"""
if self._kin_explicit_rhs is None:
raise AttributeError("kin_explicit_rhs is not specified for "
"equations of motion form [1] or [2].")
else:
return self._kin_explicit_rhs
def dynamic_symbols(self):
"""Returns a column matrix containing all of the symbols in the system
that depend on time"""
# Create a list of all of the expressions in the equations of motion
if self._comb_explicit_rhs is None:
eom_expressions = (self.comb_implicit_mat[:] +
self.comb_implicit_rhs[:])
else:
eom_expressions = (self._comb_explicit_rhs[:])
functions_of_time = set()
for expr in eom_expressions:
functions_of_time = functions_of_time.union(
find_dynamicsymbols(expr))
functions_of_time = functions_of_time.union(self._states)
return tuple(functions_of_time)
def constant_symbols(self):
"""Returns a column matrix containing all of the symbols in the system
that do not depend on time"""
# Create a list of all of the expressions in the equations of motion
if self._comb_explicit_rhs is None:
eom_expressions = (self.comb_implicit_mat[:] +
self.comb_implicit_rhs[:])
else:
eom_expressions = (self._comb_explicit_rhs[:])
constants = set()
for expr in eom_expressions:
constants = constants.union(expr.free_symbols)
constants.remove(dynamicsymbols._t)
return tuple(constants)
@property
def bodies(self):
"""Returns the bodies in the system"""
if self._bodies is None:
raise AttributeError("bodies were not specified for the system.")
else:
return self._bodies
@property
def loads(self):
"""Returns the loads in the system"""
if self._loads is None:
raise AttributeError("loads were not specified for the system.")
else:
return self._loads
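

# A minimal usage sketch (not part of the upstream sympy module): it rebuilds
# the simple pendulum from the class docstring, but passes the kinematical
# equation as coordinate_derivatives so the equations of motion are stored in
# form [3], and then derives the combined implicit and explicit forms.
if __name__ == "__main__":
    from sympy import Matrix, sin, symbols

    l, m, g = symbols('l m g')
    theta, omega = dynamicsymbols('theta omega')

    kin_explicit_rhs = Matrix([omega])                     # q' = G(q, u, t, r, p)
    dyn_implicit_mat = Matrix([l**2 * m])                  # M_3(q, p)
    dyn_implicit_rhs = Matrix([-g * l * m * sin(theta)])   # F_3(q, u, t, r, p)

    pendulum = SymbolicSystem([theta], dyn_implicit_rhs, [omega],
                              mass_matrix=dyn_implicit_mat,
                              coordinate_derivatives=kin_explicit_rhs)

    # comb_implicit_mat / comb_implicit_rhs are assembled lazily from the
    # form [3] inputs the first time they are accessed.
    print(pendulum.comb_implicit_mat)
    print(pendulum.comb_implicit_rhs)

    # The explicit combined form (form [1]) has to be requested explicitly.
    pendulum.compute_explicit_form()
    print(pendulum.comb_explicit_rhs)
    print(pendulum.dynamic_symbols())
    print(pendulum.constant_symbols())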
# ---- /env/lib/python2.7/site-packages/django/db/models/fields/related.py (repo: adamjberg/finna-be-octo-ninja) ----
from operator import attrgetter
from django.db import connection, connections, router
from django.db.backends import util
from django.db.models import signals, get_model
from django.db.models.fields import (AutoField, Field, IntegerField,
PositiveIntegerField, PositiveSmallIntegerField, FieldDoesNotExist)
from django.db.models.related import RelatedObject
from django.db.models.query import QuerySet
from django.db.models.query_utils import QueryWrapper
from django.db.models.deletion import CASCADE
from django.utils.encoding import smart_text
from django.utils import six
from django.utils.translation import ugettext_lazy as _, string_concat
from django.utils.functional import curry, cached_property
from django.core import exceptions
from django import forms
RECURSIVE_RELATIONSHIP_CONSTANT = 'self'
pending_lookups = {}
def add_lazy_relation(cls, field, relation, operation):
"""
Adds a lookup on ``cls`` when a related field is defined using a string,
i.e.::
class MyModel(Model):
fk = ForeignKey("AnotherModel")
This string can be:
* RECURSIVE_RELATIONSHIP_CONSTANT (i.e. "self") to indicate a recursive
relation.
* The name of a model (i.e "AnotherModel") to indicate another model in
the same app.
* An app-label and model name (i.e. "someapp.AnotherModel") to indicate
another model in a different app.
If the other model hasn't yet been loaded -- almost a given if you're using
lazy relationships -- then the relation won't be set up until the
class_prepared signal fires at the end of model initialization.
operation is the work that must be performed once the relation can be resolved.
"""
# Check for recursive relations
if relation == RECURSIVE_RELATIONSHIP_CONSTANT:
app_label = cls._meta.app_label
model_name = cls.__name__
else:
# Look for an "app.Model" relation
if isinstance(relation, six.string_types):
try:
app_label, model_name = relation.split(".")
except ValueError:
# If we can't split, assume a model in current app
app_label = cls._meta.app_label
model_name = relation
else:
# it's actually a model class
app_label = relation._meta.app_label
model_name = relation._meta.object_name
# Try to look up the related model, and if it's already loaded resolve the
# string right away. If get_model returns None, it means that the related
# model isn't loaded yet, so we need to pend the relation until the class
# is prepared.
model = get_model(app_label, model_name,
seed_cache=False, only_installed=False)
if model:
operation(field, model, cls)
else:
key = (app_label, model_name)
value = (cls, field, operation)
pending_lookups.setdefault(key, []).append(value)
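# Illustrative sketch (not part of Django itself) of the three string forms the
# docstring above accepts; the field classes below funnel them through
# add_lazy_relation() whenever the target model class is not importable yet:
#
#     class Employee(models.Model):
#         manager = models.ForeignKey("self")        # recursive relation
#         team = models.ForeignKey("Team")           # model in the same app
#         office = models.ForeignKey("hr.Office")    # "app_label.ModelName"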
def do_pending_lookups(sender, **kwargs):
"""
Handle any pending relations to the sending model. Sent from class_prepared.
"""
key = (sender._meta.app_label, sender.__name__)
for cls, field, operation in pending_lookups.pop(key, []):
operation(field, sender, cls)
signals.class_prepared.connect(do_pending_lookups)
#HACK
class RelatedField(object):
def contribute_to_class(self, cls, name):
sup = super(RelatedField, self)
# Store the opts for related_query_name()
self.opts = cls._meta
if hasattr(sup, 'contribute_to_class'):
sup.contribute_to_class(cls, name)
if not cls._meta.abstract and self.rel.related_name:
self.rel.related_name = self.rel.related_name % {
'class': cls.__name__.lower(),
'app_label': cls._meta.app_label.lower(),
}
other = self.rel.to
if isinstance(other, six.string_types) or other._meta.pk is None:
def resolve_related_class(field, model, cls):
field.rel.to = model
field.do_related_class(model, cls)
add_lazy_relation(cls, self, other, resolve_related_class)
else:
self.do_related_class(other, cls)
def set_attributes_from_rel(self):
self.name = self.name or (self.rel.to._meta.object_name.lower() + '_' + self.rel.to._meta.pk.name)
if self.verbose_name is None:
self.verbose_name = self.rel.to._meta.verbose_name
self.rel.field_name = self.rel.field_name or self.rel.to._meta.pk.name
def do_related_class(self, other, cls):
self.set_attributes_from_rel()
self.related = RelatedObject(other, cls, self)
if not cls._meta.abstract:
self.contribute_to_related_class(other, self.related)
def get_prep_lookup(self, lookup_type, value):
if hasattr(value, 'prepare'):
return value.prepare()
if hasattr(value, '_prepare'):
return value._prepare()
# FIXME: lt and gt are explicitly allowed to make
# get_(next/prev)_by_date work; other lookups are not allowed since that
# gets messy pretty quick. This is a good candidate for some refactoring
# in the future.
if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
return self._pk_trace(value, 'get_prep_lookup', lookup_type)
if lookup_type in ('range', 'in'):
return [self._pk_trace(v, 'get_prep_lookup', lookup_type) for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
def get_db_prep_lookup(self, lookup_type, value, connection, prepared=False):
if not prepared:
value = self.get_prep_lookup(lookup_type, value)
if hasattr(value, 'get_compiler'):
value = value.get_compiler(connection=connection)
if hasattr(value, 'as_sql') or hasattr(value, '_as_sql'):
# If the value has a relabel_aliases method, it will need to
# be invoked before the final SQL is evaluated
if hasattr(value, 'relabel_aliases'):
return value
if hasattr(value, 'as_sql'):
sql, params = value.as_sql()
else:
sql, params = value._as_sql(connection=connection)
return QueryWrapper(('(%s)' % sql), params)
# FIXME: lt and gt are explicitly allowed to make
# get_(next/prev)_by_date work; other lookups are not allowed since that
# gets messy pretty quick. This is a good candidate for some refactoring
# in the future.
if lookup_type in ['exact', 'gt', 'lt', 'gte', 'lte']:
return [self._pk_trace(value, 'get_db_prep_lookup', lookup_type,
connection=connection, prepared=prepared)]
if lookup_type in ('range', 'in'):
return [self._pk_trace(v, 'get_db_prep_lookup', lookup_type,
connection=connection, prepared=prepared)
for v in value]
elif lookup_type == 'isnull':
return []
raise TypeError("Related Field has invalid lookup: %s" % lookup_type)
def _pk_trace(self, value, prep_func, lookup_type, **kwargs):
# Value may be a primary key, or an object held in a relation.
# If it is an object, then we need to get the primary key value for
# that object. In certain conditions (especially one-to-one relations),
# the primary key may itself be an object - so we need to keep drilling
# down until we hit a value that can be used for a comparison.
v = value
# In the case of an FK to 'self', this check allows to_field to be used
# for both forwards and reverse lookups across the FK. (For normal FKs,
# it's only relevant for forward lookups).
if isinstance(v, self.rel.to):
field_name = getattr(self.rel, "field_name", None)
else:
field_name = None
try:
while True:
if field_name is None:
field_name = v._meta.pk.name
v = getattr(v, field_name)
field_name = None
except AttributeError:
pass
except exceptions.ObjectDoesNotExist:
v = None
field = self
while field.rel:
if hasattr(field.rel, 'field_name'):
field = field.rel.to._meta.get_field(field.rel.field_name)
else:
field = field.rel.to._meta.pk
if lookup_type in ('range', 'in'):
v = [v]
v = getattr(field, prep_func)(lookup_type, v, **kwargs)
if isinstance(v, list):
v = v[0]
return v
def related_query_name(self):
# This method defines the name that can be used to identify this
# related object in a table-spanning query. It uses the lower-cased
# object_name by default, but this can be overridden with the
# "related_name" option.
return self.rel.related_name or self.opts.object_name.lower()
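    # Illustrative sketch (not part of Django itself): for
    #     class Choice(models.Model):
    #         poll = models.ForeignKey(Poll)
    # related_query_name() falls back to "choice", so reverse, table-spanning
    # lookups are spelled like Poll.objects.filter(choice__votes__gt=0) (the
    # field name "votes" is hypothetical); supplying related_name on the field
    # overrides that default.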
class SingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class pointed to by a related field.
# In the example "place.restaurant", the restaurant attribute is a
# SingleRelatedObjectDescriptor instance.
def __init__(self, related):
self.related = related
self.cache_name = related.get_cache_name()
def is_cached(self, instance):
return hasattr(instance, self.cache_name)
def get_query_set(self, **db_hints):
db = router.db_for_read(self.related.model, **db_hints)
return self.related.model._base_manager.using(db)
def get_prefetch_query_set(self, instances):
rel_obj_attr = attrgetter(self.related.field.attname)
instance_attr = lambda obj: obj._get_pk_val()
instances_dict = dict((instance_attr(inst), inst) for inst in instances)
params = {'%s__pk__in' % self.related.field.name: list(instances_dict)}
qs = self.get_query_set(instance=instances[0]).filter(**params)
# Since we're going to assign directly in the cache,
# we must manage the reverse relation cache manually.
rel_obj_cache_name = self.related.field.get_cache_name()
for rel_obj in qs:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_obj_cache_name, instance)
return qs, rel_obj_attr, instance_attr, True, self.cache_name
def __get__(self, instance, instance_type=None):
if instance is None:
return self
try:
rel_obj = getattr(instance, self.cache_name)
except AttributeError:
related_pk = instance._get_pk_val()
if related_pk is None:
rel_obj = None
else:
params = {'%s__pk' % self.related.field.name: related_pk}
try:
rel_obj = self.get_query_set(instance=instance).get(**params)
except self.related.model.DoesNotExist:
rel_obj = None
else:
setattr(rel_obj, self.related.field.get_cache_name(), instance)
setattr(instance, self.cache_name, rel_obj)
if rel_obj is None:
raise self.related.model.DoesNotExist
else:
return rel_obj
def __set__(self, instance, value):
if instance is None:
raise AttributeError("%s must be accessed via instance" % self.related.opts.object_name)
# The similarity of the code below to the code in
# ReverseSingleRelatedObjectDescriptor is annoying, but there's a bunch
# of small differences that would make a common base class convoluted.
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
if value is None and self.related.field.null == False:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.related.get_accessor_name()))
elif value is not None and not isinstance(value, self.related.model):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.related.get_accessor_name(), self.related.opts.object_name))
elif value is not None:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
(value, instance._state.db, value._state.db))
related_pk = getattr(instance, self.related.field.rel.get_related_field().attname)
if related_pk is None:
raise ValueError('Cannot assign "%r": "%s" instance isn\'t saved in the database.' %
(value, instance._meta.object_name))
# Set the value of the related field to the value of the related object's related field
setattr(value, self.related.field.attname, related_pk)
# Since we already know what the related object is, seed the related
# object caches now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.cache_name, value)
setattr(value, self.related.field.get_cache_name(), instance)
class ReverseSingleRelatedObjectDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# a single "remote" value, on the class that defines the related field.
# In the example "choice.poll", the poll attribute is a
# ReverseSingleRelatedObjectDescriptor instance.
def __init__(self, field_with_rel):
self.field = field_with_rel
self.cache_name = self.field.get_cache_name()
def is_cached(self, instance):
return hasattr(instance, self.cache_name)
def get_query_set(self, **db_hints):
db = router.db_for_read(self.field.rel.to, **db_hints)
rel_mgr = self.field.rel.to._default_manager
# If the related manager indicates that it should be used for
# related fields, respect that.
if getattr(rel_mgr, 'use_for_related_fields', False):
return rel_mgr.using(db)
else:
return QuerySet(self.field.rel.to).using(db)
def get_prefetch_query_set(self, instances):
other_field = self.field.rel.get_related_field()
rel_obj_attr = attrgetter(other_field.attname)
instance_attr = attrgetter(self.field.attname)
instances_dict = dict((instance_attr(inst), inst) for inst in instances)
if other_field.rel:
params = {'%s__pk__in' % self.field.rel.field_name: list(instances_dict)}
else:
params = {'%s__in' % self.field.rel.field_name: list(instances_dict)}
qs = self.get_query_set(instance=instances[0]).filter(**params)
# Since we're going to assign directly in the cache,
# we must manage the reverse relation cache manually.
if not self.field.rel.multiple:
rel_obj_cache_name = self.field.related.get_cache_name()
for rel_obj in qs:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_obj_cache_name, instance)
return qs, rel_obj_attr, instance_attr, True, self.cache_name
def __get__(self, instance, instance_type=None):
if instance is None:
return self
try:
rel_obj = getattr(instance, self.cache_name)
except AttributeError:
val = getattr(instance, self.field.attname)
if val is None:
rel_obj = None
else:
other_field = self.field.rel.get_related_field()
if other_field.rel:
params = {'%s__%s' % (self.field.rel.field_name, other_field.rel.field_name): val}
else:
params = {'%s__exact' % self.field.rel.field_name: val}
qs = self.get_query_set(instance=instance)
# Assuming the database enforces foreign keys, this won't fail.
rel_obj = qs.get(**params)
if not self.field.rel.multiple:
setattr(rel_obj, self.field.related.get_cache_name(), instance)
setattr(instance, self.cache_name, rel_obj)
if rel_obj is None and not self.field.null:
raise self.field.rel.to.DoesNotExist
else:
return rel_obj
def __set__(self, instance, value):
if instance is None:
raise AttributeError("%s must be accessed via instance" % self.field.name)
# If null=True, we can assign null here, but otherwise the value needs
# to be an instance of the related class.
if value is None and self.field.null == False:
raise ValueError('Cannot assign None: "%s.%s" does not allow null values.' %
(instance._meta.object_name, self.field.name))
elif value is not None and not isinstance(value, self.field.rel.to):
raise ValueError('Cannot assign "%r": "%s.%s" must be a "%s" instance.' %
(value, instance._meta.object_name,
self.field.name, self.field.rel.to._meta.object_name))
elif value is not None:
if instance._state.db is None:
instance._state.db = router.db_for_write(instance.__class__, instance=value)
elif value._state.db is None:
value._state.db = router.db_for_write(value.__class__, instance=instance)
elif value._state.db is not None and instance._state.db is not None:
if not router.allow_relation(value, instance):
raise ValueError('Cannot assign "%r": instance is on database "%s", value is on database "%s"' %
(value, instance._state.db, value._state.db))
# If we're setting the value of a OneToOneField to None, we need to clear
# out the cache on any old related object. Otherwise, deleting the
# previously-related object will also cause this object to be deleted,
# which is wrong.
if value is None:
# Look up the previously-related object, which may still be available
# since we've not yet cleared out the related field.
# Use the cache directly, instead of the accessor; if we haven't
# populated the cache, then we don't care - we're only accessing
# the object to invalidate the accessor cache, so there's no
# need to populate the cache just to expire it again.
related = getattr(instance, self.cache_name, None)
# If we've got an old related object, we need to clear out its
# cache. This cache also might not exist if the related object
# hasn't been accessed yet.
if related is not None:
setattr(related, self.field.related.get_cache_name(), None)
# Set the value of the related field
try:
val = getattr(value, self.field.rel.get_related_field().attname)
except AttributeError:
val = None
setattr(instance, self.field.attname, val)
# Since we already know what the related object is, seed the related
# object caches now, too. This avoids another db hit if you get the
# object you just set.
setattr(instance, self.cache_name, value)
if value is not None and not self.field.rel.multiple:
setattr(value, self.field.related.get_cache_name(), instance)
class ForeignRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ForeignKey pointed at them by
# some other model. In the example "poll.choice_set", the choice_set
# attribute is a ForeignRelatedObjectsDescriptor instance.
def __init__(self, related):
self.related = related # RelatedObject instance
def __get__(self, instance, instance_type=None):
if instance is None:
return self
return self.related_manager_cls(instance)
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
manager = self.__get__(instance)
# If the foreign key can support nulls, then completely clear the related set.
# Otherwise, just move the named objects into the set.
if self.related.field.null:
manager.clear()
manager.add(*value)
@cached_property
def related_manager_cls(self):
# Dynamically create a class that subclasses the related model's default
# manager.
superclass = self.related.model._default_manager.__class__
rel_field = self.related.field
rel_model = self.related.model
attname = rel_field.rel.get_related_field().attname
class RelatedManager(superclass):
def __init__(self, instance):
super(RelatedManager, self).__init__()
self.instance = instance
self.core_filters = {
'%s__%s' % (rel_field.name, attname): getattr(instance, attname)
}
self.model = rel_model
def get_query_set(self):
try:
return self.instance._prefetched_objects_cache[rel_field.related_query_name()]
except (AttributeError, KeyError):
db = self._db or router.db_for_read(self.model, instance=self.instance)
qs = super(RelatedManager, self).get_query_set().using(db).filter(**self.core_filters)
val = getattr(self.instance, attname)
if val is None or val == '' and connections[db].features.interprets_empty_strings_as_nulls:
# We don't want to use qs.none() here, see #19652
return qs.filter(pk__in=[])
qs._known_related_objects = {rel_field: {self.instance.pk: self.instance}}
return qs
def get_prefetch_query_set(self, instances):
rel_obj_attr = attrgetter(rel_field.attname)
instance_attr = attrgetter(attname)
instances_dict = dict((instance_attr(inst), inst) for inst in instances)
db = self._db or router.db_for_read(self.model, instance=instances[0])
query = {'%s__%s__in' % (rel_field.name, attname): list(instances_dict)}
qs = super(RelatedManager, self).get_query_set().using(db).filter(**query)
# Since we just bypassed this class' get_query_set(), we must manage
# the reverse relation manually.
for rel_obj in qs:
instance = instances_dict[rel_obj_attr(rel_obj)]
setattr(rel_obj, rel_field.name, instance)
cache_name = rel_field.related_query_name()
return qs, rel_obj_attr, instance_attr, False, cache_name
def add(self, *objs):
for obj in objs:
if not isinstance(obj, self.model):
raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj))
setattr(obj, rel_field.name, self.instance)
obj.save()
add.alters_data = True
def create(self, **kwargs):
kwargs[rel_field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).create(**kwargs)
create.alters_data = True
def get_or_create(self, **kwargs):
# Update kwargs with the related object that this
# ForeignRelatedObjectsDescriptor knows about.
kwargs[rel_field.name] = self.instance
db = router.db_for_write(self.model, instance=self.instance)
return super(RelatedManager, self.db_manager(db)).get_or_create(**kwargs)
get_or_create.alters_data = True
# remove() and clear() are only provided if the ForeignKey can have a value of null.
if rel_field.null:
def remove(self, *objs):
val = getattr(self.instance, attname)
for obj in objs:
# Is obj actually part of this descriptor set?
if getattr(obj, rel_field.attname) == val:
setattr(obj, rel_field.name, None)
obj.save()
else:
raise rel_field.rel.to.DoesNotExist("%r is not related to %r." % (obj, self.instance))
remove.alters_data = True
def clear(self):
self.update(**{rel_field.name: None})
clear.alters_data = True
return RelatedManager
def create_many_related_manager(superclass, rel):
"""Creates a manager that subclasses 'superclass' (which is a Manager)
and adds behavior for many-to-many related objects."""
class ManyRelatedManager(superclass):
def __init__(self, model=None, query_field_name=None, instance=None, symmetrical=None,
source_field_name=None, target_field_name=None, reverse=False,
through=None, prefetch_cache_name=None):
super(ManyRelatedManager, self).__init__()
self.model = model
self.query_field_name = query_field_name
self.core_filters = {'%s__pk' % query_field_name: instance._get_pk_val()}
self.instance = instance
self.symmetrical = symmetrical
self.source_field_name = source_field_name
self.target_field_name = target_field_name
self.reverse = reverse
self.through = through
self.prefetch_cache_name = prefetch_cache_name
self._fk_val = self._get_fk_val(instance, source_field_name)
if self._fk_val is None:
raise ValueError('"%r" needs to have a value for field "%s" before '
'this many-to-many relationship can be used.' %
(instance, source_field_name))
            # Even if this relation is not to the pk, we still require a pk value.
            # The expectation is that the instance has already been saved to the
            # DB, although having a pk value isn't a guarantee of that.
if instance.pk is None:
raise ValueError("%r instance needs to have a primary key value before "
"a many-to-many relationship can be used." %
instance.__class__.__name__)
def _get_fk_val(self, obj, field_name):
"""
Returns the correct value for this relationship's foreign key. This
            might be something other than the pk value when to_field is used.
"""
if not self.through:
# Make custom m2m fields with no through model defined usable.
return obj.pk
fk = self.through._meta.get_field(field_name)
if fk.rel.field_name and fk.rel.field_name != fk.rel.to._meta.pk.attname:
attname = fk.rel.get_related_field().get_attname()
return fk.get_prep_lookup('exact', getattr(obj, attname))
else:
return obj.pk
def get_query_set(self):
try:
return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
except (AttributeError, KeyError):
db = self._db or router.db_for_read(self.instance.__class__, instance=self.instance)
return super(ManyRelatedManager, self).get_query_set().using(db)._next_is_sticky().filter(**self.core_filters)
def get_prefetch_query_set(self, instances):
instance = instances[0]
from django.db import connections
db = self._db or router.db_for_read(instance.__class__, instance=instance)
query = {'%s__pk__in' % self.query_field_name:
set(obj._get_pk_val() for obj in instances)}
qs = super(ManyRelatedManager, self).get_query_set().using(db)._next_is_sticky().filter(**query)
# M2M: need to annotate the query in order to get the primary model
# that the secondary model was actually related to. We know that
# there will already be a join on the join table, so we can just add
# the select.
# For non-autocreated 'through' models, can't assume we are
# dealing with PK values.
fk = self.through._meta.get_field(self.source_field_name)
source_col = fk.column
join_table = self.through._meta.db_table
connection = connections[db]
qn = connection.ops.quote_name
qs = qs.extra(select={'_prefetch_related_val':
'%s.%s' % (qn(join_table), qn(source_col))})
select_attname = fk.rel.get_related_field().get_attname()
return (qs,
attrgetter('_prefetch_related_val'),
attrgetter(select_attname),
False,
self.prefetch_cache_name)
# If the ManyToMany relation has an intermediary model,
# the add and remove methods do not exist.
if rel.through._meta.auto_created:
def add(self, *objs):
self._add_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, add the mirror entry in the m2m table
if self.symmetrical:
self._add_items(self.target_field_name, self.source_field_name, *objs)
add.alters_data = True
def remove(self, *objs):
self._remove_items(self.source_field_name, self.target_field_name, *objs)
# If this is a symmetrical m2m relation to self, remove the mirror entry in the m2m table
if self.symmetrical:
self._remove_items(self.target_field_name, self.source_field_name, *objs)
remove.alters_data = True
def clear(self):
self._clear_items(self.source_field_name)
# If this is a symmetrical m2m relation to self, clear the mirror entry in the m2m table
if self.symmetrical:
self._clear_items(self.target_field_name)
clear.alters_data = True
def create(self, **kwargs):
# This check needs to be done here, since we can't later remove this
# from the method lookup table, as we do with add and remove.
if not self.through._meta.auto_created:
opts = self.through._meta
raise AttributeError("Cannot use create() on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
db = router.db_for_write(self.instance.__class__, instance=self.instance)
new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
self.add(new_obj)
return new_obj
create.alters_data = True
def get_or_create(self, **kwargs):
db = router.db_for_write(self.instance.__class__, instance=self.instance)
obj, created = \
super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs)
# We only need to add() if created because if we got an object back
# from get() then the relationship already exists.
if created:
self.add(obj)
return obj, created
get_or_create.alters_data = True
def _add_items(self, source_field_name, target_field_name, *objs):
# source_field_name: the PK fieldname in join table for the source object
# target_field_name: the PK fieldname in join table for the target object
# *objs - objects to add. Either object instances, or primary keys of object instances.
# If there aren't any objects, there is nothing to do.
from django.db.models import Model
if objs:
new_ids = set()
for obj in objs:
if isinstance(obj, self.model):
if not router.allow_relation(obj, self.instance):
raise ValueError('Cannot add "%r": instance is on database "%s", value is on database "%s"' %
(obj, self.instance._state.db, obj._state.db))
fk_val = self._get_fk_val(obj, target_field_name)
if fk_val is None:
raise ValueError('Cannot add "%r": the value for field "%s" is None' %
(obj, target_field_name))
new_ids.add(self._get_fk_val(obj, target_field_name))
elif isinstance(obj, Model):
raise TypeError("'%s' instance expected, got %r" % (self.model._meta.object_name, obj))
else:
new_ids.add(obj)
db = router.db_for_write(self.through, instance=self.instance)
vals = self.through._default_manager.using(db).values_list(target_field_name, flat=True)
vals = vals.filter(**{
source_field_name: self._fk_val,
'%s__in' % target_field_name: new_ids,
})
new_ids = new_ids - set(vals)
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action='pre_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
# Add the ones that aren't there already
self.through._default_manager.using(db).bulk_create([
self.through(**{
'%s_id' % source_field_name: self._fk_val,
'%s_id' % target_field_name: obj_id,
})
for obj_id in new_ids
])
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are inserting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action='post_add',
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=new_ids, using=db)
def _remove_items(self, source_field_name, target_field_name, *objs):
# source_field_name: the PK colname in join table for the source object
# target_field_name: the PK colname in join table for the target object
# *objs - objects to remove
# If there aren't any objects, there is nothing to do.
if objs:
# Check that all the objects are of the right type
old_ids = set()
for obj in objs:
if isinstance(obj, self.model):
old_ids.add(self._get_fk_val(obj, target_field_name))
else:
old_ids.add(obj)
# Work out what DB we're operating on
db = router.db_for_write(self.through, instance=self.instance)
# Send a signal to the other end if need be.
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are deleting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="pre_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
# Remove the specified objects from the join table
self.through._default_manager.using(db).filter(**{
source_field_name: self._fk_val,
'%s__in' % target_field_name: old_ids
}).delete()
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are deleting the
# duplicate data row for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="post_remove",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=old_ids, using=db)
def _clear_items(self, source_field_name):
db = router.db_for_write(self.through, instance=self.instance)
# source_field_name: the PK colname in join table for the source object
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are clearing the
# duplicate data rows for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="pre_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
self.through._default_manager.using(db).filter(**{
source_field_name: self._fk_val
}).delete()
if self.reverse or source_field_name == self.source_field_name:
# Don't send the signal when we are clearing the
# duplicate data rows for symmetrical reverse entries.
signals.m2m_changed.send(sender=self.through, action="post_clear",
instance=self.instance, reverse=self.reverse,
model=self.model, pk_set=None, using=db)
return ManyRelatedManager
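# Illustrative sketch (not part of Django itself): given
#     class Article(models.Model):
#         publications = models.ManyToManyField(Publication)
# article.publications is an instance of the ManyRelatedManager built above;
# article.publications.add(pub) inserts rows into the join table and emits
# pre_add/post_add m2m_changed signals, and add()/remove() are only attached
# when the through model was auto-created (i.e. no explicit intermediary model).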
class ManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField pointed at them by
# some other model (rather than having a ManyToManyField themselves).
# In the example "publication.article_set", the article_set attribute is a
# ManyRelatedObjectsDescriptor instance.
def __init__(self, related):
self.related = related # RelatedObject instance
@cached_property
def related_manager_cls(self):
# Dynamically create a class that subclasses the related
# model's default manager.
return create_many_related_manager(
self.related.model._default_manager.__class__,
self.related.field.rel
)
def __get__(self, instance, instance_type=None):
if instance is None:
return self
rel_model = self.related.model
manager = self.related_manager_cls(
model=rel_model,
query_field_name=self.related.field.name,
prefetch_cache_name=self.related.field.related_query_name(),
instance=instance,
symmetrical=False,
source_field_name=self.related.field.m2m_reverse_field_name(),
target_field_name=self.related.field.m2m_field_name(),
reverse=True,
through=self.related.field.rel.through,
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
if not self.related.field.rel.through._meta.auto_created:
opts = self.related.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ReverseManyRelatedObjectsDescriptor(object):
# This class provides the functionality that makes the related-object
# managers available as attributes on a model class, for fields that have
# multiple "remote" values and have a ManyToManyField defined in their
# model (rather than having another model pointed *at* them).
# In the example "article.publications", the publications attribute is a
# ReverseManyRelatedObjectsDescriptor instance.
def __init__(self, m2m_field):
self.field = m2m_field
@property
def through(self):
# through is provided so that you have easy access to the through
# model (Book.authors.through) for inlines, etc. This is done as
# a property to ensure that the fully resolved value is returned.
return self.field.rel.through
@cached_property
def related_manager_cls(self):
# Dynamically create a class that subclasses the related model's
# default manager.
return create_many_related_manager(
self.field.rel.to._default_manager.__class__,
self.field.rel
)
def __get__(self, instance, instance_type=None):
if instance is None:
return self
manager = self.related_manager_cls(
model=self.field.rel.to,
query_field_name=self.field.related_query_name(),
prefetch_cache_name=self.field.name,
instance=instance,
symmetrical=self.field.rel.symmetrical,
source_field_name=self.field.m2m_field_name(),
target_field_name=self.field.m2m_reverse_field_name(),
reverse=False,
through=self.field.rel.through,
)
return manager
def __set__(self, instance, value):
if instance is None:
raise AttributeError("Manager must be accessed via instance")
if not self.field.rel.through._meta.auto_created:
opts = self.field.rel.through._meta
raise AttributeError("Cannot set values on a ManyToManyField which specifies an intermediary model. Use %s.%s's Manager instead." % (opts.app_label, opts.object_name))
manager = self.__get__(instance)
manager.clear()
manager.add(*value)
class ManyToOneRel(object):
def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
parent_link=False, on_delete=None):
try:
to._meta
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "'to' must be either a model, a model name or the string %r" % RECURSIVE_RELATIONSHIP_CONSTANT
self.to, self.field_name = to, field_name
self.related_name = related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.multiple = True
self.parent_link = parent_link
self.on_delete = on_delete
def is_hidden(self):
"Should the related object be hidden?"
return self.related_name and self.related_name[-1] == '+'
def get_related_field(self):
"""
Returns the Field in the 'to' object to which this relationship is
tied.
"""
data = self.to._meta.get_field_by_name(self.field_name)
if not data[2]:
raise FieldDoesNotExist("No related field named '%s'" %
self.field_name)
return data[0]
class OneToOneRel(ManyToOneRel):
def __init__(self, to, field_name, related_name=None, limit_choices_to=None,
parent_link=False, on_delete=None):
super(OneToOneRel, self).__init__(to, field_name,
related_name=related_name, limit_choices_to=limit_choices_to,
parent_link=parent_link, on_delete=on_delete
)
self.multiple = False
class ManyToManyRel(object):
def __init__(self, to, related_name=None, limit_choices_to=None,
symmetrical=True, through=None):
self.to = to
self.related_name = related_name
if limit_choices_to is None:
limit_choices_to = {}
self.limit_choices_to = limit_choices_to
self.symmetrical = symmetrical
self.multiple = True
self.through = through
def is_hidden(self):
"Should the related object be hidden?"
return self.related_name and self.related_name[-1] == '+'
def get_related_field(self):
"""
        Returns the field in the 'to' object to which this relationship is tied
(this is always the primary key on the target model). Provided for
symmetry with ManyToOneRel.
"""
return self.to._meta.pk
class ForeignKey(RelatedField, Field):
empty_strings_allowed = False
default_error_messages = {
'invalid': _('Model %(model)s with pk %(pk)r does not exist.')
}
description = _("Foreign Key (type determined by related field)")
def __init__(self, to, to_field=None, rel_class=ManyToOneRel, **kwargs):
try:
to_name = to._meta.object_name.lower()
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter to ForeignKey must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
else:
assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
# For backwards compatibility purposes, we need to *try* and set
# the to_field during FK construction. It won't be guaranteed to
# be correct until contribute_to_class is called. Refs #12190.
to_field = to_field or (to._meta.pk and to._meta.pk.name)
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
if 'db_index' not in kwargs:
kwargs['db_index'] = True
kwargs['rel'] = rel_class(to, to_field,
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
parent_link=kwargs.pop('parent_link', False),
on_delete=kwargs.pop('on_delete', CASCADE),
)
Field.__init__(self, **kwargs)
def validate(self, value, model_instance):
if self.rel.parent_link:
return
super(ForeignKey, self).validate(value, model_instance)
if value is None:
return
using = router.db_for_read(model_instance.__class__, instance=model_instance)
qs = self.rel.to._default_manager.using(using).filter(
**{self.rel.field_name: value}
)
qs = qs.complex_filter(self.rel.limit_choices_to)
if not qs.exists():
raise exceptions.ValidationError(self.error_messages['invalid'] % {
'model': self.rel.to._meta.verbose_name, 'pk': value})
def get_attname(self):
return '%s_id' % self.name
def get_validator_unique_lookup_type(self):
return '%s__%s__exact' % (self.name, self.rel.get_related_field().name)
def get_default(self):
"Here we check if the default value is an object and return the to_field if so."
field_default = super(ForeignKey, self).get_default()
if isinstance(field_default, self.rel.to):
return getattr(field_default, self.rel.get_related_field().attname)
return field_default
def get_db_prep_save(self, value, connection):
if value == '' or value == None:
return None
else:
return self.rel.get_related_field().get_db_prep_save(value,
connection=connection)
def value_to_string(self, obj):
if not obj:
# In required many-to-one fields with only one available choice,
# select that one available choice. Note: For SelectFields
# we have to check that the length of choices is *2*, not 1,
# because SelectFields always have an initial "blank" value.
if not self.blank and self.choices:
choice_list = self.get_choices_default()
if len(choice_list) == 2:
return smart_text(choice_list[1][0])
return Field.value_to_string(self, obj)
def contribute_to_class(self, cls, name):
super(ForeignKey, self).contribute_to_class(cls, name)
setattr(cls, self.name, ReverseSingleRelatedObjectDescriptor(self))
if isinstance(self.rel.to, six.string_types):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "o2m")
def contribute_to_related_class(self, cls, related):
# Internal FK's - i.e., those with a related name ending with '+' -
# and swapped models don't get a related descriptor.
if not self.rel.is_hidden() and not related.model._meta.swapped:
setattr(cls, related.get_accessor_name(), ForeignRelatedObjectsDescriptor(related))
if self.rel.limit_choices_to:
cls._meta.related_fkey_lookups.append(self.rel.limit_choices_to)
if self.rel.field_name is None:
self.rel.field_name = cls._meta.pk.name
def formfield(self, **kwargs):
db = kwargs.pop('using', None)
if isinstance(self.rel.to, six.string_types):
raise ValueError("Cannot create form field for %r yet, because "
"its related model %r has not been loaded yet" %
(self.name, self.rel.to))
defaults = {
'form_class': forms.ModelChoiceField,
'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to),
'to_field_name': self.rel.field_name,
}
defaults.update(kwargs)
return super(ForeignKey, self).formfield(**defaults)
def db_type(self, connection):
# The database column type of a ForeignKey is the column type
# of the field to which it points. An exception is if the ForeignKey
# points to an AutoField/PositiveIntegerField/PositiveSmallIntegerField,
# in which case the column type is simply that of an IntegerField.
# If the database needs similar types for key fields however, the only
# thing we can do is making AutoField an IntegerField.
rel_field = self.rel.get_related_field()
if (isinstance(rel_field, AutoField) or
(not connection.features.related_fields_match_type and
isinstance(rel_field, (PositiveIntegerField,
PositiveSmallIntegerField)))):
return IntegerField().db_type(connection=connection)
return rel_field.db_type(connection=connection)
class OneToOneField(ForeignKey):
"""
A OneToOneField is essentially the same as a ForeignKey, with the exception
    that it always carries a "unique" constraint with it and the reverse relation
always returns the object pointed to (since there will only ever be one),
rather than returning a list.
"""
description = _("One-to-one relationship")
def __init__(self, to, to_field=None, **kwargs):
kwargs['unique'] = True
super(OneToOneField, self).__init__(to, to_field, OneToOneRel, **kwargs)
def contribute_to_related_class(self, cls, related):
setattr(cls, related.get_accessor_name(),
SingleRelatedObjectDescriptor(related))
def formfield(self, **kwargs):
if self.rel.parent_link:
return None
return super(OneToOneField, self).formfield(**kwargs)
def save_form_data(self, instance, data):
if isinstance(data, self.rel.to):
setattr(instance, self.name, data)
else:
setattr(instance, self.attname, data)
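# Illustrative sketch (not part of Django itself): with
#     class Restaurant(models.Model):
#         place = models.OneToOneField(Place)
# the forward attribute restaurant.place behaves like a ForeignKey lookup, while
# the reverse accessor place.restaurant (a SingleRelatedObjectDescriptor) returns
# the single related Restaurant instance or raises Restaurant.DoesNotExist,
# instead of exposing a manager of many objects.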
def create_many_to_many_intermediary_model(field, klass):
from django.db import models
managed = True
if isinstance(field.rel.to, six.string_types) and field.rel.to != RECURSIVE_RELATIONSHIP_CONSTANT:
to_model = field.rel.to
to = to_model.split('.')[-1]
def set_managed(field, model, cls):
field.rel.through._meta.managed = model._meta.managed or cls._meta.managed
add_lazy_relation(klass, field, to_model, set_managed)
elif isinstance(field.rel.to, six.string_types):
to = klass._meta.object_name
to_model = klass
managed = klass._meta.managed
else:
to = field.rel.to._meta.object_name
to_model = field.rel.to
managed = klass._meta.managed or to_model._meta.managed
name = '%s_%s' % (klass._meta.object_name, field.name)
if field.rel.to == RECURSIVE_RELATIONSHIP_CONSTANT or to == klass._meta.object_name:
from_ = 'from_%s' % to.lower()
to = 'to_%s' % to.lower()
else:
from_ = klass._meta.object_name.lower()
to = to.lower()
meta = type('Meta', (object,), {
'db_table': field._get_m2m_db_table(klass._meta),
'managed': managed,
'auto_created': klass,
'app_label': klass._meta.app_label,
'db_tablespace': klass._meta.db_tablespace,
'unique_together': (from_, to),
'verbose_name': '%(from)s-%(to)s relationship' % {'from': from_, 'to': to},
'verbose_name_plural': '%(from)s-%(to)s relationships' % {'from': from_, 'to': to},
})
# Construct and return the new class.
return type(str(name), (models.Model,), {
'Meta': meta,
'__module__': klass.__module__,
from_: models.ForeignKey(klass, related_name='%s+' % name, db_tablespace=field.db_tablespace),
to: models.ForeignKey(to_model, related_name='%s+' % name, db_tablespace=field.db_tablespace)
})
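# Illustrative sketch (hypothetical models, not from the original source): for
#     class Article(models.Model):
#         publications = models.ManyToManyField(Publication)
# the factory above returns an auto-created intermediary roughly equivalent to
#     class Article_publications(models.Model):
#         article = models.ForeignKey(Article, related_name='Article_publications+')
#         publication = models.ForeignKey(Publication, related_name='Article_publications+')
#         class Meta:
#             auto_created = Article
#             unique_together = ('article', 'publication')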
class ManyToManyField(RelatedField, Field):
description = _("Many-to-many relationship")
def __init__(self, to, **kwargs):
try:
assert not to._meta.abstract, "%s cannot define a relation with abstract class %s" % (self.__class__.__name__, to._meta.object_name)
except AttributeError: # to._meta doesn't exist, so it must be RECURSIVE_RELATIONSHIP_CONSTANT
assert isinstance(to, six.string_types), "%s(%r) is invalid. First parameter to ManyToManyField must be either a model, a model name, or the string %r" % (self.__class__.__name__, to, RECURSIVE_RELATIONSHIP_CONSTANT)
# Python 2.6 and earlier require dictionary keys to be of str type,
# not unicode and class names must be ASCII (in Python 2.x), so we
# forcibly coerce it here (breaks early if there's a problem).
to = str(to)
kwargs['verbose_name'] = kwargs.get('verbose_name', None)
kwargs['rel'] = ManyToManyRel(to,
related_name=kwargs.pop('related_name', None),
limit_choices_to=kwargs.pop('limit_choices_to', None),
symmetrical=kwargs.pop('symmetrical', to == RECURSIVE_RELATIONSHIP_CONSTANT),
through=kwargs.pop('through', None))
self.db_table = kwargs.pop('db_table', None)
if kwargs['rel'].through is not None:
assert self.db_table is None, "Cannot specify a db_table if an intermediary model is used."
Field.__init__(self, **kwargs)
msg = _('Hold down "Control", or "Command" on a Mac, to select more than one.')
self.help_text = string_concat(self.help_text, ' ', msg)
def get_choices_default(self):
return Field.get_choices(self, include_blank=False)
def _get_m2m_db_table(self, opts):
"Function that can be curried to provide the m2m table name for this relation"
if self.rel.through is not None:
return self.rel.through._meta.db_table
elif self.db_table:
return self.db_table
else:
return util.truncate_name('%s_%s' % (opts.db_table, self.name),
connection.ops.max_name_length())
def _get_m2m_attr(self, related, attr):
"Function that can be curried to provide the source accessor or DB column name for the m2m table"
cache_attr = '_m2m_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
for f in self.rel.through._meta.fields:
if hasattr(f, 'rel') and f.rel and f.rel.to == related.model:
setattr(self, cache_attr, getattr(f, attr))
return getattr(self, cache_attr)
def _get_m2m_reverse_attr(self, related, attr):
"Function that can be curried to provide the related accessor or DB column name for the m2m table"
cache_attr = '_m2m_reverse_%s_cache' % attr
if hasattr(self, cache_attr):
return getattr(self, cache_attr)
found = False
for f in self.rel.through._meta.fields:
if hasattr(f, 'rel') and f.rel and f.rel.to == related.parent_model:
if related.model == related.parent_model:
# If this is an m2m-intermediate to self,
# the first foreign key you find will be
# the source column. Keep searching for
# the second foreign key.
if found:
setattr(self, cache_attr, getattr(f, attr))
break
else:
found = True
else:
setattr(self, cache_attr, getattr(f, attr))
break
return getattr(self, cache_attr)
def value_to_string(self, obj):
data = ''
if obj:
qs = getattr(obj, self.name).all()
data = [instance._get_pk_val() for instance in qs]
else:
# In required many-to-many fields with only one available choice,
# select that one available choice.
if not self.blank:
choices_list = self.get_choices_default()
if len(choices_list) == 1:
data = [choices_list[0][0]]
return smart_text(data)
def contribute_to_class(self, cls, name):
# To support multiple relations to self, it's useful to have a non-None
# related name on symmetrical relations for internal reasons. The
# concept doesn't make a lot of sense externally ("you want me to
# specify *what* on my non-reversible relation?!"), so we set it up
# automatically. The funky name reduces the chance of an accidental
# clash.
if self.rel.symmetrical and (self.rel.to == "self" or self.rel.to == cls._meta.object_name):
self.rel.related_name = "%s_rel_+" % name
super(ManyToManyField, self).contribute_to_class(cls, name)
# The intermediate m2m model is not auto created if:
        #  1) There is a manually specified intermediate, or
        #  2) The class owning the m2m field is abstract, or
        #  3) The class owning the m2m field has been swapped out.
if not self.rel.through and not cls._meta.abstract and not cls._meta.swapped:
self.rel.through = create_many_to_many_intermediary_model(self, cls)
# Add the descriptor for the m2m relation
setattr(cls, self.name, ReverseManyRelatedObjectsDescriptor(self))
# Set up the accessor for the m2m table name for the relation
self.m2m_db_table = curry(self._get_m2m_db_table, cls._meta)
# Populate some necessary rel arguments so that cross-app relations
# work correctly.
if isinstance(self.rel.through, six.string_types):
def resolve_through_model(field, model, cls):
field.rel.through = model
add_lazy_relation(cls, self, self.rel.through, resolve_through_model)
if isinstance(self.rel.to, six.string_types):
target = self.rel.to
else:
target = self.rel.to._meta.db_table
cls._meta.duplicate_targets[self.column] = (target, "m2m")
def contribute_to_related_class(self, cls, related):
# Internal M2Ms (i.e., those with a related name ending with '+')
# and swapped models don't get a related descriptor.
if not self.rel.is_hidden() and not related.model._meta.swapped:
setattr(cls, related.get_accessor_name(), ManyRelatedObjectsDescriptor(related))
# Set up the accessors for the column names on the m2m table
self.m2m_column_name = curry(self._get_m2m_attr, related, 'column')
self.m2m_reverse_name = curry(self._get_m2m_reverse_attr, related, 'column')
self.m2m_field_name = curry(self._get_m2m_attr, related, 'name')
self.m2m_reverse_field_name = curry(self._get_m2m_reverse_attr, related, 'name')
get_m2m_rel = curry(self._get_m2m_attr, related, 'rel')
self.m2m_target_field_name = lambda: get_m2m_rel().field_name
get_m2m_reverse_rel = curry(self._get_m2m_reverse_attr, related, 'rel')
self.m2m_reverse_target_field_name = lambda: get_m2m_reverse_rel().field_name
def set_attributes_from_rel(self):
pass
def value_from_object(self, obj):
"Returns the value of this field in the given model instance."
return getattr(obj, self.attname).all()
def save_form_data(self, instance, data):
setattr(instance, self.attname, data)
def formfield(self, **kwargs):
db = kwargs.pop('using', None)
defaults = {
'form_class': forms.ModelMultipleChoiceField,
'queryset': self.rel.to._default_manager.using(db).complex_filter(self.rel.limit_choices_to)
}
defaults.update(kwargs)
# If initial is passed in, it's a list of related objects, but the
# MultipleChoiceField takes a list of IDs.
if defaults.get('initial') is not None:
initial = defaults['initial']
if callable(initial):
initial = initial()
defaults['initial'] = [i._get_pk_val() for i in initial]
return super(ManyToManyField, self).formfield(**defaults)
def db_type(self, connection):
# A ManyToManyField is not represented by a single column,
# so return None.
return None
# File: nodes/VanderPlas17Python/E_Chapter4/N_ThreeDimensionalPlotting/A_ThreeDimensionalPoints/index.py (repo: nimra/module_gen, Python, no license)
# Lawrence McAfee
# ~~~~~~~~ import ~~~~~~~~
from modules.node.HierNode import HierNode
from modules.node.LeafNode import LeafNode
from modules.node.Stage import Stage
from modules.node.block.CodeBlock import CodeBlock as cbk
from modules.node.block.HierBlock import HierBlock as hbk
from modules.node.block.ImageBlock import ImageBlock as ibk
from modules.node.block.ListBlock import ListBlock as lbk
from modules.node.block.MarkdownBlock import MarkdownBlock as mbk
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
blocks = [
# Figure 4-92. An empty three-dimensional axes
#
# With this 3D axes enabled, we can now plot a variety of three-dimensional plot types.
# Three-dimensional plotting is one of the functionalities that benefits immensely from
# viewing figures interactively rather than statically in the notebook; recall that to use
# interactive figures, you can use %matplotlib notebook rather than %matplotlib
# inline when running this code.
#
# Three-Dimensional Points and Lines
# The most basic three-dimensional plot is a line or scatter plot created from sets of (x,
# y, z) triples. In analogy with the more common two-dimensional plots discussed
# earlier, we can create these using the ax.plot3D and ax.scatter3D functions. The call
# signature for these is nearly identical to that of their two-dimensional counterparts,
# so you can refer to “Simple Line Plots” on page 224 and “Simple Scatter Plots” on
# page 233 for more information on controlling the output. Here we'll plot a
# trigonometric spiral, along with some points drawn randomly near the line (Figure 4-93):
# In[4]: ax = plt.axes(projection='3d')
#
# # Data for a three-dimensional line
# zline = np.linspace(0, 15, 1000)
# xline = np.sin(zline)
# yline = np.cos(zline)
# ax.plot3D(xline, yline, zline, 'gray')
#
# # Data for three-dimensional scattered points
# zdata = 15 * np.random.random(100)
# xdata = np.sin(zdata) + 0.1 * np.random.randn(100)
# ydata = np.cos(zdata) + 0.1 * np.random.randn(100)
# ax.scatter3D(xdata, ydata, zdata, c=zdata, cmap='Greens');
#
# Figure 4-93. Points and lines in three dimensions
#
# Notice that by default, the scatter points have their transparency adjusted to give a
# sense of depth on the page. While the three-dimensional effect is sometimes difficult
# to see within a static image, an interactive view can lead to some nice intuition about
# the layout of the points.
#
# Three-Dimensional Contour Plots
# Analogous to the contour plots we explored in “Density and Contour Plots” on page
# 241, mplot3d contains tools to create three-dimensional relief plots using the same
# inputs. Like two-dimensional ax.contour plots, ax.contour3D requires all the input
# data to be in the form of two-dimensional regular grids, with the Z data evaluated at
# each point. Here we’ll show a three-dimensional contour diagram of a three-
# dimensional sinusoidal function (Figure 4-94):
# In[5]: def f(x, y):
# return np.sin(np.sqrt(x ** 2 + y ** 2))
#
# x = np.linspace(-6, 6, 30)
# y = np.linspace(-6, 6, 30)
#
# X, Y = np.meshgrid(x, y)
# Z = f(X, Y)
# In[6]: fig = plt.figure()
# ax = plt.axes(projection='3d')
# ax.contour3D(X, Y, Z, 50, cmap='binary')
# ax.set_xlabel('x')
# ax.set_ylabel('y')
# ax.set_zlabel('z');
#
]
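# ---------------------------------------------------------------------------
# Sketch (added for illustration, kept as comments so the generated module has
# no import-time side effects): the line-and-scatter example described in the
# block text above, assuming only numpy and matplotlib are installed.
#
#   import numpy as np
#   import matplotlib.pyplot as plt
#   from mpl_toolkits import mplot3d  # registers the '3d' projection
#
#   ax = plt.axes(projection='3d')
#   zline = np.linspace(0, 15, 1000)
#   ax.plot3D(np.sin(zline), np.cos(zline), zline, 'gray')
#   zdata = 15 * np.random.random(100)
#   xdata = np.sin(zdata) + 0.1 * np.random.randn(100)
#   ydata = np.cos(zdata) + 0.1 * np.random.randn(100)
#   ax.scatter3D(xdata, ydata, zdata, c=zdata, cmap='Greens')
#   plt.show()
# ---------------------------------------------------------------------------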
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class Content(LeafNode):
def __init__(self):
super().__init__(
"Three-Dimensional Points and Lines",
# Stage.REMOVE_EXTRANEOUS,
# Stage.ORIG_BLOCKS,
# Stage.CUSTOM_BLOCKS,
# Stage.ORIG_FIGURES,
# Stage.CUSTOM_FIGURES,
# Stage.CUSTOM_EXERCISES,
)
[self.add(a) for a in blocks]
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
class ThreeDimensionalPoints(HierNode):
def __init__(self):
super().__init__("Three-Dimensional Points and Lines")
self.add(Content())
# eof
# File: src/starboard/android/shared/gyp_configuration.gypi (repo: blockspacer/cobalt-clone-cab7770533804d582eaa66c713a1582f361182d3, Apache-2.0)
# Copyright 2016 The Cobalt Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Platform specific configuration for Android on Starboard. Automatically
# included by gyp_cobalt in all .gyp files by Cobalt together with base.gypi.
#
{
'variables': {
'target_os': 'android',
'final_executable_type': 'shared_library',
'gtest_target_type': 'shared_library',
'sb_widevine_platform' : 'android',
'gl_type': 'system_gles2',
'enable_remote_debugging': 0,
'linker_flags': [
# The NDK default "ld" is actually the gold linker for all architectures
# except arm64 (aarch64) where it's the bfd linker. Don't use either of
# those, rather use lld everywhere. See release notes for NDK 19:
# https://developer.android.com/ndk/downloads/revision_history
'-fuse-ld=lld',
],
# Define platform specific compiler and linker flags.
# Refer to base.gypi for a list of all available variables.
'compiler_flags_host': [
'-O2',
],
'compiler_flags_debug': [
'-frtti',
'-O0',
],
'compiler_flags_devel': [
'-frtti',
'-O2',
],
'compiler_flags_qa': [
'-fno-rtti',
'-gline-tables-only',
],
'compiler_flags_qa_size': [
'-Os',
],
'compiler_flags_qa_speed': [
'-O2',
],
'compiler_flags_gold': [
'-fno-rtti',
'-gline-tables-only',
],
'compiler_flags_gold_size': [
'-Os',
],
'compiler_flags_gold_speed': [
'-O2',
],
'platform_libraries': [
'-lEGL',
'-lGLESv2',
'-lOpenSLES',
'-landroid',
'-llog',
'-lmediandk',
],
'conditions': [
['cobalt_fastbuild==0', {
'compiler_flags_debug': [
'-g',
],
'compiler_flags_devel': [
'-g',
],
'compiler_flags_qa': [
'-gline-tables-only',
],
'compiler_flags_gold': [
'-gline-tables-only',
],
}],
],
},
'target_defaults': {
'target_conditions': [
['sb_pedantic_warnings==1', {
'cflags': [
'-Wall',
'-Wextra',
'-Wunreachable-code',
# Don't get pedantic about warnings from base macros. These must be
# disabled after the -Wall above, so this has to be done here rather
# than in the platform's target toolchain.
# TODO: Rebase base and use static_assert instead of COMPILE_ASSERT
'-Wno-unused-local-typedef', # COMPILE_ASSERT
'-Wno-missing-field-initializers', # LAZY_INSTANCE_INITIALIZER
# It's OK not to use some input parameters. Note that the order
# matters: Wall implies Wunused-parameter and Wno-unused-parameter
# has no effect if specified before Wall.
'-Wno-unused-parameter',
],
}],
['_type=="executable"', {
# Android Lollipop+ requires relocatable executables.
'cflags': [
'-fPIE',
],
'ldflags': [
'-pie',
],
},{
# Android requires relocatable shared libraries.
'cflags': [
'-fPIC',
],
}],
['use_asan==1', {
'cflags': [
'-fsanitize=address',
'-fno-omit-frame-pointer',
],
'ldflags': [
'-fsanitize=address',
# Force linking of the helpers in sanitizer_options.cc
'-Wl,-u_sanitizer_options_link_helper',
],
'defines': [
'ADDRESS_SANITIZER',
],
}],
['use_tsan==1', {
'cflags': [
'-fsanitize=thread',
'-fno-omit-frame-pointer',
],
'ldflags': [
'-fsanitize=thread',
],
'defines': [
'THREAD_SANITIZER',
],
}],
],
}, # end of target_defaults
}
6914467b4e480fb1fed13898dda10452a6241fef | 51b6d2fc53d5c632fcf01319842baebf13901e84 | /atcoder.jp/arc032/arc032_1/Main.py | 66bb6d66546bb0ff6cd9f30580c3f42ba9e3c722 | [] | no_license | mono-0812/procon | 35db3b2c21eff74fbd7b52db07f249380f6834ef | 68a4b53880a228a0164052b23d1326363efcbc20 | refs/heads/master | 2023-05-30T17:02:58.935074 | 2021-06-27T12:15:10 | 2021-06-27T12:15:10 | 345,896,553 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | n = int(input())
# ARC032 A: let val = 1 + 2 + ... + n; print "WANWAN" if val is prime,
# otherwise print "BOWWOW".
val = 0
for i in range(1, n + 1):
    val += i
# Trial division up to sqrt(val) is enough to decide primality.
for i in range(2, int(val ** 0.5) + 1):
    if val % i == 0:
        print("BOWWOW")
        exit()
if val == 1:
    print("BOWWOW")
    exit()
print("WANWAN")
"[email protected]"
] | |
# File: all-paths-from-source-to-target/solution.py (repo: arpiagar/HackerEarth, Python, no license)
# https://leetcode.com/problems/all-paths-from-source-to-target/submissions/
from typing import List  # provided by the LeetCode harness; imported here so the file runs standalone

class Solution:
def allPathsSourceTarget(self, graph: List[List[int]]) -> List[List[int]]:
        adj_map = {}
for i in range(len(graph)):
adj_map[i] = graph[i]
start = 0
out = []
self.findpath(start, len(graph)-1, [], adj_map, out)
return out
def findpath(self, current, end, temp, adj_map, out):
if current == end:
out.append(temp+[current])
temp.append(current)
for elem in adj_map[current]:
self.findpath(elem, end, [x for x in temp],adj_map, out)
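# Example (illustration, not from the original file): for
#   graph = [[1, 2], [3], [3], []]
# Solution().allPathsSourceTarget(graph) returns [[0, 1, 3], [0, 2, 3]],
# i.e. both paths from node 0 to node 3 in the DAG.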
# File: genfragments/ThirteenTeV/MSSM_HiggsToMuMu/fragment_mhmodp_MA110_tb14_ggA.py (repo: cms-sw/genproductions, Python, no license)
COM_ENERGY = 13000.0 # GeV
CROSS_SECTION = 1 # pb
PROCESS = 'HiggsBSM:gg2A3 = on'
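# Note (inferred from the fragment itself and its filename): COM_ENERGY,
# CROSS_SECTION and PROCESS are module-level constants consumed by the
# cms.EDFilter defined below. PROCESS switches on Pythia8's gluon-fusion
# production of the CP-odd MSSM Higgs (gg -> A), matching the "ggA" suffix in
# the filename, and CROSS_SECTION appears to be a 1 pb placeholder rather than
# a computed value.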
SLHA_TABLE = """BLOCK SPINFO
1 FeynHiggs
2 2.12.0
2 built on ott 13, 2016
BLOCK MODSEL
1 0 # Model
2 1 # GridPts
3 0 # Content
4 0 # RPV
5 0 # CPV
6 0 # FV
BLOCK SMINPUTS
1 1.28952828E+02 # invAlfaMZ
2 1.16637000E-05 # GF
3 1.19000000E-01 # AlfasMZ
4 9.11876000E+01 # MZ
5 4.16000000E+00 # Mb
6 1.73200000E+02 # Mt
7 1.77703000E+00 # Mtau
11 5.10998902E-04 # Me
13 1.05658357E-01 # Mmu
21 6.00000000E-03 # Md
22 3.00000000E-03 # Mu
23 9.50000000E-02 # Ms
24 1.28600000E+00 # Mc
BLOCK MINPAR
3 1.40000000E+01 # TB
BLOCK EXTPAR
0 0.00000000E+00 # Q
1 9.54716519E+01 # M1
2 2.00000000E+02 # M2
3 1.50000000E+03 # M3
11 1.51428571E+03 # At
12 1.51428571E+03 # Ab
13 1.51428571E+03 # Atau
23 2.00000000E+02 # MUE
25 1.40000000E+01 # TB
26 1.10000000E+02 # MA0
27 1.36249178E+02 # MHp
31 5.00000000E+02 # MSL(1)
32 5.00000000E+02 # MSL(2)
33 1.00000000E+03 # MSL(3)
34 5.00000000E+02 # MSE(1)
35 5.00000000E+02 # MSE(2)
36 1.00000000E+03 # MSE(3)
41 1.50000000E+03 # MSQ(1)
42 1.50000000E+03 # MSQ(2)
43 1.00000000E+03 # MSQ(3)
44 1.50000000E+03 # MSU(1)
45 1.50000000E+03 # MSU(2)
46 1.00000000E+03 # MSU(3)
47 1.50000000E+03 # MSD(1)
48 1.50000000E+03 # MSD(2)
49 1.00000000E+03 # MSD(3)
BLOCK MASS
1000012 4.95867543E+02 # MSf(1,1,1)
1000011 5.02277652E+02 # MSf(1,2,1)
2000011 5.01829194E+02 # MSf(2,2,1)
1000002 1.49903513E+03 # MSf(1,3,1)
2000002 1.49959271E+03 # MSf(2,3,1)
1000001 1.50116775E+03 # MSf(1,4,1)
2000001 1.50020357E+03 # MSf(2,4,1)
1000014 4.95867543E+02 # MSf(1,1,2)
1000013 5.02423599E+02 # MSf(1,2,2)
2000013 5.01683097E+02 # MSf(2,2,2)
1000004 1.49903561E+03 # MSf(1,3,2)
2000004 1.49959333E+03 # MSf(2,3,2)
1000003 1.50117558E+03 # MSf(1,4,2)
2000003 1.50019575E+03 # MSf(2,4,2)
1000016 9.97940189E+02 # MSf(1,1,3)
1000015 9.99882507E+02 # MSf(1,2,3)
2000015 1.00217597E+03 # MSf(2,2,3)
1000006 8.76435511E+02 # MSf(1,3,3)
2000006 1.13478716E+03 # MSf(2,3,3)
1000005 9.98375125E+02 # MSf(1,4,3)
2000005 1.00369029E+03 # MSf(2,4,3)
25 1.07205976E+02 # Mh0
35 1.27765372E+02 # MHH
36 1.10000000E+02 # MA0
37 1.36717470E+02 # MHp
1000022 8.71700213E+01 # MNeu(1)
1000023 1.50504867E+02 # MNeu(2)
1000025 -2.09664642E+02 # MNeu(3)
1000035 2.67461405E+02 # MNeu(4)
1000024 1.46059389E+02 # MCha(1)
1000037 2.67571171E+02 # MCha(2)
1000021 1.50000000E+03 # MGl
BLOCK DMASS
0 1.73200000E+02 # Q
25 1.81082768E-01 # Delta Mh0
35 5.31343857E-01 # Delta MHH
36 0.00000000E+00 # Delta MA0
37 1.10863524E-01 # Delta MHp
BLOCK NMIX
1 1 9.26359748E-01 # ZNeu(1,1)
1 2 -1.27356386E-01 # ZNeu(1,2)
1 3 3.18049606E-01 # ZNeu(1,3)
1 4 -1.56468580E-01 # ZNeu(1,4)
2 1 -3.34170358E-01 # ZNeu(2,1)
2 2 -6.94276511E-01 # ZNeu(2,2)
2 3 5.02142193E-01 # ZNeu(2,3)
2 4 -3.92636619E-01 # ZNeu(2,4)
3 1 9.39740580E-02 # ZNeu(3,1)
3 2 -1.30790295E-01 # ZNeu(3,2)
3 3 -6.78628020E-01 # ZNeu(3,3)
3 4 -7.16607833E-01 # ZNeu(3,4)
4 1 -1.46139200E-01 # ZNeu(4,1)
4 2 6.96171225E-01 # ZNeu(4,2)
4 3 4.31464572E-01 # ZNeu(4,3)
4 4 -5.54821848E-01 # ZNeu(4,4)
BLOCK UMIX
1 1 -6.10491734E-01 # UCha(1,1)
1 2 7.92022628E-01 # UCha(1,2)
2 1 7.92022628E-01 # UCha(2,1)
2 2 6.10491734E-01 # UCha(2,2)
BLOCK VMIX
1 1 -7.92022628E-01 # VCha(1,1)
1 2 6.10491734E-01 # VCha(1,2)
2 1 6.10491734E-01 # VCha(2,1)
2 2 7.92022628E-01 # VCha(2,2)
BLOCK STAUMIX
1 1 6.71540180E-01 # USf(1,1)
1 2 7.40968142E-01 # USf(1,2)
2 1 7.40968142E-01 # USf(2,1)
2 2 -6.71540180E-01 # USf(2,2)
BLOCK STOPMIX
1 1 7.08243538E-01 # USf(1,1)
1 2 -7.05968194E-01 # USf(1,2)
2 1 7.05968194E-01 # USf(2,1)
2 2 7.08243538E-01 # USf(2,2)
BLOCK SBOTMIX
1 1 6.03353498E-01 # USf(1,1)
1 2 7.97473860E-01 # USf(1,2)
2 1 7.97473860E-01 # USf(2,1)
2 2 -6.03353498E-01 # USf(2,2)
BLOCK ALPHA
-1.20310802E+00 # Alpha
BLOCK DALPHA
4.00520479E-02 # Delta Alpha
BLOCK HMIX Q= -0.99900000E+03
1 2.00000000E+02 # MUE
2 1.40000000E+01 # TB
BLOCK MSOFT Q= 0.00000000E+00
1 9.54716519E+01 # M1
2 2.00000000E+02 # M2
3 1.50000000E+03 # M3
31 5.00000000E+02 # MSL(1)
32 5.00000000E+02 # MSL(2)
33 1.00000000E+03 # MSL(3)
34 5.00000000E+02 # MSE(1)
35 5.00000000E+02 # MSE(2)
36 1.00000000E+03 # MSE(3)
41 1.50000000E+03 # MSQ(1)
42 1.50000000E+03 # MSQ(2)
43 1.00000000E+03 # MSQ(3)
44 1.50000000E+03 # MSU(1)
45 1.50000000E+03 # MSU(2)
46 1.00000000E+03 # MSU(3)
47 1.50000000E+03 # MSD(1)
48 1.50000000E+03 # MSD(2)
49 1.00000000E+03 # MSD(3)
BLOCK AE Q= 0.00000000E+00
1 1 0.00000000E+00 # Af(1,1)
2 2 0.00000000E+00 # Af(2,2)
3 3 1.51428571E+03 # Af(3,3)
BLOCK AU Q= 0.00000000E+00
1 1 0.00000000E+00 # Af(1,1)
2 2 0.00000000E+00 # Af(2,2)
3 3 1.51428571E+03 # Af(3,3)
BLOCK AD Q= 0.00000000E+00
1 1 0.00000000E+00 # Af(1,1)
2 2 0.00000000E+00 # Af(2,2)
3 3 1.51428571E+03 # Af(3,3)
BLOCK YE Q= 0.00000000E+00
1 1 4.11949279E-05 # Yf(1,1)
2 2 8.51780382E-03 # Yf(2,2)
3 3 1.43257887E-01 # Yf(3,3)
BLOCK YU Q= 0.00000000E+00
1 1 1.72749580E-05 # Yf(1,1)
2 2 7.40519865E-03 # Yf(2,2)
3 3 9.97340906E-01 # Yf(3,3)
BLOCK YD Q= 0.00000000E+00
1 1 4.76870467E-04 # Yf(1,1)
2 2 7.55022224E-03 # Yf(2,2)
3 3 3.21042816E-01 # Yf(3,3)
BLOCK VCKMIN
1 2.25300000E-01 # lambda
2 8.08000000E-01 # A
3 1.32000000E-01 # rhobar
4 3.41000000E-01 # etabar
BLOCK MSL2 Q= 0.00000000E+00
1 1 2.50000000E+05 # MSL2(1,1)
2 2 2.50000000E+05 # MSL2(2,2)
3 3 1.00000000E+06 # MSL2(3,3)
BLOCK MSE2 Q= 0.00000000E+00
1 1 2.50000000E+05 # MSE2(1,1)
2 2 2.50000000E+05 # MSE2(2,2)
3 3 1.00000000E+06 # MSE2(3,3)
BLOCK MSQ2 Q= 0.00000000E+00
1 1 2.25000000E+06 # MSQ2(1,1)
2 2 2.25000000E+06 # MSQ2(2,2)
3 3 1.00000000E+06 # MSQ2(3,3)
BLOCK MSU2 Q= 0.00000000E+00
1 1 2.25000000E+06 # MSU2(1,1)
2 2 2.25000000E+06 # MSU2(2,2)
3 3 1.00000000E+06 # MSU2(3,3)
BLOCK MSD2 Q= 0.00000000E+00
1 1 2.25000000E+06 # MSD2(1,1)
2 2 2.25000000E+06 # MSD2(2,2)
3 3 1.00000000E+06 # MSD2(3,3)
BLOCK TE Q= 0.00000000E+00
1 1 0.00000000E+00 # Tf(1,1)
2 2 0.00000000E+00 # Tf(2,2)
3 3 2.16933371E+02 # Tf(3,3)
BLOCK TU Q= 0.00000000E+00
1 1 0.00000000E+00 # Tf(1,1)
2 2 0.00000000E+00 # Tf(2,2)
3 3 1.51025909E+03 # Tf(3,3)
BLOCK TD Q= 0.00000000E+00
1 1 0.00000000E+00 # Tf(1,1)
2 2 0.00000000E+00 # Tf(2,2)
3 3 4.86150550E+02 # Tf(3,3)
BLOCK SELMIX
1 1 9.99994952E-01 # UASf(1,1)
1 4 -3.17744688E-03 # UASf(1,4)
2 2 8.95991689E-01 # UASf(2,2)
2 5 -4.44070819E-01 # UASf(2,5)
3 3 6.71540180E-01 # UASf(3,3)
3 6 7.40968142E-01 # UASf(3,6)
4 1 3.17744688E-03 # UASf(4,1)
4 4 9.99994952E-01 # UASf(4,4)
5 2 4.44070819E-01 # UASf(5,2)
5 5 8.95991689E-01 # UASf(5,5)
6 3 7.40968142E-01 # UASf(6,3)
6 6 -6.71540180E-01 # UASf(6,6)
BLOCK USQMIX
1 1 1.00000000E+00 # UASf(1,1)
1 4 2.56324573E-05 # UASf(1,4)
2 2 9.99939654E-01 # UASf(2,2)
2 5 1.09857908E-02 # UASf(2,5)
3 3 7.08243538E-01 # UASf(3,3)
3 6 -7.05968194E-01 # UASf(3,6)
4 1 -2.56324573E-05 # UASf(4,1)
4 4 1.00000000E+00 # UASf(4,4)
5 2 -1.09857908E-02 # UASf(5,2)
5 5 9.99939654E-01 # UASf(5,5)
6 3 7.05968194E-01 # UASf(6,3)
6 6 7.08243538E-01 # UASf(6,6)
BLOCK DSQMIX
1 1 9.99983621E-01 # UASf(1,1)
1 4 -5.72350775E-03 # UASf(1,4)
2 2 9.95984067E-01 # UASf(2,2)
2 5 -8.95306504E-02 # UASf(2,5)
3 3 6.03353498E-01 # UASf(3,3)
3 6 7.97473860E-01 # UASf(3,6)
4 1 5.72350775E-03 # UASf(4,1)
4 4 9.99983621E-01 # UASf(4,4)
5 2 8.95306504E-02 # UASf(5,2)
5 5 9.95984067E-01 # UASf(5,5)
6 3 7.97473860E-01 # UASf(6,3)
6 6 -6.03353498E-01 # UASf(6,6)
BLOCK CVHMIX
1 1 6.42067901E-01 # UH(1,1)
1 2 7.66647775E-01 # UH(1,2)
1 3 0.00000000E+00 # UH(1,3)
2 1 7.66647775E-01 # UH(2,1)
2 2 -6.42067901E-01 # UH(2,2)
2 3 0.00000000E+00 # UH(2,3)
3 1 0.00000000E+00 # UH(3,1)
3 2 0.00000000E+00 # UH(3,2)
3 3 1.00000000E+00 # UH(3,3)
DECAY 25 3.41875866E-01 # Gamma(h0)
5.79386054E-06 2 22 22 # BR(h0 -> photon photon)
1.25675978E-08 2 22 23 # BR(h0 -> photon Z)
3.99769989E-06 2 23 23 # BR(h0 -> Z Z)
4.26627439E-05 2 -24 24 # BR(h0 -> W W)
8.72148397E-04 2 21 21 # BR(h0 -> gluon gluon)
8.70593592E-09 2 -11 11 # BR(h0 -> Electron electron)
3.87237488E-04 2 -13 13 # BR(h0 -> Muon muon)
1.10093995E-01 2 -15 15 # BR(h0 -> Tau tau)
2.62716636E-10 2 -2 2 # BR(h0 -> Up up)
3.63853694E-05 2 -4 4 # BR(h0 -> Charm charm)
1.40154792E-06 2 -1 1 # BR(h0 -> Down down)
3.51960360E-04 2 -3 3 # BR(h0 -> Strange strange)
8.88204396E-01 2 -5 5 # BR(h0 -> Bottom bottom)
DECAY 35 5.63170498E-02 # Gamma(HH)
1.16717839E-04 2 22 22 # BR(HH -> photon photon)
1.47840898E-04 2 22 23 # BR(HH -> photon Z)
2.18717644E-03 2 23 23 # BR(HH -> Z Z)
1.71409582E-02 2 -24 24 # BR(HH -> W W)
8.81648520E-03 2 21 21 # BR(HH -> gluon gluon)
8.87804614E-09 2 -11 11 # BR(HH -> Electron electron)
3.94913988E-04 2 -13 13 # BR(HH -> Muon muon)
1.12026296E-01 2 -15 15 # BR(HH -> Tau tau)
1.27899209E-08 2 -2 2 # BR(HH -> Up up)
1.77155175E-03 2 -4 4 # BR(HH -> Charm charm)
1.36915081E-06 2 -1 1 # BR(HH -> Down down)
3.43823171E-04 2 -3 3 # BR(HH -> Strange strange)
8.57047720E-01 2 -5 5 # BR(HH -> Bottom bottom)
5.12556106E-06 2 23 36 # BR(HH -> Z A0)
DECAY 36 3.93746610E-01 # Gamma(A0)
-2.86350604E-07 2 22 22 # BR(A0 -> photon photon)
-1.30583748E-08 2 22 23 # BR(A0 -> photon Z)
-1.14138999E-03 2 21 21 # BR(A0 -> gluon gluon)
-8.74774011E-09 2 -11 11 # BR(A0 -> Electron electron)
3.89101425E-04 2 -13 13 # BR(A0 -> Muon muon)
-1.10693557E-01 2 -15 15 # BR(A0 -> Tau tau)
-9.42242712E-12 2 -2 2 # BR(A0 -> Up up)
-1.37852009E-06 2 -4 4 # BR(A0 -> Charm charm)
-1.39918445E-06 2 -1 1 # BR(A0 -> Down down)
-3.51366970E-04 2 -3 3 # BR(A0 -> Strange strange)
-8.87421499E-01 2 -5 5 # BR(A0 -> Bottom bottom)
-3.74978114E-10 2 23 25 # BR(A0 -> Z h0)
DECAY 37 5.45817921E-02 # Gamma(Hp)
8.12535787E-08 2 -11 12 # BR(Hp -> Electron nu_e)
3.47383952E-03 2 -13 14 # BR(Hp -> Muon nu_mu)
9.82302680E-01 2 -15 16 # BR(Hp -> Tau nu_tau)
1.12502083E-05 2 -1 2 # BR(Hp -> Down up)
1.25444246E-04 2 -3 2 # BR(Hp -> Strange up)
7.52663746E-05 2 -5 2 # BR(Hp -> Bottom up)
9.55241727E-07 2 -1 4 # BR(Hp -> Down charm)
2.82513027E-03 2 -3 4 # BR(Hp -> Strange charm)
1.05381213E-02 2 -5 4 # BR(Hp -> Bottom charm)
3.20594076E-05 2 -5 6 # BR(Hp -> Bottom top)
3.49913322E-04 2 24 25 # BR(Hp -> W h0)
2.35989426E-07 2 24 35 # BR(Hp -> W HH)
2.65023366E-04 2 24 36 # BR(Hp -> W A0)
DECAY 6 1.38339571E+00 # Gamma(top)
9.91238679E-01 2 5 24 # BR(top -> bottom W)
8.76132109E-03 2 5 37 # BR(top -> bottom Hp)
"""
import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
pythiaPylistVerbosity = cms.untracked.int32(1),
filterEfficiency = cms.untracked.double(1),
pythiaHepMCVerbosity = cms.untracked.bool(False),
SLHATableForPythia8 = cms.string('%s' % SLHA_TABLE),
comEnergy = cms.double(COM_ENERGY),
crossSection = cms.untracked.double(CROSS_SECTION),
maxEventsToPrint = cms.untracked.int32(1),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
'Higgs:useBSM = on',
PROCESS,
'SLHA:allowUserOverride = off',
'SLHA:minMassSM = 100.',
'PhaseSpace:mHatMin = 56.0'
),
parameterSets = cms.vstring(
'pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters'
)
)
)
ProductionFilterSequence = cms.Sequence(generator)
6416ddb447a7ceb30781bc3de024bd452393023a | 12c15c7ae150acaf8032f444db24440da2234b1a | /ArtificialIntelligence/DOCUMENTATION/Project2_Jimut/valueIterationAgents.py | 84b978c8e5c524d6638932a24ece1a152f975751 | [] | no_license | Jimut123/rkmveri-labs | 315ecd4607af72dd0851489e427a3ab09a8009ff | be19a453ea32460c454e3443798e3d8954fb084b | refs/heads/master | 2023-02-02T17:11:23.641187 | 2020-12-13T18:35:20 | 2020-12-13T18:35:20 | 201,784,550 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,614 | py | # valueIterationAgents.py
# -----------------------
# Licensing Information: You are free to use or extend these projects for
# educational purposes provided that (1) you do not distribute or publish
# solutions, (2) you retain this notice, and (3) you provide clear
# attribution to UC Berkeley, including a link to http://ai.berkeley.edu.
#
# Attribution Information: The Pacman AI projects were developed at UC Berkeley.
# The core projects and autograders were primarily created by John DeNero
# ([email protected]) and Dan Klein ([email protected]).
# Student side autograding was added by Brad Miller, Nick Hay, and
# Pieter Abbeel ([email protected]).
import mdp, util
from learningAgents import ValueEstimationAgent
import collections
class ValueIterationAgent(ValueEstimationAgent):
"""
* Please read learningAgents.py before reading this.*
A ValueIterationAgent takes a Markov decision process
(see mdp.py) on initialization and runs value iteration
for a given number of iterations using the supplied
discount factor.
"""
def __init__(self, mdp, discount = 0.9, iterations = 100):
"""
Your value iteration agent should take an mdp on
construction, run the indicated number of iterations
and then act according to the resulting policy.
Some useful mdp methods you will use:
mdp.getStates()
mdp.getPossibleActions(state)
mdp.getTransitionStatesAndProbs(state, action)
mdp.getReward(state, action, nextState)
mdp.isTerminal(state)
"""
self.mdp = mdp
self.discount = discount
self.iterations = iterations
self.values = util.Counter() # A Counter is a dict with default 0
self.runValueIteration()
def runValueIteration(self):
# Write value iteration code here
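        # The loop below performs batch (synchronous) value iteration:
        #   V_{k+1}(s) = max_a sum_{s'} T(s,a,s') * [R(s,a,s') + discount * V_k(s')]
        # "updatedValues" is a copy of self.values so that every state in sweep
        # k+1 is computed from the frozen V_k, i.e. the classic Bellman backup.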
"*** YOUR CODE HERE ***"
for i in range(self.iterations): # every k
updatedValues = self.values.copy() # to use batch-version of MDP , hard copy the values
for state in self.mdp.getStates():
if self.mdp.isTerminal(state):
continue
actions = self.mdp.getPossibleActions(state)
optimal = max([self.getQValue(state,action) for action in actions])
updatedValues[state] = optimal
self.values = updatedValues
def getValue(self, state):
"""
Return the value of the state (computed in __init__).
"""
return self.values[state]
def computeQValueFromValues(self, state, action):
"""
Compute the Q-value of action in state from the
value function stored in self.values.
"""
"*** YOUR CODE HERE ***"
qval = 0
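        # Accumulate Q(state, action) = sum over successors s' of
        #   T(state, action, s') * (R(state, action, s') + discount * V(s')).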
for s_prime, T in self.mdp.getTransitionStatesAndProbs(state, action):
qval += T * ( self.mdp.getReward(state, action, s_prime) + self.discount*self.getValue(s_prime) )
return qval
def computeActionFromValues(self, state):
"""
The policy is the best action in the given state
according to the values currently stored in self.values.
You may break ties any way you see fit. Note that if
there are no legal actions, which is the case at the
terminal state, you should return None.
"""
"*** YOUR CODE HERE ***"
# TODO
policy = util.Counter()
for action in self.mdp.getPossibleActions(state):
policy[action] = self.getQValue(state, action)
return policy.argMax()
def getPolicy(self, state):
return self.computeActionFromValues(state)
def getAction(self, state):
"Returns the policy at the state (no exploration)."
return self.computeActionFromValues(state)
def getQValue(self, state, action):
return self.computeQValueFromValues(state, action)
class AsynchronousValueIterationAgent(ValueIterationAgent):
"""
* Please read learningAgents.py before reading this.*
An AsynchronousValueIterationAgent takes a Markov decision process
(see mdp.py) on initialization and runs cyclic value iteration
for a given number of iterations using the supplied
discount factor.
"""
def __init__(self, mdp, discount = 0.9, iterations = 1000):
"""
Your cyclic value iteration agent should take an mdp on
construction, run the indicated number of iterations,
and then act according to the resulting policy. Each iteration
updates the value of only one state, which cycles through
the states list. If the chosen state is terminal, nothing
happens in that iteration.
Some useful mdp methods you will use:
mdp.getStates()
mdp.getPossibleActions(state)
mdp.getTransitionStatesAndProbs(state, action)
mdp.getReward(state)
mdp.isTerminal(state)
"""
ValueIterationAgent.__init__(self, mdp, discount, iterations)
def runValueIteration(self):
"*** YOUR CODE HERE ***"
#TODO
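        # Cyclic (asynchronous) value iteration: iteration i updates only the
        # single state totalState[i % len(totalState)], writing into self.values
        # in place so later updates in the same cycle see the newest estimates.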
totalState = self.mdp.getStates()
for i in range(self.iterations): # every k
state = totalState[i % len(totalState)]
if self.mdp.isTerminal(state):
continue
actions = self.mdp.getPossibleActions(state)
optimal = max([self.getQValue(state,action) for action in actions])
self.values[state] = optimal
class PrioritizedSweepingValueIterationAgent(AsynchronousValueIterationAgent):
"""
* Please read learningAgents.py before reading this.*
A PrioritizedSweepingValueIterationAgent takes a Markov decision process
(see mdp.py) on initialization and runs prioritized sweeping value iteration
for a given number of iterations using the supplied parameters.
"""
def __init__(self, mdp, discount = 0.9, iterations = 100, theta = 1e-5):
"""
Your prioritized sweeping value iteration agent should take an mdp on
construction, run the indicated number of iterations,
and then act according to the resulting policy.
"""
self.theta = theta
ValueIterationAgent.__init__(self, mdp, discount, iterations)
def runValueIteration(self):
"*** YOUR CODE HERE ***"
q = util.PriorityQueue()
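        # Prioritized sweeping, in three phases:
        #   1) build a predecessor map for all non-terminal states,
        #   2) push every state with priority -|V(s) - max_a Q(s, a)|,
        #   3) repeatedly pop the state with the largest error, back it up, and
        #      re-push any predecessor whose error exceeds self.theta.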
totalState = self.mdp.getStates()
pred = {}
for st in totalState:
if self.mdp.isTerminal(st):
continue
for ac in self.mdp.getPossibleActions(st):
for stt,_ in self.mdp.getTransitionStatesAndProbs(st, ac):
if stt in pred:
pred[stt].add(st)
else:
pred[stt] = {st}
for st in self.mdp.getStates():
if self.mdp.isTerminal(st):
continue
diff = abs(self.values[st] - max([ self.computeQValueFromValues(st, action) for action in self.mdp.getPossibleActions(st) ]) )
q.update(st, -diff)
for i in range(self.iterations):
if q.isEmpty():
break
st = q.pop()
if not self.mdp.isTerminal(st):
self.values[st] = max([self.computeQValueFromValues(st, action) for action in self.mdp.getPossibleActions(st)])
for p in pred[st]:
if self.mdp.isTerminal(p):
continue
difff = abs(self.values[p] - max([self.computeQValueFromValues(p, action) for action in self.mdp.getPossibleActions(p)]))
if difff > self.theta:
q.update(p, -difff)