Dataset schema: max_stars_repo_path (string, 4-286 chars) | max_stars_repo_name (string, 5-119 chars) | max_stars_count (int64, 0-191k) | id (string, 1-7 chars) | content (string, 6 chars-1.03M) | content_cleaned (string, 6 chars-1.03M) | language (111 classes) | language_score (float64, 0.03-1) | comments (string, 0-556k chars) | edu_score (float64, 0.32-5.03) | edu_int_score (int64, 0-5)
tests/test_util.py | Spotchi/dETaiL | 3 stars | id 6631851
def save_int(int_num, filepath):
    # Persist a single integer to a text file.
    assert isinstance(int_num, int)
    with open(filepath, 'w') as fd:
        fd.write(str(int_num))

def load_int(filepath):
    # Read back the integer written by save_int.
    with open(filepath, 'r') as fd:
        num = int(fd.read())
    return num
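A minimal round-trip check for these helpers, as a sketch (assumes pytest and its tmp_path fixture; the test below is illustrative, not part of the repo):

def test_save_load_roundtrip(tmp_path):
    # Write an int, read it back, and confirm the value survives the trip.
    path = tmp_path / "num.txt"
    save_int(42, str(path))
    assert load_int(str(path)) == 42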
| language: none | language_score: 1 | edu_score: 2.93745 | edu_int_score: 3 |
|
setup.py | ljstella/python_intensifies | 0 stars | id 6631852

from setuptools import setup
setup(
    name='python_intensifies',
    version='0.2.1',
    author="<NAME>",
    author_email="<EMAIL>",
    keywords=['intensify intensifies shaking memes'],
    py_modules=['intensify'],
    url="https://github.com/CarvellScott/",
    install_requires=[
        "Pillow>=7.1.0"
    ],
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Intended Audience :: End Users/Desktop',
        'Topic :: Artistic Software',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 3',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6'
    ],
    description="A tool for INTENSIFYING images.",
    long_description=open('README.md').read(),
    long_description_content_type="text/markdown",
    entry_points={
        "console_scripts": [
            "intensify=intensify:main"
        ]
    }
)
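The console_scripts entry point requires intensify.py to expose a main() callable; a hypothetical minimal skeleton (the argument handling below is an assumption, not the package's real code):

# intensify.py -- illustrative skeleton only
import argparse

def main():
    # Parse the CLI arguments and hand off to the real intensifying logic.
    parser = argparse.ArgumentParser(description="INTENSIFY an image.")
    parser.add_argument("image", help="path to the input image")
    args = parser.parse_args()
    print("would intensify", args.image)  # placeholder for the actual Pillow work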
| language: none | language_score: 1 | edu_score: 1.285815 | edu_int_score: 1 |
|
Demo/winApiTest.py | liuyu2022/game_robot | 1 star | id 6631853

from traceback import print_tb  # imported but unused in this script
import win32api
import win32gui
import win32con  # win32 API related modules
import time
import cv2
import numpy as np
from PIL import Image
import collections
from gui_controls import Controls

def get_hwnd():
    hWndList = []
    win32gui.EnumWindows(lambda hWnd, param: param.append(hWnd), hWndList)
    ret_hwnd = 0
    for hwnd in hWndList:
        title = win32gui.GetWindowText(hwnd)
        # Move the target window to (600, 300) and resize it to (600, 600).
        if (title.find("九阴真经 江湖") >= 0) or (title.find("九阴真经") >= 0):
            print(hwnd, title)
            if hwnd > 0:
                ret_hwnd = hwnd
    return ret_hwnd

Box = collections.namedtuple('Box', 'left top width height')

# k 0x4B
# j 0x4A
# win32con.VK_LEFT  left arrow
# VK_UP    up arrow
# VK_RIGHT right arrow
# VK_DOWN  down arrow
# win32api.PostMessage(8915604, win32con.WM_KEYDOWN, win32con.VK_LEFT, 0)  # send the F9 key
# win32api.PostMessage(8915604, win32con.WM_KEYUP, win32con.VK_LEFT, 0)

# Button click
def test_done1():
    win32api.PostMessage(200352, win32con.WM_LBUTTONDOWN, 1, 0x003002F3)
    time.sleep(1)
    win32api.PostMessage(200352, win32con.WM_LBUTTONUP, 1, 0x003002F3)

def yidong(hwnd):
    Controls.activate_hwnd(hwnd)
    Controls.key_post(hwnd, 75)
    win32api.PostMessage(hwnd, win32con.WM_KEYDOWN,
                         win32con.VK_RIGHT, 0x014D0001)
    time.sleep(0.5)
    win32api.PostMessage(hwnd, win32con.WM_KEYUP,
                         win32con.VK_RIGHT, 0xC14D0001)

# Button click
def mouse_click(hwnd, x, y):
    point = win32api.MAKELONG(x, y)
    win32api.PostMessage(hwnd, win32con.WM_LBUTTONDOWN, 1, point)
    time.sleep(1)
    win32api.PostMessage(hwnd, win32con.WM_LBUTTONUP, 1, point)

# Window maximized/minimized state
def window_test(hwnd):
    _, win_type, _, _, _ = win32gui.GetWindowPlacement(hwnd)
    print(win_type)

def get_hwnd_offset(hwnd, x, y):
    left, top, right, bottom = win32gui.GetWindowRect(hwnd)
    return x+left, y+top

def input_hwnd(hwnd, input_list):
    # Clear the field first: press Right, then Backspace, a few times each.
    for _ in range(5):
        win32api.PostMessage(hwnd, win32con.WM_KEYDOWN, 0x27, 0x1F0001)
        time.sleep(0.2)
        win32api.PostMessage(hwnd, win32con.WM_KEYUP, 0x27, 0x1F0001)
    for _ in range(5):
        win32api.PostMessage(hwnd, win32con.WM_KEYDOWN, 0x8, 0x1F0001)
        time.sleep(0.2)
        win32api.PostMessage(hwnd, win32con.WM_KEYUP, 0x8, 0x1F0001)
    for ch in input_list:  # renamed from `input` to avoid shadowing the builtin
        win32api.PostMessage(hwnd, win32con.WM_CHAR, ch, 0x1F0001)

def move_to_pos(hwnd, x, y):
    # Open the map with the "m" key.
    Controls.activate_hwnd(hwnd)
    # Pull the camera distance all the way in.
    # Controls.win_gunlun_qian(hwnd)
    # Controls.key_post(hwnd,0x4D)
    # # x,y = get_hwnd_offset(hwnd,115,34)
    # Controls.win_mouse_click(hwnd,115,34)
    # input_hwnd(hwnd,[0x35,0x39,0x31])
    # Controls.win_mouse_click(hwnd,185,40)
    # input_hwnd(hwnd,[0x32,0x32,0x36])
    # Controls.win_mouse_click(hwnd,223,33)
    # time.sleep(1)
    # Controls.win_mouse_click(hwnd,320,274)
    # # Close the map.
    # Controls.key_post(hwnd,0x4D)
    # # Move to the escort-quest pickup point.
    # time.sleep(2)
    box = check(hwnd)
    Controls.win_mouse_move(hwnd, 700, 93)
    time.sleep(1)
    Controls.win_mouse_click(hwnd, 700, 93)
    time.sleep(1)
    # Controls.win_mouse_click(hwnd,150,221)
    # Controls.get_screen(hwnd)
    box = Controls.locate2(
        r"D:\project\python\jiuyin_robot\image\lb_jiebiao.png")  # raw string keeps the backslashes literal
    # if box:
    #     print("accept escort, step 1")
    #     Controls.win_mouse_move(hwnd,244,479)
    #     Controls.win_mouse_click(hwnd,244,479)
    #     Controls.get_screen(hwnd)
    #     biaoche = Controls.locate2(r"D:\project\python\jiuyin_robot\image\lb_jiebiaot.png",0.5)
    #     if biaoche:
    #         print("accept escort, step 2")
    #         Controls.win_mouse_move(hwnd,658,492)
    #         Controls.win_mouse_click(hwnd,658,492)
    #         time.sleep(0.2)
    #         Controls.get_screen(hwnd)
    #         queding = Controls.locate2(r"D:\project\python\jiuyin_robot\image\lb_queding.png")
    #         if queding:
    #             print("confirm the escort quest")
    #             Controls.win_mouse_move(hwnd,398,342)
    #             Controls.win_mouse_click(hwnd,398,342)
    Controls.get_screen(hwnd)
    # jiache = Controls.locate2(r"D:\project\python\jiuyin_robot\image\lb_jiache.png")
    # if jiache:
    #     print("choose to drive the cart")
    #     x,y = get_xy(jiache)
    #     Controls.win_mouse_click(hwnd,x,y)
    status = Controls.locate(
        r"D:\project\python\jiuyin_robot\image\lb_icon.png")
    if status:
        print("escort status ok")

def get_xy(box):
    x = box.left + box.width/2
    y = box.top - box.height
    return int(x), int(y)

def check(hwnd):
    Controls.activate_hwnd(hwnd)
    offset = (600, 31, 200, 200)
    while True:  # scans forever; the caller is expected to stop the script
        for x in range(637, 759, 5):
            for y in range(31, 154, 5):
                Controls.win_mouse_move(hwnd, x, y, 0.1)
                Controls.get_screen(hwnd)
                box = Controls.locate(
                    r"D:\project\python\jiuyin_robot\image\cj_tiekuang.png", 0.5, offset_form=offset)
                if box:
                    print(box)
                    # Controls.win_mouse_click_box(hwnd,box,True)
                    Controls.win_mouse_click(hwnd, x, y)

# Flash the window in the taskbar
def fluash_hwnd(hwnd):
    win32gui.FlashWindowEx(hwnd, True, 5, 0)

def tanwei(hwnd):
    point = win32api.MAKELONG(479, 344)
    win32api.PostMessage(hwnd, win32con.WM_LBUTTONDOWN, 1, point)
    time.sleep(0.2)
    win32api.PostMessage(hwnd, win32con.WM_LBUTTONUP, 1, point)

def anjian(hwnd):
    Controls.activate_hwnd(hwnd)
    Controls.key_post(hwnd, 0x68)
    for _ in range(100):
        Controls.key_post(hwnd, 56)
        time.sleep(0.1)

def labiao_npc_text(hwnd):
    Controls.activate_hwnd(hwnd)
    Controls.key_post(hwnd, 78)
    # time.sleep(1)

if __name__ == "__main__":
    hwnd = get_hwnd()
    check(hwnd)
    # Controls.win_mouse_click(hwnd,696,89,False)
    # labiao_npc_text(hwnd)
    # Controls.activate_hwnd(hwnd)
    # anjian(hwnd)
    # win32api.MessageBox(0, "This is a test message", "Message box title", win32con.MB_OK)
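The click helpers above pack client coordinates into a single lParam via win32api.MAKELONG; the equivalent bit layout, as a plain-Python sketch (no win32 needed to verify it):

def make_lparam(x, y):
    # Low 16 bits carry x, high 16 bits carry y -- the same layout MAKELONG(x, y) produces.
    return ((y & 0xFFFF) << 16) | (x & 0xFFFF)

assert make_lparam(479, 344) == (344 << 16) | 479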
| language: en | language_score: 0.201348 | edu_score: 2.55105 | edu_int_score: 3 |
scripts/run_rec_ensemble/run_nlp_strict.py | edervishaj/spotify-recsys-challenge | 3 stars | id 6631854

from utils.submitter import Submitter
from utils.post_processing import eurm_to_recommendation_list_submission
from recommenders.nlp_strict import NLPStrict
import sys
import datetime
import scipy.sparse as sps
from utils.datareader import Datareader
from utils.evaluator import Evaluator
import numpy as np
from recommenders.similarity.dot_product import dot_product
from recommenders.similarity.s_plus import tversky_similarity
from utils.post_processing import eurm_to_recommendation_list, eurm_remove_seed
from utils.pre_processing import bm25_row
from utils.sparse import *

if __name__ == '__main__':
    mode = "online"
    name = "nlp_strict"
    knn = 50
    topk = 750
    save_eurm = True

    if mode == 'offline':
        # Setup
        dr = Datareader(mode=mode, verbose=False, only_load=True)
        ev = Evaluator(dr)
        urm = dr.get_urm()
        test_pids = dr.get_test_pids()

        # Init object
        nlp_strict = NLPStrict(dr)

        # Get ucm
        ucm = nlp_strict.get_UCM()

        # Compute similarity (playlists x playlists)
        sim = tversky_similarity(ucm, ucm.T, shrink=200, alpha=0.1, beta=1, k=knn)
        sim = sim.tocsr()

        # Recommendation
        eurm = dot_product(sim, urm, k=topk)
        eurm = eurm.tocsr()
        eurm = eurm[test_pids, :]
        rec_list = eurm_to_recommendation_list(eurm, dr)
        if save_eurm:
            sps.save_npz(mode + "_" + name + ".npz", eurm, compressed=False)

        # Submission
        ev.evaluate(rec_list, name=name)

    elif mode == 'online':
        # Setup
        dr = Datareader(mode=mode, verbose=False, only_load=True)
        sb = Submitter(dr)
        urm = dr.get_urm()
        test_pids = dr.get_test_pids()

        # Init object
        nlp_strict = NLPStrict(dr)

        # Get ucm
        ucm = nlp_strict.get_UCM()
        print(ucm.shape)

        # Do not train on challenge set
        ucm_T = ucm.copy()
        inplace_set_rows_zero(ucm_T, test_pids)
        ucm_T = ucm_T.T

        # Compute similarity (playlists x playlists)
        sim = tversky_similarity(ucm, ucm_T, shrink=200, alpha=0.1, beta=1, k=knn)
        sim = sim.tocsr()

        # Recommendation
        eurm = dot_product(sim, urm, k=topk)
        eurm = eurm.tocsr()
        eurm = eurm[test_pids, :]
        rec_list = eurm_to_recommendation_list(eurm, dr)
        if save_eurm:
            sps.save_npz(mode + "_" + name + ".npz", eurm, compressed=False)

        # Submission
        sb.submit(rec_list, name=name)

    else:
        print('Wrong mode!')
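For reference, the Tversky index that tversky_similarity computes between two playlist feature vectors X and Y is, in its standard form,

    S(X, Y) = |X ∩ Y| / (|X ∩ Y| + α·|X \ Y| + β·|Y \ X|)

with α = 0.1 and β = 1 as passed above; the shrink=200 term presumably enters the denominator as an additional regularizer (an assumption about this implementation, not confirmed by the source).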
| language: en | language_score: 0.705276 | edu_score: 2.168171 | edu_int_score: 2 |
Allura/allura/model/__init__.py | rohankumardubey/allura | 1 star | id 6631855

# -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""The application's model objects"""
from __future__ import unicode_literals
from __future__ import absolute_import
from .neighborhood import Neighborhood, NeighborhoodFile
from .project import Project, ProjectCategory, TroveCategory, ProjectFile, AppConfig
from .index import ArtifactReference, Shortlink
from .artifact import Artifact, MovedArtifact, Message, VersionedArtifact, Snapshot, Feed, AwardFile, Award, AwardGrant
from .artifact import VotableArtifact
from .discuss import Discussion, Thread, PostHistory, Post, DiscussionAttachment
from .attachments import BaseAttachment
from .auth import AuthGlobals, User, ProjectRole, EmailAddress, OldProjectRole
from .auth import AuditLog, audit_log, AlluraUserProperty, UserLoginDetails
from .filesystem import File
from .notification import Notification, Mailbox, SiteNotification
from .repository import Repository, RepositoryImplementation
from .repository import MergeRequest, GitLikeTree
from .stats import Stats
from .oauth import OAuthToken, OAuthConsumerToken, OAuthRequestToken, OAuthAccessToken
from .monq_model import MonQTask
from .webhook import Webhook
from .multifactor import TotpKey
from .types import ACE, ACL, EVERYONE, ALL_PERMISSIONS, DENY_ALL, MarkdownCache
from .session import main_doc_session, main_orm_session, main_explicitflush_orm_session
from .session import project_doc_session, project_orm_session
from .session import artifact_orm_session, repository_orm_session
from .session import task_orm_session
from .session import ArtifactSessionExtension
from . import repository
from . import repo_refresh
from ming.orm import Mapper
Mapper.compile_all()
__all__ = [
    'Neighborhood', 'NeighborhoodFile', 'Project', 'ProjectCategory', 'TroveCategory', 'ProjectFile', 'AppConfig',
    'ArtifactReference', 'Shortlink', 'Artifact', 'MovedArtifact', 'Message', 'VersionedArtifact', 'Snapshot', 'Feed',
    'AwardFile', 'Award', 'AwardGrant', 'VotableArtifact', 'Discussion', 'Thread', 'PostHistory', 'Post',
    'DiscussionAttachment', 'BaseAttachment', 'AuthGlobals', 'User', 'ProjectRole', 'EmailAddress', 'OldProjectRole',
    'AuditLog', 'audit_log', 'AlluraUserProperty', 'File', 'Notification', 'Mailbox', 'Repository',
    'RepositoryImplementation', 'MergeRequest', 'GitLikeTree', 'Stats', 'OAuthToken', 'OAuthConsumerToken',
    'OAuthRequestToken', 'OAuthAccessToken', 'MonQTask', 'Webhook', 'ACE', 'ACL', 'EVERYONE', 'ALL_PERMISSIONS',
    'DENY_ALL', 'MarkdownCache', 'main_doc_session', 'main_orm_session', 'project_doc_session', 'project_orm_session',
    'artifact_orm_session', 'repository_orm_session', 'task_orm_session', 'ArtifactSessionExtension', 'repository',
    'repo_refresh', 'SiteNotification', 'TotpKey', 'UserLoginDetails', 'main_explicitflush_orm_session']
| language: en | language_score: 0.864803 | edu_score: 1.032036 | edu_int_score: 1 |
tensorflow_decision_forests/tensorflow/tf_logging.py | Hawk94/decision-forests | 412 stars | id 6631856

# Copyright 2021 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Logging primitives.

Replacement of absl's logging primitives that are always visible to the user.
"""
from contextlib import contextmanager  # pylint: disable=g-importing-member
import sys

from tensorflow_decision_forests.tensorflow.ops.training import op as training_op

# Background
# ==========
#
# By default, logs of the Yggdrasil C++ training code are shown on the
# "standard C output" and "error" channels (COUT and CERR). When executing
# python code in a script, those channels are displayed alongside Python
# standard output (i.e. the output of python's "print" function). When running
# in a colab or a notebook, the COUT and CERR channels are not printed i.e. they
# are not visible to the user (unless the user looks in the colab logs). In this
# case, the COUT and CERR channels need to be "redirected" to the python's
# standard output.
#
# This parameter
# ==============
#
# If this parameter is set to "auto", and if the code is detected as being
# executed in a colab or notebook, the COUT and CERR are redirected to the
# python's standard output.
#
# If this parameter is True, the COUT and CERR are redirected.
# If this parameter is False, the COUT and CERR are not redirected.
#
# If the detection of the running environment is incorrect, the training logs
# might not appear in colab (false negative) or the script will hang (stuck when
# the redirection is setup; false positive). If you face one of those
# situations, please ping us.
REDIRECT_YGGDRASIL_CPP_OUTPUT_TO_PYTHON_OUTPUT = "auto"

def info(msg, *args):
  """Print an info message visible to the user.

  To use instead of absl.logging.info (to be visible in colabs).

  Usage example:
    logging_info("Hello %s", "world")
  """
  print(msg % args)

def warning(msg, *args):
  """Print a warning message visible to the user.

  To use instead of absl.logging.info (to be visible in colabs).

  Usage example:
    logging_warning("Hello %s", "world")
  """
  print("Warning: ", msg % args)

def capture_cpp_log_context(verbose=False):
  """Creates a context to display or hide the c++ code logs to the user.

  Tested with python, ipython, colab and jupyter notebook.
  In the internal build, this only impacts python's print.

  Args:
    verbose: If true, the training logs are shown in logging and python's
      print. If false, the training logs are not shown in logging nor
      python's print.

  Returns:
    A context.
  """

  # Does nothing
  @contextmanager
  def no_op_context():
    yield

  # Hide the Yggdrasil training logs.
  @contextmanager
  def hide_cpp_logs():
    # Stop displaying c++ logs.
    set_yggdrasil_logging_level(0)
    try:
      yield
    finally:
      # Re-start displaying c++ logs.
      set_yggdrasil_logging_level(2)

  if not verbose:
    # Make sure the c++ logs are not visible to the user.
    return hide_cpp_logs()

  if ((REDIRECT_YGGDRASIL_CPP_OUTPUT_TO_PYTHON_OUTPUT == "auto" and
       sys.stdout.isatty()) or
      not REDIRECT_YGGDRASIL_CPP_OUTPUT_TO_PYTHON_OUTPUT):
    # The cout and cerr of the c++ library are already visible to the user.
    return no_op_context()

  # pytype: disable=import-error
  # pylint: disable=g-import-not-at-top
  # pylint: disable=g-importing-member
  # pylint: disable=bare-except

  # The cout and cerr of the c++ library are not visible to the user.
  # Redirect them to python's standard output.
  try:
    from colabtools.googlelog import CaptureLog
    return CaptureLog()
  except:
    try:
      from wurlitzer import sys_pipes
      # This can hang if the cout/cerr is visible to the user.
      return sys_pipes()
    except:
      warning("Cannot redirect the training output because neither of "
              "colabtools.googlelog or wurlitzer available. Run `pip install "
              "wurlitzer -U` and try again.")
      return no_op_context()

  # pylint: enable=g-importing-member
  # pytype: enable=import-error
  # pylint: enable=g-import-not-at-top
  # pylint: enable=bare-except

def set_yggdrasil_logging_level(level: int) -> None:
  """Sets the amount of logging in Yggdrasil Decision Forests code.

  No-op in the internal build.
  See: yggdrasil_decision_forests::logging::SetLoggingLevel.

  Args:
    level: Logging level.
  """
  training_op.yggdrasil_decision_forests_set_logging_level(level=level)
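A sketch of how the context manager is meant to wrap training so the C++ logs surface in a notebook (the model and dataset names below are illustrative assumptions, not part of this module):

from tensorflow_decision_forests.tensorflow import tf_logging

with tf_logging.capture_cpp_log_context(verbose=True):
    # Yggdrasil C++ training output is redirected into Python's stdout here.
    model.fit(train_ds)  # `model` and `train_ds` are assumed to already exist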
| language: en | language_score: 0.792004 | edu_score: 1.818107 | edu_int_score: 2 |
Restaurant_Finder_App/restaurant_finder_app/restaurant_finder_app/authentication/utils/forms.py | midhun3112/restaurant_locator | 0 stars | id 6631857

from django.contrib.auth.forms import UserCreationForm, UserChangeForm
from django.contrib.auth.forms import AuthenticationForm, PasswordResetForm, SetPasswordForm
from django import forms
from authentication.models import User, UserProfile
from django.utils.translation import gettext as _

class UserCreationForm(UserCreationForm):
    """
    A form that creates a user, with no privileges, from the given email and
    password.
    """
    def __init__(self, *args, **kargs):
        super(UserCreationForm, self).__init__(*args, **kargs)
        if 'username' in self.fields:
            del self.fields['username']

    class Meta:
        model = User
        fields = ("email",)

class UserChangeForm(UserChangeForm):
    """A form for updating users. Includes all the fields on
    the user, but replaces the password field with admin's
    password hash display field.
    """
    def __init__(self, *args, **kargs):
        super(UserChangeForm, self).__init__(*args, **kargs)
        if 'username' in self.fields:
            del self.fields['username']

    class Meta:
        model = User
        fields = ("email",)

# If you don't do this you cannot use Bootstrap CSS
class LoginForm(AuthenticationForm):
    username = forms.EmailField(widget=forms.TextInput(attrs={'required': True, 'max_length': 30, 'placeholder': "Enter your email id", 'style': 'width:84%'}))
    password = forms.CharField(widget=forms.PasswordInput(attrs={'required': True, 'render_value': False, 'placeholder': "Enter your password", 'style': 'width:84%'}))

# If you don't do this you cannot use Bootstrap CSS
class PasswordResetForm(PasswordResetForm):
    email = forms.EmailField(widget=forms.TextInput(attrs={'required': True, 'max_length': 30, 'placeholder': "Enter your email id", 'style': 'width:84%'}))

# If you don't do this you cannot use Bootstrap CSS
class PasswordResetConfirmForm(SetPasswordForm):
    new_password1 = forms.CharField(widget=forms.PasswordInput(attrs={'required': True, 'render_value': False, 'placeholder': "Enter your password", 'style': 'width:84%'}))
    new_password2 = forms.CharField(widget=forms.PasswordInput(attrs={'required': True, 'render_value': False, 'placeholder': "Re-Enter your password", 'style': 'width:84%'}))

class RegistrationForm(forms.Form):
    firstname = forms.CharField(widget=forms.TextInput(attrs={'required': True, 'max_length': 30, 'placeholder': "Enter your first name", 'style': 'width:84%'}))
    lastname = forms.CharField(widget=forms.TextInput(attrs={'required': True, 'max_length': 30, 'placeholder': "Enter your last name", 'style': 'width:84%'}))
    email = forms.EmailField(widget=forms.TextInput(attrs={'required': True, 'max_length': 30, 'placeholder': "Enter your email id", 'style': 'width:84%'}))
    password1 = forms.CharField(widget=forms.PasswordInput(attrs={'required': True, 'render_value': False, 'placeholder': "Enter your password", 'style': 'width:84%'}))
    password2 = forms.CharField(widget=forms.PasswordInput(attrs={'required': True, 'render_value': False, 'placeholder': "Re-Enter your password", 'style': 'width:84%'}))

    def clean_email(self):
        try:
            user = User.objects.get(email__iexact=self.cleaned_data['email'])
        except User.DoesNotExist:
            return self.cleaned_data['email']
        raise forms.ValidationError(_("The email already exists. Please try another one. If you forgot the password please try the forgot password option to recover your password"))

    def clean(self):
        if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
            if self.cleaned_data['password1'] != self.cleaned_data['password2']:
                raise forms.ValidationError(_("The two password fields did not match."))
        return self.cleaned_data

class UserProfileForm(forms.ModelForm):
    class Meta:
        model = UserProfile
        fields = ('birth_date', 'genre', 'address1', 'address2', 'postal_code', 'state', 'country', 'is_owner')
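A sketch of how RegistrationForm would typically be wired into a view (a standard Django pattern; the view name, template path, and create_user call are assumptions, not code from this repo):

from django.shortcuts import render, redirect

def register(request):
    form = RegistrationForm(request.POST or None)
    if request.method == 'POST' and form.is_valid():
        # clean_email()/clean() above have already validated the email and passwords.
        User.objects.create_user(email=form.cleaned_data['email'],
                                 password=form.cleaned_data['password1'])
        return redirect('login')
    return render(request, 'registration/register.html', {'form': form})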
| language: en | language_score: 0.851964 | edu_score: 2.670113 | edu_int_score: 3 |
gradedProject/gradedApp/migrations/0001_initial.py | cs-fullstack-2019-spring/django-models3-cw-cgarciapieto | 0 stars | id 6631858
# Generated by Django 2.2 on 2019-02-21 17:22
from django.db import migrations, models

class Migration(migrations.Migration):

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Book',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                ('pageNumber', models.IntegerField()),  # IntegerField takes no max_length argument
                ('genre', models.CharField(max_length=200)),
                ('pub_date', models.DateField()),
            ],
        ),
        migrations.CreateModel(
            name='Car',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('make', models.CharField(max_length=200)),
                ('model', models.CharField(max_length=200)),
                ('year', models.DateField()),
            ],
        ),
    ]
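The models.py that would produce this migration, as a sketch (field types mirror the migration exactly; only the class layout is inferred):

from django.db import models

class Book(models.Model):
    name = models.CharField(max_length=200)
    pageNumber = models.IntegerField()
    genre = models.CharField(max_length=200)
    pub_date = models.DateField()

class Car(models.Model):
    make = models.CharField(max_length=200)
    model = models.CharField(max_length=200)
    year = models.DateField()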
| language: en | language_score: 0.797742 | edu_score: 2.071491 | edu_int_score: 2 |
python/books/introduction_to_practice/p10/exce1.py | ShenJinXiang/example | 0 stars | id 6631859
#!/usr/bin/env python3
try:
    num = int(input('input number:'))
    print('number:', num)
except Exception as err:
    print("error:", err)
print("error:", err) | fr | 0.221828 | #!/usr/bin/env python3 | 3.552658 | 4 |
src/mox-inventory.py | queercat/Mox-Inventory | 0 stars | id 6631860
#!/usr/local/bin/python3
# Mox Inventory -- A Scryfall powered opensource MTG inventory utility.
import json  # Handles JSON data.
import sqlite3  # Backend DB for MOXI.
import card_util  # A utility for creating and handling magic cards.
import requests  # Handles even more web reqs.
import flask  # Awesome microservice for handling web reqs.
import hashlib  # Creating hashes for card IDs.

with open('config.json', 'r') as config_file:
    config_obj = json.loads(config_file.read())
    db_location = config_obj['db_location']

# Opens the DB and returns the connection and a cursor.
def init_db():
    conn = sqlite3.connect(db_location, check_same_thread=False)  # Monitor to make sure no data corruption with multiple writes.
    return conn, conn.cursor()

# Variables for the conn and cursor to the DB.
conn, cursor = init_db()

# Update DB ... Commits changes to DB.
def update_db(info):
    conn.execute('INSERT INTO db_interactions VALUES (?)', [info])
    conn.commit()

# Hashes using MD5 from name and set to generate a unique ID.
def generate_hash(name, from_set):
    md5 = hashlib.md5()
    md5.update((name + from_set).encode())
    return md5.hexdigest()

# Adds a card to the inventory DB.
def add_card(card_name, from_set, quantity):
    if from_set is None:
        from_set = card_util.get_set_from_name(card_name)
    converted_code = card_util.to_code(from_set.lower())
    if converted_code is not None:
        from_set = converted_code
    hash_id = generate_hash(card_name, from_set)
    # Checks if the card exists; if it does, increment the quantity, otherwise create the row first.
    card = cursor.execute('SELECT * FROM card_inventory WHERE hash_id = ?', [hash_id]).fetchone()
    if card is None:
        cursor.execute('INSERT INTO card_inventory VALUES (?, ?, ?, ?)', [card_name, quantity, from_set, hash_id])
    else:
        cursor.execute('UPDATE card_inventory SET quantity = quantity + ? WHERE hash_id = ?', [quantity, hash_id])
    update_db('Added {} {} from set {}.'.format(quantity, card_name, from_set))

# Removes a card from the inventory DB; adds it to the invoice list if money was made.
def remove_card(card_name, from_set, quantity, sell_price):
    hash_id = generate_hash(card_name, from_set)
    qty = cursor.execute('SELECT quantity FROM card_inventory WHERE hash_id = ?', [hash_id]).fetchone()
    if qty is not None and qty[0] > quantity:
        cursor.execute('UPDATE card_inventory SET quantity = quantity - ? WHERE hash_id = ?', [quantity, hash_id])
        update_db('Removed {} {} from set {}.'.format(quantity, card_name, from_set))
    if sell_price > 0:
        card = cursor.execute('SELECT card_name FROM invoice_list WHERE hash_id = ? AND sell_price = ?', [hash_id, sell_price])
        if card is not None:
            cursor.execute('UPDATE invoice_list SET quantity = quantity + ? WHERE hash_id = ? AND sell_price = ?', [quantity, hash_id, sell_price])
        else:
            cursor.execute('INSERT INTO invoice_list VALUES (?, ?, ?, ?, ?)', [card_name, quantity, from_set, sell_price, hash_id])
        update_db('Sold {} {} from set {} for {}.'.format(quantity, card_name, from_set, sell_price))

# Returns a list version of all cards that match that name and set.
def get_card(card_name, from_set):
    if from_set is None:
        cards_resp = cursor.execute('SELECT card_name, from_set, quantity FROM card_inventory WHERE card_name = ?', [card_name])
        return cards_resp.fetchall()
    else:
        converted_code = card_util.to_code(from_set.lower())
        if converted_code is not None:
            from_set = converted_code
        hash_id = generate_hash(card_name, from_set)
        card_resp = cursor.execute('SELECT card_name, from_set, quantity FROM card_inventory WHERE hash_id = ?', [hash_id])
        return card_resp.fetchone()

# Returns a list of all cards in the DB.
def get_all_cards():
    cards_resp = cursor.execute('SELECT card_name, from_set, quantity FROM card_inventory')
    cards = cards_resp.fetchall()
    return cards

# Flask stuff! :)
app = flask.Flask(__name__, static_url_path='', static_folder='web')

@app.route('/')
def index():
    return flask.send_from_directory('web', 'index.html')

@app.route('/<path:path>')
def serve(path):
    return flask.send_from_directory('web', path)

@app.route('/cards/search/', methods=['GET'])
def search_return_card():
    card_name = flask.request.args.get('card', None)
    from_set = flask.request.args.get('set', None)
    return flask.jsonify(get_card(card_name, from_set))

@app.route('/cards/get/', methods=['GET'])
def return_all_cards():
    cards = get_all_cards()
    cards_list = []
    for card in cards:
        name = card[0]
        from_set = card[1]
        quantity = card[2]
        cards_list.append({'name': name, 'from_set': from_set, 'quantity': quantity})
    cards_json = flask.jsonify(cards_list)
    return cards_json

@app.route('/cards/add/', methods=['GET'])
def add_card_to_db():
    print(flask.request.args)
    card_name = flask.request.args.get('card', None)
    from_set = flask.request.args.get('set', None)
    quantity = int(flask.request.args.get('qty', 1))  # query args arrive as strings
    add_card(card_name, from_set, quantity)
    return 'Added {} {} from set {}'.format(quantity, card_name, from_set)

@app.route('/cards/<uuid>', methods=['POST'])
def handle_add(uuid):  # the <uuid> URL segment must be accepted as a parameter
    message = flask.request.json
    token = message['token']  # Some private token IDK.
    additions = message['additions']  # Additions are going to be raising the quantity of a card.
    removals = message['removals']  # Removals are going to be lowering the quantity of a card.
    for card in additions:
        card_name = card['card_name']
        quantity = card['quantity']
        from_set = card['from_set']
        add_card(card_name, from_set, quantity)  # argument order matches add_card's signature
    for card in removals:
        card_name = card['card_name']
        quantity = card['quantity']
        from_set = card['from_set']
        sell_price = card['sell_price']
        remove_card(card_name, from_set, quantity, sell_price)  # argument order matches remove_card's signature
    return 'OK'  # Flask views must return a response

if __name__ == "__main__":
    app.run('0.0.0.0', port=80)
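A quick smoke test of the GET endpoints above, using requests (a sketch; assumes the server is running locally on port 80 as configured, and the card/set values are illustrative):

import requests

# Add one copy of a card, then list the inventory -- mirrors /cards/add/ and /cards/get/.
requests.get('http://localhost/cards/add/', params={'card': 'Lightning Bolt', 'set': 'M10', 'qty': 1})
print(requests.get('http://localhost/cards/get/').json())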
# Mox Inventory -- A Scryfall powered opensource MTG invetory utility.
import json # Handles JSON data.
import sqlite3 # Backend DB for MOXI.
import card_util # A utility for creating and handling magic cards.
import requests # Handles even more web reqs.
import flask # Awesome microservice for handling web reqs.
import hashlib # Creating hashes for card IDs.
with open('config.json', 'r') as config_file:
config_obj = json.loads(config_file.read())
db_location = config_obj['db_location']
# Loads the DB and returns a cursor.
def init_db():
conn = sqlite3.connect(db_location, check_same_thread = False) # Monitor to make sure no data corruption with multiple writes.
return conn, conn.cursor()
# Variables for the conn and cursor the the DB.
conn, cursor = init_db()
# Update DB ... Commits changes to DB.
def update_db(info):
conn.execute('INSERT INTO db_interactions VALUES (?)', [info])
conn.commit()
# Hashes using MD5 from name and set to generate a unique ID.
def generate_hash(name, from_set):
md5 = hashlib.md5()
md5.update((name + from_set).encode())
return md5.hexdigest()
# Adds a card to the inventory DB.
def add_card(card_name, from_set, quantity):
if from_set is None:
from_set = card_util.get_set_from_name(card_name)
converted_code = card_util.to_code(from_set.lower())
if converted_code is not None:
from_set = converted_code
hash_id = generate_hash(card_name, from_set)
# Checks if card exists, if it does it increments the value, if not, it creates the card and then increments the value by the quantity.
card = cursor.execute('SELECT * FROM card_inventory WHERE hash_id = ?', [hash_id]).fetchone()
if card is None:
cursor.execute('INSERT INTO card_inventory VALUES (?, ?, ?, ?)', [card_name, quantity, from_set, hash_id])
else:
cursor.execute('UPDATE card_inventory SET quantity = quantity + ? WHERE hash_id = ?', [quantity, hash_id])
update_db('Added {} {} from set {}.'.format(quantity, card_name, from_set))
# Removes a card from the inventory DB adds it to the invoice list if money was made.
def remove_card(card_name, from_set, quantity, sell_price):
hash_id = generate_hash(card_name, from_set)
qty = cursor.execute('SELECT quantity FROM card_inventory WHERE hash_id = ?', [hash_id]).fetchone()
if qty is not None and qty[0] > quantity:
cursor.execute('UPDATE card_inventory SET quantity = quantity - ? WHERE hash_id = ?', [quantity, hash_id])
update_db('Removed {} {} from set {}.'.format(quantity, card_name, from_set))
if sell_price > 0:
card = cursor.execute('SELECT card_name FROM invoice_list WHERE hash_id = ? AND sell_price = ?', [hash_id, sell_price])
if card is not None:
cursor.execute('UPDATE invoice_list SET quantity = quantity + ? WHERE hash_id = ? AND sell_price = ?', [quantity, hash_id, sell_price])
else:
cursor.execute('INSERT INTO invoice_list VALUES (?, ?, ?, ?, ?)', [card_name, quantity, from_set, sell_price, hash_id])
update_db('Sold {} {} from set {} for {}.'.format(quantity, card_name, from_set, sell_price))
# Returns a list version of all cards that match that name and set.
def get_card(card_name, from_set):
cards = None
if from_set is None:
cards_resp = cursor.execute('SELECT card_name, from_set, quantity FROM card_inventory WHERE card_name = ?', [card_name])
return cards_resp.fetchall()
else:
converted_code = card_util.to_code(from_set.lower())
if converted_code is not None:
from_set = converted_code
hash_id = generate_hash(card_name, from_set)
card_resp = cursor.execute('SELECT card_name, from_set, quantity FROM card_inventory WHERE hash_id = ?', [hash_id])
return card_resp.fetchone()
# Returns a list of all cards in the DB.
def get_all_cards():
cards_resp = cursor.execute('SELECT card_name, from_set, quantity FROM card_inventory')
cards = cards_resp.fetchall()
return cards
# Flask stuff! :)
app = flask.Flask(__name__, static_url_path='', static_folder='web')
@app.route('/')
def index():
return flask.send_from_directory('web', 'index.html')
@app.route('/<path:path>')
def serve(path):
return flask.send_from_directory('web', path)
@app.route('/cards/search/', methods = ['GET'])
def search_return_card():
card_name = flask.request.args.get('card', None)
from_set = flask.request.args.get('set', None)
return(flask.jsonify(get_card(card_name, from_set)))
@app.route('/cards/get/', methods = ['GET'])
def return_all_cards():
cards = get_all_cards()
cards_list = []
for card in cards:
name = card[0]
from_set = card[1]
quantity = card[2]
cards_list.append({'name': name, 'from_set': from_set, 'quantity': quantity})
cards_json = flask.jsonify(cards_list)
return(cards_json)
@app.route('/cards/add/', methods = ['GET'])
def add_card_to_db():
print(flask.request.args)
card_name = flask.request.args.get('card', None)
from_set = flask.request.args.get('set', None)
quantity = flask.request.args.get('qty', None)
if quantity is None:
quantity = 1
add_card(card_name, from_set, quantity)
return 'Added {} {} from set {}'.format(quantity, card_name, from_set)
@app.route('/cards/<uuid>', methods = ['POST'])
def handle_add():
message = flask.request.json
token = message['token'] # Some private token IDK.
additions = message['additions'] # Additions are going to be raising the quantity of a card.
removals = message['removals'] # Removals are going to be lowering the quantity of a card.
for card in additions:
card_name = card['card_name']
quantity = card['quantity']
from_set = card['from_set']
add_card(card_name, quantity, from_set)
for card in removals:
card_name = card['card_name']
quantity = card['quantity']
from_set = card['from_set']
sell_price = card['sell_price']
remove_card(card_name, quantity, from_set, sell_price)
if __name__ == "__main__":
app.run('0.0.0.0', port=80) | en | 0.848729 | #!/usr/local/bin/python3 # Mox Inventory -- A Scryfall powered opensource MTG invetory utility. # Handles JSON data. # Backend DB for MOXI. # A utility for creating and handling magic cards. # Handles even more web reqs. # Awesome microservice for handling web reqs. # Creating hashes for card IDs. # Loads the DB and returns a cursor. # Monitor to make sure no data corruption with multiple writes. # Variables for the conn and cursor the the DB. # Update DB ... Commits changes to DB. # Hashes using MD5 from name and set to generate a unique ID. # Adds a card to the inventory DB. # Checks if card exists, if it does it increments the value, if not, it creates the card and then increments the value by the quantity. # Removes a card from the inventory DB adds it to the invoice list if money was made. # Returns a list version of all cards that match that name and set. # Returns a list of all cards in the DB. # Flask stuff! :) # Some private token IDK. # Additions are going to be raising the quantity of a card. # Removals are going to be lowering the quantity of a card. | 2.549892 | 3 |
pi/makeAudioFiles.py | kevinvu184/SmartRecipes | 1 | 6631861 | import speech_recognition as sr
import subprocess
from google.cloud import texttospeech
import os
def getAudioFile(text, name):
# Instantiates a client
client = texttospeech.TextToSpeechClient()
# Set the text input to be synthesized
synthesis_input = texttospeech.types.SynthesisInput(text=text)
# Build the voice request, select the language code ("en-US") and the ssml
# voice gender ("neutral")
voice = texttospeech.types.VoiceSelectionParams(
language_code='en-AU',
name='en-AU-Wavenet-D',
#ssml_gender=texttospeech.enums.SsmlVoiceGender.FEM
)
# Select the type of audio file you want returned
audio_config = texttospeech.types.AudioConfig(
audio_encoding=texttospeech.enums.AudioEncoding.MP3)
# Perform the text-to-speech request on the text input with the selected
# voice parameters and audio file type
response = client.synthesize_speech(synthesis_input, voice, audio_config)
# The response's audio_content is binary.
with open(name+'.mp3', 'wb') as out:
# Write the response to the output file.
out.write(response.audio_content)
print('Audio content written to file ' + name) | import speech_recognition as sr
import subprocess
from google.cloud import texttospeech
import os
def getAudioFile(text, name):
# Instantiates a client
client = texttospeech.TextToSpeechClient()
# Set the text input to be synthesized
synthesis_input = texttospeech.types.SynthesisInput(text=text)
# Build the voice request, select the language code ("en-US") and the ssml
# voice gender ("neutral")
voice = texttospeech.types.VoiceSelectionParams(
language_code='en-AU',
name='en-AU-Wavenet-D',
#ssml_gender=texttospeech.enums.SsmlVoiceGender.FEM
)
# Select the type of audio file you want returned
audio_config = texttospeech.types.AudioConfig(
audio_encoding=texttospeech.enums.AudioEncoding.MP3)
# Perform the text-to-speech request on the text input with the selected
# voice parameters and audio file type
response = client.synthesize_speech(synthesis_input, voice, audio_config)
# The response's audio_content is binary.
with open(name+'.mp3', 'wb') as out:
# Write the response to the output file.
out.write(response.audio_content)
print('Audio content written to file ' + name) | en | 0.782209 | # Instantiates a client # Set the text input to be synthesized # Build the voice request, select the language code ("en-US") and the ssml # voice gender ("neutral") #ssml_gender=texttospeech.enums.SsmlVoiceGender.FEM # Select the type of audio file you want returned # Perform the text-to-speech request on the text input with the selected # voice parameters and audio file type # The response's audio_content is binary. # Write the response to the output file. | 3.535342 | 4 |
DAY 01/SECOND HALF/main.py | joseguilhermefmoura/Advent-of-Code-2020 | 0 | 6631862 | # Define a function that multiplies all elements one by one in a list of numbers
def multiply(list_of_numbers: list) -> int:
result = 1
for number in list_of_numbers:
result = result * number
return result
def get_puzzle_answer() -> int:
file_input = open("input.txt", "r") # Read the file
file_lines = file_input.readlines() # Get all lines from it
for line in file_lines: # For each number
for j in file_lines: # Read a new number
for k in file_lines: # And another one, then check:
if int(line) + int(j) + int(k) == 2020:
# If not solved, check it again. But if solved:
file_input.close()
return multiply([int(line), int(j), int(k)])
def __main__():
print("Merry Christmas! The answer for this problem is: {0}".format(get_puzzle_answer()))
if __name__ == "__main__":
__main__() | # Define a function that multiplies all elements one by one in a list of numbers
def multiply(list_of_numbers: list) -> int:
result = 1
for number in list_of_numbers:
result = result * number
return result
def get_puzzle_answer() -> int:
file_input = open("input.txt", "r") # Read the file
file_lines = file_input.readlines() # Get all lines from it
for line in file_lines: # For each number
for j in file_lines: # Read a new number
for k in file_lines: # And another one, then check:
if int(line) + int(j) + int(k) == 2020:
# If not solved, check it again. But if solved:
file_input.close()
return multiply([int(line), int(j), int(k)])
def __main__():
print("Merry Christmas! The answer for this problem is: {0}".format(get_puzzle_answer()))
if __name__ == "__main__":
__main__() | en | 0.809527 | # Define a function that multiplies all elements one by one in a list of numbers # Read the file # Get all lines from it # For each number # Read a new number # And another one, then check: # If not solved, check it again. But if solved: | 3.915748 | 4 |
freezerbox/group_by/dependency.py | kalekundert/freezerbox | 0 | 6631863 | <reponame>kalekundert/freezerbox
#!/usr/bin/env python3
import networkx as nx
from math import inf
from copy import deepcopy
from more_itertools import pairwise
def group_by_synthesis(products):
products = list(products)
# Sort groups by order of appearance:
group_from_arg0 = {}
arg0_from_group = {}
next_group = 0
for product in products:
arg0 = product.synthesis_args.by_index[0]
if arg0 not in arg0_from_group:
group_from_arg0[arg0] = next_group
arg0_from_group[next_group] = arg0
next_group += 1
# Construct a dependency graph:
deps = nx.DiGraph()
for i, product in enumerate(products):
arg0 = product.synthesis_args.by_index[0]
deps.add_node(
product.tag,
group=group_from_arg0[arg0],
order=i,
)
for product in products:
for dep in product.dependencies:
if dep in deps:
deps.add_edge(dep, product.tag)
# Split into groups and yield intermediates:
intermediate_from_tag = {
x.tag: x.make_intermediate(0)
for x in products
}
for group, tags in grouped_topological_sort(deps):
arg0 = arg0_from_group[group]
intermediates = [intermediate_from_tag[tag] for tag in tags]
yield arg0, intermediates
def group_by_cleanup(products):
products = list(products)
# Construct a dependency graph:
deps = nx.DiGraph()
for i, product in enumerate(products):
for j, cleanup in enumerate(product.cleanup_args):
deps.add_node((i, j), group=cleanup.by_index[0])
for pair in pairwise(enumerate(product.cleanup_args)):
(j, args_j), (k, args_k) = pair
deps.add_edge((i, j), (i, k))
# Split into groups:
for key, nodes in grouped_topological_sort(deps):
intermediates = [
products[i].make_intermediate(j+1)
for i, j in nodes
]
yield key, intermediates
def grouped_topological_sort(deps):
"""
Arguments:
deps: networkx.DiGraph
A graph of the dependencies to account for. Each node should have
a "group" attribute identifying which group it is part of. The
returned groups will be sorted by this attribute when possible.
"""
by_order = lambda x: deps.nodes[x].get('order', x)
def inner_sort(candidates, dep_counts):
best_groups = []
best_score = (inf, inf)
for type in candidates:
next_candidates = deepcopy(candidates)
next_dep_counts = deepcopy(dep_counts)
next_group = next_candidates.pop(type)
for node in next_group:
for _, child in deps.edges(node):
next_dep_counts[child] -= 1
if next_dep_counts[child] == 0:
child_type = deps.nodes[child]['group']
next_candidates.setdefault(child_type, []).append(child)
del next_dep_counts[child]
remaining_groups = inner_sort(
next_candidates,
next_dep_counts,
)
score = len(remaining_groups), type
if score < best_score:
best_score = score
best_groups = [
(type, sorted(next_group, key=by_order)),
*remaining_groups,
]
if dep_counts and not candidates:
raise nx.NetworkXUnfeasible("graph contains a cycle")
return best_groups
candidates = {}
dep_counts = {}
for v, d in deps.in_degree():
if d > 0:
dep_counts[v] = d
else:
type = deps.nodes[v]['group']
candidates.setdefault(type, []).append(v)
return inner_sort(candidates, dep_counts)
| #!/usr/bin/env python3
import networkx as nx
from math import inf
from copy import deepcopy
from more_itertools import pairwise
def group_by_synthesis(products):
products = list(products)
# Sort groups by order of appearance:
group_from_arg0 = {}
arg0_from_group = {}
next_group = 0
for product in products:
arg0 = product.synthesis_args.by_index[0]
if arg0 not in arg0_from_group:
group_from_arg0[arg0] = next_group
arg0_from_group[next_group] = arg0
next_group += 1
# Construct a dependency graph:
deps = nx.DiGraph()
for i, product in enumerate(products):
arg0 = product.synthesis_args.by_index[0]
deps.add_node(
product.tag,
group=group_from_arg0[arg0],
order=i,
)
for product in products:
for dep in product.dependencies:
if dep in deps:
deps.add_edge(dep, product.tag)
# Split into groups and yield intermediates:
intermediate_from_tag = {
x.tag: x.make_intermediate(0)
for x in products
}
for group, tags in grouped_topological_sort(deps):
arg0 = arg0_from_group[group]
intermediates = [intermediate_from_tag[tag] for tag in tags]
yield arg0, intermediates
def group_by_cleanup(products):
products = list(products)
# Construct a dependency graph:
deps = nx.DiGraph()
for i, product in enumerate(products):
for j, cleanup in enumerate(product.cleanup_args):
deps.add_node((i, j), group=cleanup.by_index[0])
for pair in pairwise(enumerate(product.cleanup_args)):
(j, args_j), (k, args_k) = pair
deps.add_edge((i, j), (i, k))
# Split into groups:
for key, nodes in grouped_topological_sort(deps):
intermediates = [
products[i].make_intermediate(j+1)
for i, j in nodes
]
yield key, intermediates
def grouped_topological_sort(deps):
"""
Arguments:
deps: networkx.DiGraph
A graph of the dependencies to account for. Each node should have
a "group" attribute identifying which group it is part of. The
returned groups will be sorted by this attribute when possible.
"""
by_order = lambda x: deps.nodes[x].get('order', x)
def inner_sort(candidates, dep_counts):
best_groups = []
best_score = (inf, inf)
for type in candidates:
next_candidates = deepcopy(candidates)
next_dep_counts = deepcopy(dep_counts)
next_group = next_candidates.pop(type)
for node in next_group:
for _, child in deps.edges(node):
next_dep_counts[child] -= 1
if next_dep_counts[child] == 0:
child_type = deps.nodes[child]['group']
next_candidates.setdefault(child_type, []).append(child)
del next_dep_counts[child]
remaining_groups = inner_sort(
next_candidates,
next_dep_counts,
)
score = len(remaining_groups), type
if score < best_score:
best_score = score
best_groups = [
(type, sorted(next_group, key=by_order)),
*remaining_groups,
]
if dep_counts and not candidates:
raise nx.NetworkXUnfeasible("graph contains a cycle")
return best_groups
candidates = {}
dep_counts = {}
for v, d in deps.in_degree():
if d > 0:
dep_counts[v] = d
else:
type = deps.nodes[v]['group']
candidates.setdefault(type, []).append(v)
return inner_sort(candidates, dep_counts) | en | 0.872993 | #!/usr/bin/env python3 # Sort groups by order of appearance: # Construct a dependency graph: # Split into groups and yield intermediates: # Construct a dependency graph: # Split into groups: Arguments: deps: networkx.DiGraph A graph of the dependencies to account for. Each node should have a "group" attribute identifying which group it is part of. The returned groups will be sorted by this attribute when possible. | 2.54738 | 3 |
generate_iglu.py | iglu-contest/task1-starting-kit | 2 | 6631864 | <gh_stars>1-10
from argparse import Namespace
import sys
sys.path.append('./python')
from bleu import compute_bleu
from train_and_eval import generate, multinomial_generate, multinomial_generate_seq2seq
from data_loader import CwCDataset
from utils import *
from vocab import load_vocab
import os
import argparse
import torch
import pickle
import pprint
import json
from glob import glob
# from vocab import Vocabulary
# 1. a function named creat_dataloader
# 2. a class Model():
# self.init(): this function will create a model
# self.generate(data=DataIGLU, output='output.txt'): this function will generte the predctions for test set
#################################################
#### You can skip this section #############
#################################################
def format_prev_utterances(prev_utterances, encoder_vocab):
prev_utterances = list(map(lambda x: list(
map(lambda y: encoder_vocab.idx2word[y.item()], x)), prev_utterances))
prev_utterances = list(map(lambda x: " ".join(x), prev_utterances))[0]
return prev_utterances
def wordify_ground_truth_utterance(ground_truth_utterance, decoder_vocab):
"""
Maps from a 2d tensor to a list of tokens and removes eos symbol
"""
return list(map(lambda x: list(map(lambda y: decoder_vocab.idx2word[y.item()], x)), ground_truth_utterance))[0][:-1]
def format_ground_truth_utterance(ground_truth_utterance, decoder_vocab):
ground_truth_utterance = wordify_ground_truth_utterance(
ground_truth_utterance, decoder_vocab)
ground_truth_utterance = " ".join(ground_truth_utterance)
return ground_truth_utterance
def format_generated_utterance(generated_utterance):
generated_utterance = list(map(lambda x: " ".join(x), generated_utterance))
return generated_utterance
def load_saved_config(model_path=''):
config_params = None
config_file = os.path.join(model_path, "config.txt")
if os.path.isfile(config_file):
_, config_params = get_config_params(config_file)
return config_params
def read_iglu_result(iglu_file):
with open(iglu_file, 'r') as f:
iglu_content = f.readlines()
iglu_kv = {}
for line in iglu_content:
if len(line.split('@@@')) != 2:
continue
(time_stamp, pred) = line.split('@@@')
iglu_kv[time_stamp] = pred
return iglu_kv
def build_ref_pred_pair(ref_dict, pred_dict):
ref_list, pred_list = [], []
for k, v in ref_dict.items():
ref_list.append([v])
if k in pred_dict:
pred_list.append(pred_dict[k])
else:
pred_list.append(' ')
return ref_list, pred_list
#########################################################
#########################################################
# !!!! this is the first function you have to implement; it will return your own dataloader
# !!!! the testing set will have the same structure and format as the validation and training data
####
def create_dataloader(data_path, split):
model_path = './saved_model/1626589670356'
config_params = load_saved_config(model_path)
gold_config_path = os.path.join(data_path, 'gold-configurations')
# with open(config_params["encoder_vocab_path"], 'rb') as f:
# encoder_vocab = pickle.load(f)
# with open(config_params["decoder_vocab_path"], 'rb') as f:
# decoder_vocab = pickle.load(f)
encoder_vocab = load_vocab('./vocabulary/encoder_vocab.pkl')
decoder_vocab = load_vocab('./vocabulary/decoder_vocab.pkl')
test_dataset = CwCDataset(
model=config_params["model"], split=split, lower=True, dump_dataset=False,
data_dir=data_path, gold_configs_dir=gold_config_path, vocab_dir=config_params[
"vocab_dir"],
encoder_vocab=encoder_vocab, decoder_vocab=decoder_vocab, load_dataset=False, transform=None
)
test_dataset.set_args(num_prev_utterances=config_params["num_prev_utterances"], blocks_max_weight=config_params["blocks_max_weight"], use_builder_actions=config_params['use_builder_actions'], include_empty_channel=config_params['include_empty_channel'], use_condensed_action_repr=config_params['use_condensed_action_repr'],
action_type_sensitive=config_params['action_type_sensitive'], feasible_next_placements=config_params['feasible_next_placements'], spatial_info_window_size=config_params["spatial_info_window_size"], counters_extra_feasibility_check=config_params["counters_extra_feasibility_check"], use_existing_blocks_counter=config_params["use_existing_blocks_counter"])
test_dl = test_dataset.get_data_loader(
batch_size=1, shuffle=False, num_workers=0)
return test_dl
# This is a class wrapper you have to implement;
# We will create a test entity by running 'model=Model()'
# We will obtain the predictions by running 'model.generate(test_set, output_path)'
class Model():
def __init__(self):
# !!!! this function does not accept any extra input
# you should explicitly feed your model path in your submission here
self.model_path = "saved_model/1626589670356"
# self.model_path="/datadrive/model/utterances_and_block_region_counters/20210718/utterances_and_block_region_counters_trainer-1626589670355/1626589670356/"
# this dict is used to store all your hyper-parameters, you can load them from a file in your submission
self.config_params = load_saved_config(self.model_path)
self.load_model(self.model_path)
# with open(self.config_params["encoder_vocab_path"], 'rb') as f:
# self.encoder_vocab = pickle.load(f)
# with open(self.config_params["decoder_vocab_path"], 'rb') as f:
# self.decoder_vocab = pickle.load(f)
self.encoder_vocab = load_vocab('./vocabulary/encoder_vocab.pkl')
self.decoder_vocab = load_vocab('./vocabulary/decoder_vocab.pkl')
print("Model has been loaded")
def load_model(self, model_path=''):
# you can load the trained models or vocabs in this function
# you need to make sure files to be loaded do exist in the path
self.model_type = self.config_params["model"]
model_files = glob(model_path+"/*-best.pkl")
self.models = {}
for model_file in model_files:
with open(model_file, 'rb') as f:
if not torch.cuda.is_available():
model = torch.load(f, map_location="cpu")
else:
model = torch.load(f)
if "flatten_parameters" in dir(model):
# print(dir(model))
model.flatten_parameters() # TODO: flatten for all sub-modules recursively
if "encoder" in model_file:
self.models["encoder"] = model
elif "decoder" in model_file:
self.models["decoder"] = model
def generate(self, test_set, output_path):
beam_size = 10
max_decoding_length = 30
gamma = 0.8
init_args = Namespace(set_decoder_hidden=self.config_params['set_decoder_hidden'],
concatenate_decoder_inputs=self.config_params['concatenate_decoder_inputs'],
concatenate_decoder_hidden=self.config_params['concatenate_decoder_hidden'],
decoder_input_concat_size=self.config_params['decoder_input_concat_size'],
decoder_hidden_concat_size=self.config_params['decoder_hidden_concat_size'],
advance_decoder_t0=self.config_params['advance_decoder_t0'])
# beam search decoding
generated_utterances_, to_print = generate(
self.models["encoder"], self.models["decoder"],
test_set, self.decoder_vocab,
beam_size=beam_size, max_length=max_decoding_length, args=init_args,
development_mode=False, gamma=gamma
)
def format(output_obj):
prev_utterances = format_prev_utterances(
output_obj["prev_utterances"], self.encoder_vocab)
ground_truth_utterance = format_ground_truth_utterance(
output_obj["ground_truth_utterance"], self.decoder_vocab)
generated_utterance = format_generated_utterance(
output_obj["generated_utterance"])
return {
"prev_utterances": prev_utterances,
"ground_truth_utterance": ground_truth_utterance,
"generated_utterance": generated_utterance,
"json_id": output_obj["json_id"],
"sample_id": output_obj["sample_id"],
"time_stamp": output_obj["time_stamp"]
}
with open(output_path, 'w') as f2:
for dia in generated_utterances_:
generated_one = format(dia)['generated_utterance'][0]
f2.write(dia['time_stamp'] +
' @@@ ' + generated_one + '\n')
# predict_path = output_path.replace('.txt', '_pred.txt')
# ref_path = output_path.replace('.txt', '_ref.txt')
# predict_file = read_iglu_result(predict_path)
# ref_file = read_iglu_result(ref_path)
# reference_corpus, pred_corpus = build_ref_pred_pair(ref_file, predict_file)
# bleu_1_results = compute_bleu(reference_corpus, pred_corpus, max_order=1, smooth=False)
# bleu_2_results = compute_bleu(reference_corpus, pred_corpus, max_order=2, smooth=False)
# bleu_3_results = compute_bleu(reference_corpus, pred_corpus, max_order=3, smooth=False)
# bleu_4_results = compute_bleu(reference_corpus, pred_corpus, max_order=4, smooth=False)
# print(bleu_4_results[0])
def main():
initialize_rngs(2021)
data_path = '/datadrive/uiuc_warmup/'
split = 'test'
model = Model()
test_dataset = create_dataloader(data_path, split)
output_path = os.path.join('.', split+'.txt')
model.generate(test_dataset, output_path)
if __name__ == '__main__':
main()
| from argparse import Namespace
import sys
sys.path.append('./python')
from bleu import compute_bleu
from train_and_eval import generate, multinomial_generate, multinomial_generate_seq2seq
from data_loader import CwCDataset
from utils import *
from vocab import load_vocab
import os
import argparse
import torch
import pickle
import pprint
import json
from glob import glob
# from vocab import Vocabulary
# 1. a function named creat_dataloader
# 2. a class Model():
# self.init(): this function will create a model
# self.generate(data=DataIGLU, output='output.txt'): this function will generte the predctions for test set
#################################################
#### You can skip this section #############
#################################################
def format_prev_utterances(prev_utterances, encoder_vocab):
prev_utterances = list(map(lambda x: list(
map(lambda y: encoder_vocab.idx2word[y.item()], x)), prev_utterances))
prev_utterances = list(map(lambda x: " ".join(x), prev_utterances))[0]
return prev_utterances
def wordify_ground_truth_utterance(ground_truth_utterance, decoder_vocab):
"""
Maps from a 2d tensor to a list of tokens and removes eos symbol
"""
return list(map(lambda x: list(map(lambda y: decoder_vocab.idx2word[y.item()], x)), ground_truth_utterance))[0][:-1]
def format_ground_truth_utterance(ground_truth_utterance, decoder_vocab):
ground_truth_utterance = wordify_ground_truth_utterance(
ground_truth_utterance, decoder_vocab)
ground_truth_utterance = " ".join(ground_truth_utterance)
return ground_truth_utterance
def format_generated_utterance(generated_utterance):
generated_utterance = list(map(lambda x: " ".join(x), generated_utterance))
return generated_utterance
def load_saved_config(model_path=''):
config_params = None
config_file = os.path.join(model_path, "config.txt")
if os.path.isfile(config_file):
_, config_params = get_config_params(config_file)
return config_params
def read_iglu_result(iglu_file):
with open(iglu_file, 'r') as f:
iglu_content = f.readlines()
iglu_kv = {}
for line in iglu_content:
if len(line.split('@@@')) != 2:
continue
(time_stamp, pred) = line.split('@@@')
iglu_kv[time_stamp] = pred
return iglu_kv
def build_ref_pred_pair(ref_dict, pred_dict):
ref_list, pred_list = [], []
for k, v in ref_dict.items():
ref_list.append([v])
if k in pred_dict:
pred_list.append(pred_dict[k])
else:
pred_list.append(' ')
return ref_list, pred_list
#########################################################
#########################################################
# !!!! this is the first function you have to implement; it will return your own dataloader
# !!!! the testing set will have the same structure and format as the validation and training data
####
def create_dataloader(data_path, split):
model_path = './saved_model/1626589670356'
config_params = load_saved_config(model_path)
gold_config_path = os.path.join(data_path, 'gold-configurations')
# with open(config_params["encoder_vocab_path"], 'rb') as f:
# encoder_vocab = pickle.load(f)
# with open(config_params["decoder_vocab_path"], 'rb') as f:
# decoder_vocab = pickle.load(f)
encoder_vocab = load_vocab('./vocabulary/encoder_vocab.pkl')
decoder_vocab = load_vocab('./vocabulary/decoder_vocab.pkl')
test_dataset = CwCDataset(
model=config_params["model"], split=split, lower=True, dump_dataset=False,
data_dir=data_path, gold_configs_dir=gold_config_path, vocab_dir=config_params[
"vocab_dir"],
encoder_vocab=encoder_vocab, decoder_vocab=decoder_vocab, load_dataset=False, transform=None
)
test_dataset.set_args(num_prev_utterances=config_params["num_prev_utterances"], blocks_max_weight=config_params["blocks_max_weight"], use_builder_actions=config_params['use_builder_actions'], include_empty_channel=config_params['include_empty_channel'], use_condensed_action_repr=config_params['use_condensed_action_repr'],
action_type_sensitive=config_params['action_type_sensitive'], feasible_next_placements=config_params['feasible_next_placements'], spatial_info_window_size=config_params["spatial_info_window_size"], counters_extra_feasibility_check=config_params["counters_extra_feasibility_check"], use_existing_blocks_counter=config_params["use_existing_blocks_counter"])
test_dl = test_dataset.get_data_loader(
batch_size=1, shuffle=False, num_workers=0)
return test_dl
# This is a class wrapper you have to implement;
# We will create a test entity by running 'model=Model()'
# We will obtain the predictions by running 'model.generate(test_set, output_path)'
class Model():
def __init__(self):
# !!!! this function does not accept any extra input
# you should explicitly feed your model path in your submission here
self.model_path = "saved_model/1626589670356"
# self.model_path="/datadrive/model/utterances_and_block_region_counters/20210718/utterances_and_block_region_counters_trainer-1626589670355/1626589670356/"
# this dict is used to store all your hyper-parameters, you can load them from a file in your submission
self.config_params = load_saved_config(self.model_path)
self.load_model(self.model_path)
# with open(self.config_params["encoder_vocab_path"], 'rb') as f:
# self.encoder_vocab = pickle.load(f)
# with open(self.config_params["decoder_vocab_path"], 'rb') as f:
# self.decoder_vocab = pickle.load(f)
self.encoder_vocab = load_vocab('./vocabulary/encoder_vocab.pkl')
self.decoder_vocab = load_vocab('./vocabulary/decoder_vocab.pkl')
print("Model has been loaded")
def load_model(self, model_path=''):
# you can load the trained models or vocabs in this function
# you need to make sure files to be loaded do exist in the path
self.model_type = self.config_params["model"]
model_files = glob(model_path+"/*-best.pkl")
self.models = {}
for model_file in model_files:
with open(model_file, 'rb') as f:
if not torch.cuda.is_available():
model = torch.load(f, map_location="cpu")
else:
model = torch.load(f)
if "flatten_parameters" in dir(model):
# print(dir(model))
model.flatten_parameters() # TODO: flatten for all sub-modules recursively
if "encoder" in model_file:
self.models["encoder"] = model
elif "decoder" in model_file:
self.models["decoder"] = model
def generate(self, test_set, output_path):
beam_size = 10
max_decoding_length = 30
gamma = 0.8
init_args = Namespace(set_decoder_hidden=self.config_params['set_decoder_hidden'],
concatenate_decoder_inputs=self.config_params['concatenate_decoder_inputs'],
concatenate_decoder_hidden=self.config_params['concatenate_decoder_hidden'],
decoder_input_concat_size=self.config_params['decoder_input_concat_size'],
decoder_hidden_concat_size=self.config_params['decoder_hidden_concat_size'],
advance_decoder_t0=self.config_params['advance_decoder_t0'])
# beam search decoding
generated_utterances_, to_print = generate(
self.models["encoder"], self.models["decoder"],
test_set, self.decoder_vocab,
beam_size=beam_size, max_length=max_decoding_length, args=init_args,
development_mode=False, gamma=gamma
)
def format(output_obj):
prev_utterances = format_prev_utterances(
output_obj["prev_utterances"], self.encoder_vocab)
ground_truth_utterance = format_ground_truth_utterance(
output_obj["ground_truth_utterance"], self.decoder_vocab)
generated_utterance = format_generated_utterance(
output_obj["generated_utterance"])
return {
"prev_utterances": prev_utterances,
"ground_truth_utterance": ground_truth_utterance,
"generated_utterance": generated_utterance,
"json_id": output_obj["json_id"],
"sample_id": output_obj["sample_id"],
"time_stamp": output_obj["time_stamp"]
}
with open(output_path, 'w') as f2:
for dia in generated_utterances_:
generated_one = format(dia)['generated_utterance'][0]
f2.write(dia['time_stamp'] +
' @@@ ' + generated_one + '\n')
# predict_path = output_path.replace('.txt', '_pred.txt')
# ref_path = output_path.replace('.txt', '_ref.txt')
# predict_file = read_iglu_result(predict_path)
# ref_file = read_iglu_result(ref_path)
# reference_corpus, pred_corpus = build_ref_pred_pair(ref_file, predict_file)
# bleu_1_results = compute_bleu(reference_corpus, pred_corpus, max_order=1, smooth=False)
# bleu_2_results = compute_bleu(reference_corpus, pred_corpus, max_order=2, smooth=False)
# bleu_3_results = compute_bleu(reference_corpus, pred_corpus, max_order=3, smooth=False)
# bleu_4_results = compute_bleu(reference_corpus, pred_corpus, max_order=4, smooth=False)
# print(bleu_4_results[0])
def main():
initialize_rngs(2021)
data_path = '/datadrive/uiuc_warmup/'
split = 'test'
model = Model()
test_dataset = create_dataloader(data_path, split)
output_path = os.path.join('.', split+'.txt')
model.generate(test_dataset, output_path)
if __name__ == '__main__':
main() | en | 0.583999 | # from vocab import Vocabulary # 1. a function named creat_dataloader # 2. a class Model(): # self.init(): this function will create a model # self.generate(data=DataIGLU, output='output.txt'): this function will generte the predctions for test set ################################################# #### You can skip this section ############# ################################################# Maps from a 2d tensor to a list of tokens and removes eos symbol ######################################################### ######################################################### # !!!! this is the first function you have to implement; it will return your own dataloader # !!!! the testing set will have the same structure and format as the validation and training data #### # with open(config_params["encoder_vocab_path"], 'rb') as f: # encoder_vocab = pickle.load(f) # with open(config_params["decoder_vocab_path"], 'rb') as f: # decoder_vocab = pickle.load(f) # This is a class wrapper you have to implement; # We will create a test entity by running 'model=Model()' # We will obtain the predictions by running 'model.generate(test_set, output_path)' # !!!! this function does not accept any extra input # you should explicitly feed your model path in your submission here # self.model_path="/datadrive/model/utterances_and_block_region_counters/20210718/utterances_and_block_region_counters_trainer-1626589670355/1626589670356/" # this dict is used to store all your hyper-parameters, you can load them from a file in your submission # with open(self.config_params["encoder_vocab_path"], 'rb') as f: # self.encoder_vocab = pickle.load(f) # with open(self.config_params["decoder_vocab_path"], 'rb') as f: # self.decoder_vocab = pickle.load(f) # you can load the trained models or vocabs in this function # you need to make sure files to be loaded do exist in the path # print(dir(model)) # TODO: flatten for all sub-modules recursively # beam search decoding # predict_path = output_path.replace('.txt', '_pred.txt') # ref_path = output_path.replace('.txt', '_ref.txt') # predict_file = read_iglu_result(predict_path) # ref_file = read_iglu_result(ref_path) # reference_corpus, pred_corpus = build_ref_pred_pair(ref_file, predict_file) # bleu_1_results = compute_bleu(reference_corpus, pred_corpus, max_order=1, smooth=False) # bleu_2_results = compute_bleu(reference_corpus, pred_corpus, max_order=2, smooth=False) # bleu_3_results = compute_bleu(reference_corpus, pred_corpus, max_order=3, smooth=False) # bleu_4_results = compute_bleu(reference_corpus, pred_corpus, max_order=4, smooth=False) # print(bleu_4_results[0]) | 2.663062 | 3 |
app/__init__.py | Winnyk15/BLOG-APP | 0 | 6631865 | <reponame>Winnyk15/BLOG-APP
from flask import Flask
from flask_bootstrap import Bootstrap
from flask_login import LoginManager
from flask_mail import Mail
from flask_sqlalchemy import SQLAlchemy
from config import Config
app = Flask(__name__)
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.init_app(app)
bootstrap = Bootstrap(app)
mail = Mail(app)
login_manager.login_view = 'auth.login'
login_manager.session_protection = 'strong'
login_manager.login_message_category = 'info'
def create_app():
app = Flask(__name__)
app.config.from_object(Config)
from .auth import auth as auth_blueprint
from .main import main as main_blueprint
app.register_blueprint(auth_blueprint)
app.register_blueprint(main_blueprint)
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.init_app(app)
return app | from flask import Flask
from flask_bootstrap import Bootstrap
from flask_login import LoginManager
from flask_mail import Mail
from flask_sqlalchemy import SQLAlchemy
from config import Config
app = Flask(__name__)
db = SQLAlchemy(app)
login_manager = LoginManager()
login_manager.init_app(app)
bootstrap = Bootstrap(app)
mail = Mail(app)
login_manager.login_view = 'auth.login'
login_manager.session_protection = 'strong'
login_manager.login_message_category = 'info'
def create_app():
app = Flask(__name__)
app.config.from_object(Config)
from .auth import auth as auth_blueprint
from .main import main as main_blueprint
app.register_blueprint(auth_blueprint)
app.register_blueprint(main_blueprint)
app.config['SQLALCHEMY_TRACK_MODIFICATIONS'] = False
db.init_app(app)
return app | none | 1 | 2.225908 | 2 |
|
autosynth/synth.py | parthea/synthtool | 0 | 6631866 | #!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Synthesizes a single library and sends a PR."""
import argparse
import os
import pathlib
import sys
import tempfile
import typing
import autosynth
import autosynth.flags
import synthtool.sources.git as synthtool_git
from autosynth import executor, git, git_source, github
from autosynth.change_pusher import (
AbstractChangePusher,
ChangePusher,
SquashingChangePusher,
)
from autosynth.log import logger
from autosynth.synthesizer import AbstractSynthesizer, Synthesizer
from autosynth.synth_toolbox import (
SynthesizeLoopToolbox,
load_metadata,
has_changes,
)
EXIT_CODE_SKIPPED = 28
def synthesize_loop(
toolbox: SynthesizeLoopToolbox,
multiple_prs: bool,
change_pusher: AbstractChangePusher,
synthesizer: AbstractSynthesizer,
) -> int:
"""Loops through all source versions and creates a commit for every version
changed that caused a change in the generated code.
Arguments:
toolbox {SynthesizeLoopToolbox} -- a toolbox
multiple_prs {bool} -- True to create one pull request per source.
change_pusher {AbstractChangePusher} -- Used to push changes to github.
synthesizer {AbstractSynthesizer} -- Invokes synthesize.
Returns:
int -- Number of commits committed to this repo.
"""
if not toolbox.versions:
return 0 # No versions, nothing to synthesize.
# Synthesize the library with the most recent versions of all sources.
youngest = len(toolbox.versions) - 1
has_changes = toolbox.synthesize_version_in_new_branch(synthesizer, youngest)
if not has_changes:
if (
not toolbox.metadata_contains_generated_files(toolbox.branch)
and toolbox.metadata_contains_generated_files(toolbox.sub_branch(youngest))
and not change_pusher.check_if_pr_already_exists(toolbox.branch)
):
# Special case: the repo owner turned on obsolete file tracking.
# Generate a one-time PR containing only metadata changes.
executor.check_call(["git", "checkout", toolbox.branch])
executor.check_call(
["git", "merge", "--squash", toolbox.sub_branch(youngest)]
)
pr_title = "chore: start tracking obsolete files"
executor.check_call(["git", "commit", "-m", pr_title])
pr = change_pusher.push_changes(1, toolbox.branch, pr_title)
pr.add_labels(["context: full"])
return 1
return 0 # No changes, nothing to do.
try:
if multiple_prs:
commit_count = 0
for fork in toolbox.fork():
if change_pusher.check_if_pr_already_exists(fork.branch):
continue
executor.check_call(["git", "checkout", fork.branch])
synthesize_inner_loop(fork, synthesizer)
commit_count += fork.commit_count
if fork.source_name == "self" or fork.count_commits_with_context() > 0:
fork.push_changes(change_pusher)
return commit_count
except Exception as e:
logger.error(e)
# Fallback to the single_pr loop to try to make some progress.
synthesize_loop_single_pr(toolbox, change_pusher, synthesizer)
# But still report the failure.
raise
return synthesize_loop_single_pr(toolbox, change_pusher, synthesizer)
def synthesize_loop_single_pr(
toolbox: SynthesizeLoopToolbox,
change_pusher: AbstractChangePusher,
synthesizer: AbstractSynthesizer,
) -> int:
"""Loops through all source versions and creates a commit for every version
changed that caused a change in the generated code.
This function creates a single pull request for all sources.
Arguments:
toolbox {SynthesizeLoopToolbox} -- a toolbox
change_pusher {AbstractChangePusher} -- Used to push changes to github.
synthesizer {AbstractSynthesizer} -- Invokes synthesize.
Returns:
int -- Number of commits committed to this repo.
"""
if change_pusher.check_if_pr_already_exists(toolbox.branch):
return 0
synthesize_inner_loop(toolbox, synthesizer)
toolbox.push_changes(change_pusher)
return toolbox.commit_count
def synthesize_inner_loop(
toolbox: SynthesizeLoopToolbox, synthesizer: AbstractSynthesizer,
):
# Synthesize with the most recent version of all the sources.
if not toolbox.synthesize_version_in_new_branch(
synthesizer, len(toolbox.versions) - 1
):
return # No differences, nothing more to do.
# Synthesize with the oldest version of all the sources.
if 1 == len(toolbox.versions) or toolbox.synthesize_version_in_new_branch(
synthesizer, 0
):
comment = """changes without context
autosynth cannot find the source of changes triggered by earlier changes in this
repository, or by version upgrades to tools such as linters."""
toolbox.patch_merge_version(0, comment)
# Binary search the range.
synthesize_range(toolbox, synthesizer)
def synthesize_range(
toolbox: SynthesizeLoopToolbox, synthesizer: AbstractSynthesizer
) -> None:
# Loop through all the individual source versions to see which ones triggered a change.
# version_ranges is a stack. The code below maintains the invariant
# that it's sorted with the oldest ranges being popped first.
# That way, we apply changes to the current branch in order from oldest
# to youngest.
version_ranges: typing.List[typing.Tuple[int, int]] = [
(0, len(toolbox.versions) - 1)
]
while version_ranges:
old, young = version_ranges.pop()
if young == old + 1:
# The base case: Found a version that triggered a change.
toolbox.patch_merge_version(young)
continue
if not toolbox.git_branches_differ(
toolbox.sub_branch(old), toolbox.sub_branch(young)
):
continue # No difference; no need to search range.
# Select the middle version to synthesize.
middle = (young - old) // 2 + old
toolbox.synthesize_version_in_new_branch(synthesizer, middle)
version_ranges.append((middle, young))
version_ranges.append((old, middle))
def main() -> int:
"""
Returns:
int -- Number of commits committed to the repo.
"""
with tempfile.TemporaryDirectory() as temp_dir:
return _inner_main(temp_dir)
def _inner_main(temp_dir: str) -> int:
"""
Returns:
int -- Number of commits committed to the repo.
"""
parser = argparse.ArgumentParser()
parser.add_argument("--github-user", default=os.environ.get("GITHUB_USER"))
parser.add_argument("--github-email", default=os.environ.get("GITHUB_EMAIL"))
parser.add_argument("--github-token", default=os.environ.get("GITHUB_TOKEN"))
parser.add_argument(
"--repository", default=os.environ.get("REPOSITORY"), required=True
)
parser.add_argument(
"--synth-path",
default=os.environ.get("SYNTH_PATH"),
help="If specified, changes the directory from which synthtool is invoked.",
)
parser.add_argument(
"--synth-file-name",
default=os.environ.get("SYNTH_FILE_NAME"),
help="If specified, override the synth file name and may be a path to a file. Defaults to 'synth.py'.",
)
parser.add_argument("--metadata-path", default=os.environ.get("METADATA_PATH"))
parser.add_argument("--base-log-dir", default="")
parser.add_argument(
"--deprecated-execution",
default=False,
action="store_true",
help="If specified, execute synth.py directly instead of synthtool. This behavior is deprecated.",
)
parser.add_argument(
"--branch-suffix", default=os.environ.get("BRANCH_SUFFIX", None)
)
parser.add_argument("--pr-title", default="")
parser.add_argument("extra_args", nargs=argparse.REMAINDER)
args = parser.parse_args()
gh = github.GitHub(args.github_token)
branch = "-".join(filter(None, ["autosynth", args.branch_suffix]))
pr_title = args.pr_title or (
f"[CHANGE ME] Re-generated {args.synth_path or ''} to pick up changes in "
f"the API or client library generator."
)
change_pusher: AbstractChangePusher = ChangePusher(args.repository, gh, branch)
synth_file_name = args.synth_file_name or "synth.py"
# capture logs for later
# The logs directory path will be rendered in Sponge and Fusion as the test name,
# so drop all the unimportant parts.
base_log_dir = (
pathlib.Path(args.base_log_dir)
if args.base_log_dir
else pathlib.Path(os.getcwd()) / "logs"
)
base_synth_log_path = (
base_log_dir / pathlib.Path(args.synth_path or args.repository).name
)
logger.info(f"logs will be written to: {base_synth_log_path}")
working_repo_path = synthtool_git.clone(f"https://github.com/{args.repository}.git")
try:
os.chdir(working_repo_path)
git.configure_git(args.github_user, args.github_email)
git.setup_branch(branch)
if args.synth_path:
os.chdir(args.synth_path)
metadata_path = os.path.join(args.metadata_path or "", "synth.metadata")
flags = autosynth.flags.parse_flags(synth_file_name)
# Override flags specified in synth.py with flags specified in environment vars.
for key in flags.keys():
env_value = os.environ.get(key, "")
if env_value:
flags[key] = False if env_value.lower() == "false" else env_value
metadata = load_metadata(metadata_path)
multiple_commits = flags[autosynth.flags.AUTOSYNTH_MULTIPLE_COMMITS]
multiple_prs = flags[autosynth.flags.AUTOSYNTH_MULTIPLE_PRS]
if (not multiple_commits and not multiple_prs) or not metadata:
if change_pusher.check_if_pr_already_exists(branch):
return 0
synth_log_path = base_synth_log_path
for arg in args.extra_args:
synth_log_path = synth_log_path / arg
synth_log = Synthesizer(
metadata_path,
args.extra_args,
deprecated_execution=args.deprecated_execution,
synth_py_path=synth_file_name,
).synthesize(synth_log_path / "sponge_log.log")
if not has_changes():
logger.info("No changes. :)")
sys.exit(EXIT_CODE_SKIPPED)
git.commit_all_changes(pr_title)
change_pusher.push_changes(1, branch, pr_title, synth_log)
return 1
else:
if not multiple_prs and change_pusher.check_if_pr_already_exists(branch):
return 0 # There's already an existing PR
# Enumerate the versions to loop over.
sources = metadata.get("sources", [])
source_versions = [
git_source.enumerate_versions_for_working_repo(metadata_path, sources)
]
# Add supported source version types below:
source_versions.extend(
git_source.enumerate_versions(sources, pathlib.Path(temp_dir))
)
# Prepare to call synthesize loop.
synthesizer = Synthesizer(
metadata_path,
args.extra_args,
deprecated_execution=args.deprecated_execution,
synth_py_path=synth_file_name,
)
x = SynthesizeLoopToolbox(
source_versions,
branch,
temp_dir,
metadata_path,
args.synth_path,
base_synth_log_path,
)
if not multiple_commits:
change_pusher = SquashingChangePusher(change_pusher)
# Call the loop.
commit_count = synthesize_loop(x, multiple_prs, change_pusher, synthesizer)
if commit_count == 0:
logger.info("No changes. :)")
sys.exit(EXIT_CODE_SKIPPED)
return commit_count
finally:
if args.synth_path:
# We're generating code in a mono repo. The state left behind will
# probably be useful for generating the next API.
pass
else:
# We're generating a single API in a single repo, and using a different
# repo to generate the next API. So the next synth will not be able to
# use any of this state. Clean it up to avoid running out of disk space.
executor.run(["git", "clean", "-fdx"], cwd=working_repo_path)
if __name__ == "__main__":
main()
| #!/usr/bin/env python3
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Synthesizes a single library and sends a PR."""
import argparse
import os
import pathlib
import sys
import tempfile
import typing
import autosynth
import autosynth.flags
import synthtool.sources.git as synthtool_git
from autosynth import executor, git, git_source, github
from autosynth.change_pusher import (
AbstractChangePusher,
ChangePusher,
SquashingChangePusher,
)
from autosynth.log import logger
from autosynth.synthesizer import AbstractSynthesizer, Synthesizer
from autosynth.synth_toolbox import (
SynthesizeLoopToolbox,
load_metadata,
has_changes,
)
EXIT_CODE_SKIPPED = 28
def synthesize_loop(
toolbox: SynthesizeLoopToolbox,
multiple_prs: bool,
change_pusher: AbstractChangePusher,
synthesizer: AbstractSynthesizer,
) -> int:
"""Loops through all source versions and creates a commit for every version
changed that caused a change in the generated code.
Arguments:
toolbox {SynthesizeLoopToolbox} -- a toolbox
multiple_prs {bool} -- True to create one pull request per source.
change_pusher {AbstractChangePusher} -- Used to push changes to github.
synthesizer {AbstractSynthesizer} -- Invokes synthesize.
Returns:
int -- Number of commits committed to this repo.
"""
if not toolbox.versions:
return 0 # No versions, nothing to synthesize.
# Synthesize the library with the most recent versions of all sources.
youngest = len(toolbox.versions) - 1
has_changes = toolbox.synthesize_version_in_new_branch(synthesizer, youngest)
if not has_changes:
if (
not toolbox.metadata_contains_generated_files(toolbox.branch)
and toolbox.metadata_contains_generated_files(toolbox.sub_branch(youngest))
and not change_pusher.check_if_pr_already_exists(toolbox.branch)
):
# Special case: the repo owner turned on obsolete file tracking.
# Generate a one-time PR containing only metadata changes.
executor.check_call(["git", "checkout", toolbox.branch])
executor.check_call(
["git", "merge", "--squash", toolbox.sub_branch(youngest)]
)
pr_title = "chore: start tracking obsolete files"
executor.check_call(["git", "commit", "-m", pr_title])
pr = change_pusher.push_changes(1, toolbox.branch, pr_title)
pr.add_labels(["context: full"])
return 1
return 0 # No changes, nothing to do.
try:
if multiple_prs:
commit_count = 0
for fork in toolbox.fork():
if change_pusher.check_if_pr_already_exists(fork.branch):
continue
executor.check_call(["git", "checkout", fork.branch])
synthesize_inner_loop(fork, synthesizer)
commit_count += fork.commit_count
if fork.source_name == "self" or fork.count_commits_with_context() > 0:
fork.push_changes(change_pusher)
return commit_count
except Exception as e:
logger.error(e)
# Fallback to the single_pr loop to try to make some progress.
synthesize_loop_single_pr(toolbox, change_pusher, synthesizer)
# But still report the failure.
raise
return synthesize_loop_single_pr(toolbox, change_pusher, synthesizer)
def synthesize_loop_single_pr(
toolbox: SynthesizeLoopToolbox,
change_pusher: AbstractChangePusher,
synthesizer: AbstractSynthesizer,
) -> int:
"""Loops through all source versions and creates a commit for every version
changed that caused a change in the generated code.
This function creates a single pull request for all sources.
Arguments:
toolbox {SynthesizeLoopToolbox} -- a toolbox
change_pusher {AbstractChangePusher} -- Used to push changes to github.
synthesizer {AbstractSynthesizer} -- Invokes synthesize.
Returns:
int -- Number of commits committed to this repo.
"""
if change_pusher.check_if_pr_already_exists(toolbox.branch):
return 0
synthesize_inner_loop(toolbox, synthesizer)
toolbox.push_changes(change_pusher)
return toolbox.commit_count
def synthesize_inner_loop(
toolbox: SynthesizeLoopToolbox, synthesizer: AbstractSynthesizer,
):
# Synthesize with the most recent version of all the sources.
if not toolbox.synthesize_version_in_new_branch(
synthesizer, len(toolbox.versions) - 1
):
return # No differences, nothing more to do.
# Synthesize with the oldest version of all the sources.
if 1 == len(toolbox.versions) or toolbox.synthesize_version_in_new_branch(
synthesizer, 0
):
comment = """changes without context
autosynth cannot find the source of changes triggered by earlier changes in this
repository, or by version upgrades to tools such as linters."""
toolbox.patch_merge_version(0, comment)
# Binary search the range.
synthesize_range(toolbox, synthesizer)
def synthesize_range(
toolbox: SynthesizeLoopToolbox, synthesizer: AbstractSynthesizer
) -> None:
# Loop through all the individual source versions to see which ones triggered a change.
# version_ranges is a stack. The code below maintains the invariant
# that it's sorted with the oldest ranges being popped first.
# That way, we apply changes to the current branch in order from oldest
# to youngest.
version_ranges: typing.List[typing.Tuple[int, int]] = [
(0, len(toolbox.versions) - 1)
]
while version_ranges:
old, young = version_ranges.pop()
if young == old + 1:
# The base case: Found a version that triggered a change.
toolbox.patch_merge_version(young)
continue
if not toolbox.git_branches_differ(
toolbox.sub_branch(old), toolbox.sub_branch(young)
):
continue # No difference; no need to search range.
# Select the middle version to synthesize.
middle = (young - old) // 2 + old
toolbox.synthesize_version_in_new_branch(synthesizer, middle)
version_ranges.append((middle, young))
version_ranges.append((old, middle))
def main() -> int:
"""
Returns:
int -- Number of commits committed to the repo.
"""
with tempfile.TemporaryDirectory() as temp_dir:
return _inner_main(temp_dir)
def _inner_main(temp_dir: str) -> int:
"""
Returns:
int -- Number of commits committed to the repo.
"""
parser = argparse.ArgumentParser()
parser.add_argument("--github-user", default=os.environ.get("GITHUB_USER"))
parser.add_argument("--github-email", default=os.environ.get("GITHUB_EMAIL"))
parser.add_argument("--github-token", default=os.environ.get("GITHUB_TOKEN"))
parser.add_argument(
"--repository", default=os.environ.get("REPOSITORY"), required=True
)
parser.add_argument(
"--synth-path",
default=os.environ.get("SYNTH_PATH"),
help="If specified, changes the directory from which synthtool is invoked.",
)
parser.add_argument(
"--synth-file-name",
default=os.environ.get("SYNTH_FILE_NAME"),
help="If specified, override the synth file name and may be a path to a file. Defaults to 'synth.py'.",
)
parser.add_argument("--metadata-path", default=os.environ.get("METADATA_PATH"))
parser.add_argument("--base-log-dir", default="")
parser.add_argument(
"--deprecated-execution",
default=False,
action="store_true",
help="If specified, execute synth.py directly instead of synthtool. This behavior is deprecated.",
)
parser.add_argument(
"--branch-suffix", default=os.environ.get("BRANCH_SUFFIX", None)
)
parser.add_argument("--pr-title", default="")
parser.add_argument("extra_args", nargs=argparse.REMAINDER)
args = parser.parse_args()
gh = github.GitHub(args.github_token)
branch = "-".join(filter(None, ["autosynth", args.branch_suffix]))
pr_title = args.pr_title or (
f"[CHANGE ME] Re-generated {args.synth_path or ''} to pick up changes in "
f"the API or client library generator."
)
change_pusher: AbstractChangePusher = ChangePusher(args.repository, gh, branch)
synth_file_name = args.synth_file_name or "synth.py"
# capture logs for later
# The logs directory path will be rendered in Sponge and Fusion as the test name,
# so drop all the unimportant parts.
base_log_dir = (
pathlib.Path(args.base_log_dir)
if args.base_log_dir
else pathlib.Path(os.getcwd()) / "logs"
)
base_synth_log_path = (
base_log_dir / pathlib.Path(args.synth_path or args.repository).name
)
logger.info(f"logs will be written to: {base_synth_log_path}")
working_repo_path = synthtool_git.clone(f"https://github.com/{args.repository}.git")
try:
os.chdir(working_repo_path)
git.configure_git(args.github_user, args.github_email)
git.setup_branch(branch)
if args.synth_path:
os.chdir(args.synth_path)
metadata_path = os.path.join(args.metadata_path or "", "synth.metadata")
flags = autosynth.flags.parse_flags(synth_file_name)
# Override flags specified in synth.py with flags specified in environment vars.
for key in flags.keys():
env_value = os.environ.get(key, "")
if env_value:
flags[key] = False if env_value.lower() == "false" else env_value
metadata = load_metadata(metadata_path)
multiple_commits = flags[autosynth.flags.AUTOSYNTH_MULTIPLE_COMMITS]
multiple_prs = flags[autosynth.flags.AUTOSYNTH_MULTIPLE_PRS]
if (not multiple_commits and not multiple_prs) or not metadata:
if change_pusher.check_if_pr_already_exists(branch):
return 0
synth_log_path = base_synth_log_path
for arg in args.extra_args:
synth_log_path = synth_log_path / arg
synth_log = Synthesizer(
metadata_path,
args.extra_args,
deprecated_execution=args.deprecated_execution,
synth_py_path=synth_file_name,
).synthesize(synth_log_path / "sponge_log.log")
if not has_changes():
logger.info("No changes. :)")
sys.exit(EXIT_CODE_SKIPPED)
git.commit_all_changes(pr_title)
change_pusher.push_changes(1, branch, pr_title, synth_log)
return 1
else:
if not multiple_prs and change_pusher.check_if_pr_already_exists(branch):
return 0 # There's already an existing PR
# Enumerate the versions to loop over.
sources = metadata.get("sources", [])
source_versions = [
git_source.enumerate_versions_for_working_repo(metadata_path, sources)
]
# Add supported source version types below:
source_versions.extend(
git_source.enumerate_versions(sources, pathlib.Path(temp_dir))
)
# Prepare to call synthesize loop.
synthesizer = Synthesizer(
metadata_path,
args.extra_args,
deprecated_execution=args.deprecated_execution,
synth_py_path=synth_file_name,
)
x = SynthesizeLoopToolbox(
source_versions,
branch,
temp_dir,
metadata_path,
args.synth_path,
base_synth_log_path,
)
if not multiple_commits:
change_pusher = SquashingChangePusher(change_pusher)
# Call the loop.
commit_count = synthesize_loop(x, multiple_prs, change_pusher, synthesizer)
if commit_count == 0:
logger.info("No changes. :)")
sys.exit(EXIT_CODE_SKIPPED)
return commit_count
finally:
if args.synth_path:
# We're generating code in a mono repo. The state left behind will
# probably be useful for generating the next API.
pass
else:
# We're generating a single API in a single repo, and using a different
# repo to generate the next API. So the next synth will not be able to
# use any of this state. Clean it up to avoid running out of disk space.
executor.run(["git", "clean", "-fdx"], cwd=working_repo_path)
if __name__ == "__main__":
main()
| en | 0.835779 | #!/usr/bin/env python3 # Copyright 2020 Google LLC # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Synthesizes a single library and sends a PR. Loops through all source versions and creates a commit for every version changed that caused a change in the generated code. Arguments: toolbox {SynthesizeLoopToolbox} -- a toolbox multiple_prs {bool} -- True to create one pull request per source. change_pusher {AbstractChangePusher} -- Used to push changes to github. synthesizer {AbstractSynthesizer} -- Invokes synthesize. Returns: int -- Number of commits committed to this repo. # No versions, nothing to synthesize. # Synthesize the library with the most recent versions of all sources. # Special case: the repo owner turned on obsolete file tracking. # Generate a one-time PR containing only metadata changes. # No changes, nothing to do. # Fallback to the single_pr loop to try to make some progress. # But still report the failure. Loops through all source versions and creates a commit for every version changed that caused a change in the generated code. This function creates a single pull request for all sources. Arguments: toolbox {SynthesizeLoopToolbox} -- a toolbox change_pusher {AbstractChangePusher} -- Used to push changes to github. synthesizer {AbstractSynthesizer} -- Invokes synthesize. Returns: int -- Number of commits committed to this repo. # Synthesize with the most recent version of all the sources. # No differences, nothing more to do. # Synthesize with the oldest version of all the sources. changes without context autosynth cannot find the source of changes triggered by earlier changes in this repository, or by version upgrades to tools such as linters. # Binary search the range. # Loop through all the individual source versions to see which ones triggered a change. # version_ranges is a stack. The code below maintains the invariant # that it's sorted with the oldest ranges being popped first. # That way, we apply changes to the current branch in order from oldest # to youngest. # The base case: Found a version that triggered a change. # No difference; no need to search range. # Select the middle version to synthesize. Returns: int -- Number of commits committed to the repo. Returns: int -- Number of commits committed to the repo. # capture logs for later # The logs directory path will be rendered in Sponge and Fusion as the test name, # so drop all the unimportant parts. # Override flags specified in synth.py with flags specified in environment vars. # There's already an existing PR # Enumerate the versions to loop over. # Add supported source version types below: # Prepare to call synthesize loop. # Call the loop. # We're generating code in a mono repo. The state left behind will # probably be useful for generating the next API. # We're generating a single API in a single repo, and using a different # repo to generate the next API. So the next synth will not be able to # use any of this state. Clean it up to avoid running out of disk space. | 1.899268 | 2 |
methylize/__init__.py | LifeEGX/methylize | 2 | 6631867 | import logging
from .diff_meth_pos import diff_meth_pos, volcano_plot, manhattan_plot
from .diff_meth_regions import diff_meth_regions
from .genome_browser import fetch_genes
from .helpers import to_BED
from .version import __version__
from . import cpv
logging.basicConfig(level=logging.INFO)
__all__ = [
'diff_meth_pos',
'volcano_plot',
'manhattan_plot',
'diff_meth_regions',
'fetch_genes',
'to_BED',
'cpv',
]
| import logging
from .diff_meth_pos import diff_meth_pos, volcano_plot, manhattan_plot
from .diff_meth_regions import diff_meth_regions
from .genome_browser import fetch_genes
from .helpers import to_BED
from .version import __version__
from . import cpv
logging.basicConfig(level=logging.INFO)
__all__ = [
'diff_meth_pos',
'volcano_plot',
'manhattan_plot',
'diff_meth_regions',
'fetch_genes',
'to_BED',
'cpv',
]
| none | 1 | 1.368694 | 1 |
|
qdef2d/defects/gen_defect_supercell.py | aztan2/charged-defects-framework | 4 | 6631868 | import os
import errno
import json
import argparse
from pymatgen.io.vasp.inputs import Poscar
from qdef2d.defects.core import Defect
def generate(dir_def_main, initdef_file, q, supercell, vacuum, bulkref=False):
"""
Generate defect supercell.
Parameters
----------
dir_def_main (str): path to main defect directory containing unitcell POSCARs
initdef_file (str): json file with details to initialize defect
q (int): charge
supercell (tuple of ints): supercell size as [n1,n2,n3]
vacuum (int): vacuum spacing
[optional] bulkref (bool): generate only bulk reference supercell? Default=False.
"""
## the directory from which this function was called
## should already be the appropriate charge/cell/vacuum subdirectory
subdir_def = os.getcwd()
## read in corresponding unit cell POSCAR
pos_file = os.path.join(dir_def_main,"POSCAR_vac_%d"%vacuum)
if not os.path.exists(pos_file):
raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), pos_file)
poscar = Poscar.from_file(pos_file,check_for_POTCAR=False,read_velocities=False)
## make undefected supercell
structure = poscar.structure.copy()
structure.make_supercell(supercell)
structure_bulk = structure.copy()
if bulkref:
## write bulkref POSCAR
Poscar.write_file(Poscar(structure_bulk),
os.path.join(subdir_def,"POSCAR"))
else:
## read in defect details from initdefect json file
id_file = os.path.join(dir_def_main,initdef_file)
if not os.path.exists(id_file):
raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), id_file)
with open(id_file, 'r') as file:
initdef = json.loads(file.read())
## initialize defect object
defect = Defect(structure_bulk,structure.copy(),supercell,vacuum,q)
## generate defect supercell
defect.generate_supercell(initdef)
## write defect POSCAR
Poscar.write_file(Poscar(defect.structure.get_sorted_structure()),
os.path.join(subdir_def,"POSCAR"))
## write defectproperty.json file (summary of defect created in this supercell)
with open(os.path.join(subdir_def,"defectproperty.json"), 'w') as file:
file.write(json.dumps(defect.as_dict(),indent=4))
if __name__ == '__main__':
## this script can also be run directly from the command line
parser = argparse.ArgumentParser(description='Generate defect supercell.')
parser.add_argument('dir_def_main',help='path to main defect directory containing unitcell POSCARs')
parser.add_argument('initdef_file',help='json file with details to initialize defect')
parser.add_argument('q',type=int,help='charge')
parser.add_argument('supercell',help='supercell size, as n1xn2xn3')
parser.add_argument('vacuum',type=int,help='vacuum spacing')
parser.add_argument('--bulkref',help='generate only bulk reference supercell?',
default=False,action='store_true')
## parse the given arguments
args = parser.parse_args()
generate(args.dir_def_main, args.initdef_file,
args.q, [int(n) for n in args.supercell.split('x')], args.vacuum,
args.bulkref)
| import os
import errno
import json
import argparse
from pymatgen.io.vasp.inputs import Poscar
from qdef2d.defects.core import Defect
def generate(dir_def_main, initdef_file, q, supercell, vacuum, bulkref=False):
"""
Generate defect supercell.
Parameters
----------
dir_def_main (str): path to main defect directory containing unitcell POSCARs
initdef_file (str): json file with details to initialize defect
q (int): charge
supercell (tuple of ints): supercell size as [n1,n2,n3]
vacuum (int): vacuum spacing
[optional] bulkref (bool): generate only bulk reference supercell? Default=False.
"""
## the directory from which this function was called
## should already be the appropriate charge/cell/vacuum subdirectory
subdir_def = os.getcwd()
## read in corresponding unit cell POSCAR
pos_file = os.path.join(dir_def_main,"POSCAR_vac_%d"%vacuum)
if not os.path.exists(pos_file):
raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), pos_file)
poscar = Poscar.from_file(pos_file,check_for_POTCAR=False,read_velocities=False)
## make undefected supercell
structure = poscar.structure.copy()
structure.make_supercell(supercell)
structure_bulk = structure.copy()
if bulkref:
## write bulkref POSCAR
Poscar.write_file(Poscar(structure_bulk),
os.path.join(subdir_def,"POSCAR"))
else:
## read in defect details from initdefect json file
id_file = os.path.join(dir_def_main,initdef_file)
if not os.path.exists(id_file):
raise FileNotFoundError(errno.ENOENT, os.strerror(errno.ENOENT), id_file)
with open(id_file, 'r') as file:
initdef = json.loads(file.read())
## initialize defect object
defect = Defect(structure_bulk,structure.copy(),supercell,vacuum,q)
## generate defect supercell
defect.generate_supercell(initdef)
## write defect POSCAR
Poscar.write_file(Poscar(defect.structure.get_sorted_structure()),
os.path.join(subdir_def,"POSCAR"))
## write defectproperty.json file (summary of defect created in this supercell)
with open(os.path.join(subdir_def,"defectproperty.json"), 'w') as file:
file.write(json.dumps(defect.as_dict(),indent=4))
if __name__ == '__main__':
## this script can also be run directly from the command line
parser = argparse.ArgumentParser(description='Generate defect supercell.')
parser.add_argument('dir_def_main',help='path to main defect directory containing unitcell POSCARs')
parser.add_argument('initdef_file',help='json file with details to initialize defect')
parser.add_argument('q',type=int,help='charge')
parser.add_argument('supercell',help='supercell size, as n1xn2xn3')
parser.add_argument('vacuum',type=int,help='vacuum spacing')
parser.add_argument('--bulkref',help='generate only bulk reference supercell?',
default=False,action='store_true')
## parse the given arguments
args = parser.parse_args()
generate(args.dir_def_main, args.initdef_file,
args.q, [int(n) for n in args.supercell.split('x')], args.vacuum,
args.bulkref)
| en | 0.785261 | Generate defect supercell. Parameters ---------- dir_def_main (str): path to main defect directory containing unitcell POSCARs initdef_file (str): json file with details to initialize defect q (int): charge supercell (tuple of ints): supercell size as [n1,n2,n3] vacuum (int): vacuum spacing [optional] bulkref (bool): generate only bulk reference supercell? Default=False. ## the directory from which this function was called ## should already be the appropriate charge/cell/vacuum subdirectory ## read in corresponding unit cell POSCAR ## make undefected supercell ## write bulkref POSCAR ## read in defect details from initdefect json file ## initialize defect object ## generate defect supercell ## write defect POSCAR ## write defectproperty.json file (summary of defect created in this supercell) ## this script can also be run directly from the command line ## parse the given arguments | 2.695788 | 3 |
frappe/desk/utils.py | oryxsolutions/frappe | 0 | 6631869 | <reponame>oryxsolutions/frappe
# Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import frappe
def validate_route_conflict(doctype, name):
"""
Raises exception if name clashes with routes from other documents for /app routing
"""
all_names = []
for _doctype in ["Page", "Workspace", "DocType"]:
try:
all_names.extend(
[
slug(d) for d in frappe.get_all(_doctype, pluck="name") if (doctype != _doctype and d != name)
]
)
except frappe.db.TableMissingError:
pass
if slug(name) in all_names:
frappe.msgprint(frappe._("Name already taken, please set a new name"))
raise frappe.NameError
def slug(name):
return name.lower().replace(" ", "-")
| # Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors
# License: MIT. See LICENSE
import frappe
def validate_route_conflict(doctype, name):
"""
Raises exception if name clashes with routes from other documents for /app routing
"""
all_names = []
for _doctype in ["Page", "Workspace", "DocType"]:
try:
all_names.extend(
[
slug(d) for d in frappe.get_all(_doctype, pluck="name") if (doctype != _doctype and d != name)
]
)
except frappe.db.TableMissingError:
pass
if slug(name) in all_names:
frappe.msgprint(frappe._("Name already taken, please set a new name"))
raise frappe.NameError
def slug(name):
return name.lower().replace(" ", "-") | en | 0.731374 | # Copyright (c) 2020, Frappe Technologies Pvt. Ltd. and Contributors # License: MIT. See LICENSE Raises exception if name clashes with routes from other documents for /app routing | 2.232339 | 2 |
junior/process/multi_process_rw.py | Firekiss/python_learn | 0 | 6631870 | # 使用多线程进行内容的读写
from multiprocessing import Process, Queue
import time
class ReadProcess(Process):
def __init__(self, q, *args, **kwargs):
super().__init__(*args, **kwargs)
self.q = q
def run(self):
l = [
'锄禾日当午',
'汗滴禾下土',
'谁知盘中餐',
'粒粒皆辛苦'
]
for line in l:
self.q.put(line)
print('read process put line in queue: {}'.format(line))
time.sleep(2)
class WriteProcess(Process):
def __init__(self, q, *args, **kwargs):
super().__init__(*args, **kwargs)
self.q = q
def run(self):
while True:
line = self.q.get()
print('write process get line in queue: {} '.format(line))
if __name__ == '__main__':
q = Queue(10)
r = ReadProcess(q)
w = WriteProcess(q)
r.start()
w.start() | # 使用多线程进行内容的读写
from multiprocessing import Process, Queue
import time
class ReadProcess(Process):
def __init__(self, q, *args, **kwargs):
super().__init__(*args, **kwargs)
self.q = q
def run(self):
l = [
'锄禾日当午',
'汗滴禾下土',
'谁知盘中餐',
'粒粒皆辛苦'
]
for line in l:
self.q.put(line)
print('read process put line in queue: {}'.format(line))
time.sleep(2)
class WriteProcess(Process):
def __init__(self, q, *args, **kwargs):
super().__init__(*args, **kwargs)
self.q = q
def run(self):
while True:
line = self.q.get()
print('write process get line in queue: {} '.format(line))
if __name__ == '__main__':
q = Queue(10)
r = ReadProcess(q)
w = WriteProcess(q)
r.start()
w.start() | zh | 0.982094 | # 使用多线程进行内容的读写 | 3.805652 | 4 |
scrapli/response.py | rbraddev/scrapli | 1 | 6631871 | """scrapli.response"""
from datetime import datetime
from io import TextIOWrapper
from typing import Any, Dict, List, Optional, Union
from scrapli.helper import _textfsm_get_template, genie_parse, textfsm_parse
class Response:
def __init__(
self,
host: str,
channel_input: str,
textfsm_platform: str = "",
genie_platform: str = "",
expectation: Optional[str] = None,
channel_response: Optional[str] = None,
finale: Optional[str] = None,
failed_when_contains: Optional[Union[str, List[str]]] = None,
):
"""
Scrapli Response
Store channel_input, resulting output, and start/end/elapsed time information. Attempt to
determine if command was successful or not and reflect that in a failed attribute.
Args:
host: host that was operated on
channel_input: input that got sent down the channel
textfsm_platform: ntc-templates friendly platform type
genie_platform: cisco pyats/genie friendly platform type
expectation: used for send_inputs_interact -- string to expect back from the channel
after initial input
channel_response: used for send_inputs_interact -- string to use to respond to expected
prompt
finale: string of prompt to look for to know when "done" with interaction
failed_when_contains: list of strings that, if present in final output, represent a
failed command/interaction
Returns:
N/A # noqa: DAR202
Raises:
N/A # noqa
"""
self.host = host
self.start_time = datetime.now()
self.finish_time: Optional[datetime] = None
self.elapsed_time: Optional[float] = None
self.channel_input = channel_input
self.textfsm_platform = textfsm_platform
self.genie_platform = genie_platform
self.expectation = expectation
self.channel_response = channel_response
self.finale = finale
self.raw_result: str = ""
self.result: str = ""
if isinstance(failed_when_contains, str):
failed_when_contains = [failed_when_contains]
self.failed_when_contains = failed_when_contains
self.failed = True
def __bool__(self) -> bool:
"""
Magic bool method based on channel_input being failed or not
Args:
N/A
Returns:
bool: True/False if channel_input failed
Raises:
N/A
"""
return self.failed
def __repr__(self) -> str:
"""
Magic repr method for SSH2NetResponse class
Args:
N/A
Returns:
str: repr for class object
Raises:
N/A
"""
return f"Scrape <Success: {str(not self.failed)}>"
def __str__(self) -> str:
"""
Magic str method for SSH2NetResponse class
Args:
N/A
Returns:
str: str for class object
Raises:
N/A
"""
return f"Scrape <Success: {str(not self.failed)}>"
def record_response(self, result: str) -> None:
"""
Record channel_input results and elapsed time of channel input/reading output
Args:
result: string result of channel_input
Returns:
N/A # noqa: DAR202
Raises:
N/A
"""
self.finish_time = datetime.now()
self.elapsed_time = (self.finish_time - self.start_time).total_seconds()
self.result = result
if not self.failed_when_contains:
self.failed = False
elif not any(err in result for err in self.failed_when_contains):
self.failed = False
def textfsm_parse_output(self) -> Union[Dict[str, Any], List[Any]]:
"""
Parse results with textfsm, always return structured data
Returns an empty list if parsing fails!
Args:
N/A
Returns:
structured_result: empty list or parsed data from textfsm
Raises:
N/A
"""
template = _textfsm_get_template(self.textfsm_platform, self.channel_input)
if isinstance(template, TextIOWrapper):
structured_result = textfsm_parse(template, self.result) or []
else:
structured_result = []
return structured_result
def genie_parse_output(self) -> Union[Dict[str, Any], List[Any]]:
"""
Parse results with genie, always return structured data
Returns an empty list if parsing fails!
Args:
N/A
Returns:
structured_result: empty list or parsed data from genie
Raises:
N/A
"""
structured_result = genie_parse(self.genie_platform, self.channel_input, self.result)
return structured_result
| """scrapli.response"""
from datetime import datetime
from io import TextIOWrapper
from typing import Any, Dict, List, Optional, Union
from scrapli.helper import _textfsm_get_template, genie_parse, textfsm_parse
class Response:
def __init__(
self,
host: str,
channel_input: str,
textfsm_platform: str = "",
genie_platform: str = "",
expectation: Optional[str] = None,
channel_response: Optional[str] = None,
finale: Optional[str] = None,
failed_when_contains: Optional[Union[str, List[str]]] = None,
):
"""
Scrapli Response
Store channel_input, resulting output, and start/end/elapsed time information. Attempt to
determine if command was successful or not and reflect that in a failed attribute.
Args:
host: host that was operated on
channel_input: input that got sent down the channel
textfsm_platform: ntc-templates friendly platform type
genie_platform: cisco pyats/genie friendly platform type
expectation: used for send_inputs_interact -- string to expect back from the channel
after initial input
channel_response: used for send_inputs_interact -- string to use to respond to expected
prompt
finale: string of prompt to look for to know when "done" with interaction
failed_when_contains: list of strings that, if present in final output, represent a
failed command/interaction
Returns:
N/A # noqa: DAR202
Raises:
N/A # noqa
"""
self.host = host
self.start_time = datetime.now()
self.finish_time: Optional[datetime] = None
self.elapsed_time: Optional[float] = None
self.channel_input = channel_input
self.textfsm_platform = textfsm_platform
self.genie_platform = genie_platform
self.expectation = expectation
self.channel_response = channel_response
self.finale = finale
self.raw_result: str = ""
self.result: str = ""
if isinstance(failed_when_contains, str):
failed_when_contains = [failed_when_contains]
self.failed_when_contains = failed_when_contains
self.failed = True
def __bool__(self) -> bool:
"""
Magic bool method based on channel_input being failed or not
Args:
N/A
Returns:
bool: True/False if channel_input failed
Raises:
N/A
"""
return self.failed
def __repr__(self) -> str:
"""
Magic repr method for SSH2NetResponse class
Args:
N/A
Returns:
str: repr for class object
Raises:
N/A
"""
return f"Scrape <Success: {str(not self.failed)}>"
def __str__(self) -> str:
"""
Magic str method for SSH2NetResponse class
Args:
N/A
Returns:
str: str for class object
Raises:
N/A
"""
return f"Scrape <Success: {str(not self.failed)}>"
def record_response(self, result: str) -> None:
"""
Record channel_input results and elapsed time of channel input/reading output
Args:
result: string result of channel_input
Returns:
N/A # noqa: DAR202
Raises:
N/A
"""
self.finish_time = datetime.now()
self.elapsed_time = (self.finish_time - self.start_time).total_seconds()
self.result = result
if not self.failed_when_contains:
self.failed = False
elif not any(err in result for err in self.failed_when_contains):
self.failed = False
def textfsm_parse_output(self) -> Union[Dict[str, Any], List[Any]]:
"""
Parse results with textfsm, always return structured data
Returns an empty list if parsing fails!
Args:
N/A
Returns:
structured_result: empty list or parsed data from textfsm
Raises:
N/A
"""
template = _textfsm_get_template(self.textfsm_platform, self.channel_input)
if isinstance(template, TextIOWrapper):
structured_result = textfsm_parse(template, self.result) or []
else:
structured_result = []
return structured_result
def genie_parse_output(self) -> Union[Dict[str, Any], List[Any]]:
"""
Parse results with genie, always return structured data
Returns an empty list if parsing fails!
Args:
N/A
Returns:
structured_result: empty list or parsed data from genie
Raises:
N/A
"""
structured_result = genie_parse(self.genie_platform, self.channel_input, self.result)
return structured_result
| en | 0.746638 | scrapli.response Scrapli Response Store channel_input, resulting output, and start/end/elapsed time information. Attempt to determine if command was successful or not and reflect that in a failed attribute. Args: host: host that was operated on channel_input: input that got sent down the channel textfsm_platform: ntc-templates friendly platform type genie_platform: cisco pyats/genie friendly platform type expectation: used for send_inputs_interact -- string to expect back from the channel after initial input channel_response: used for send_inputs_interact -- string to use to respond to expected prompt finale: string of prompt to look for to know when "done" with interaction failed_when_contains: list of strings that, if present in final output, represent a failed command/interaction Returns: N/A # noqa: DAR202 Raises: N/A # noqa Magic bool method based on channel_input being failed or not Args: N/A Returns: bool: True/False if channel_input failed Raises: N/A Magic repr method for SSH2NetResponse class Args: N/A Returns: str: repr for class object Raises: N/A Magic str method for SSH2NetResponse class Args: N/A Returns: str: str for class object Raises: N/A Record channel_input results and elapsed time of channel input/reading output Args: result: string result of channel_input Returns: N/A # noqa: DAR202 Raises: N/A Parse results with textfsm, always return structured data Returns an empty list if parsing fails! Args: N/A Returns: structured_result: empty list or parsed data from textfsm Raises: N/A Parse results with genie, always return structured data Returns an empty list if parsing fails! Args: N/A Returns: structured_result: empty list or parsed data from genie Raises: N/A | 2.846283 | 3 |
test/inverted_owg/TestDungeons.py | KrisDavie/ALttPDoorRandomizer | 42 | 6631872 | <gh_stars>10-100
from test.inverted_owg.TestInvertedOWG import TestInvertedOWG
class TestDungeons(TestInvertedOWG):
def testFirstDungeonChests(self):
self.run_location_tests([
["Hyrule Castle - Map Chest", False, []],
["Hyrule Castle - Map Chest", True, ['Beat Agahnim 1']],
["Hyrule Castle - Map Chest", True, ['Moon Pearl', 'Pegasus Boots']],
["Hyrule Castle - Map Chest", True, ['Magic Mirror', 'Pegasus Boots']],
["Sanctuary", False, []],
["Sanctuary", False, ['Beat Agahnim 1']],
["Sanctuary", True, ['Magic Mirror', 'Beat Agahnim 1']],
["Sanctuary", True, ['Lamp', 'Beat Agahnim 1', 'Small Key (Escape)']],
["Sanctuary", True, ['Moon Pearl', 'Pegasus Boots']],
["Sanctuary", True, ['Magic Mirror', 'Pegasus Boots']],
["Sewers - Secret Room - Left", False, []],
["Sewers - Secret Room - Left", True, ['Moon Pearl', 'Progressive Glove', 'Pegasus Boots']],
["Sewers - Secret Room - Left", True, ['Moon Pearl', 'Pegasus Boots', 'Lamp', 'Small Key (Escape)']],
["Sewers - Secret Room - Left", True,
['Magic Mirror', 'Pegasus Boots', 'Lamp', 'Small Key (Escape)']],
["Sewers - Secret Room - Left", True, ['Beat Agahnim 1', 'Lamp', 'Small Key (Escape)']],
["Eastern Palace - Compass Chest", False, []],
["Eastern Palace - Compass Chest", True, ['Moon Pearl', 'Pegasus Boots']],
["Eastern Palace - Compass Chest", True, ['Magic Mirror', 'Pegasus Boots']],
["Eastern Palace - Compass Chest", True, ['Beat Agahnim 1']],
["Desert Palace - Map Chest", False, []],
["Desert Palace - Map Chest", True, ['Moon Pearl', 'Pegasus Boots']],
["Desert Palace - Map Chest", True, ['Book of Mudora', 'Magic Mirror', 'Pegasus Boots']],
["Desert Palace - Boss", False, []],
["Desert Palace - Boss", False, [], ['Small Key (Desert Palace)']],
["Desert Palace - Boss", False, [], ['Big Key (Desert Palace)']],
["Desert Palace - Boss", False, [], ['Lamp', 'Fire Rod']],
["Desert Palace - Boss", True, ['Progressive Sword', 'Small Key (Desert Palace)', 'Big Key (Desert Palace)', 'Moon Pearl', 'Pegasus Boots', 'Lamp']],
["Desert Palace - Boss", True, ['Progressive Sword', 'Small Key (Desert Palace)', 'Big Key (Desert Palace)', 'Moon Pearl', 'Pegasus Boots', 'Fire Rod']],
["Tower of Hera - Basement Cage", False, []],
["Tower of Hera - Basement Cage", False, [], ['Moon Pearl']],
["Tower of Hera - Basement Cage", True, ['Pegasus Boots', 'Moon Pearl']],
["Castle Tower - Room 03", False, []],
["Castle Tower - Room 03", False, [], ['Progressive Sword', 'Hammer', 'Progressive Bow', 'Fire Rod', 'Ice Rod', 'Cane of Somaria', 'Cane of Byrna']],
["Castle Tower - Room 03", True, ['Pegasus Boots', 'Progressive Sword']],
["Castle Tower - Room 03", True, ['Pegasus Boots', 'Progressive Bow']],
#todo: Qirn Jump
#["Palace of Darkness - Shooter Room", True, []],
["Palace of Darkness - Shooter Room", True, ['Pegasus Boots']],
["Palace of Darkness - Shooter Room", True, ['Hammer']],
["Palace of Darkness - Shooter Room", True, ['Flippers']],
["Palace of Darkness - Shooter Room", True, ['Pegasus Boots', 'Progressive Glove']],
["Palace of Darkness - Shooter Room", True, ['Pegasus Boots', 'Magic Mirror']],
["Swamp Palace - Entrance", False, []],
["Swamp Palace - Entrance", False, [], ['Magic Mirror']],
["Swamp Palace - Entrance", False, [], ['Flippers']],
["Swamp Palace - Entrance", True, ['Magic Mirror', 'Flippers', 'Pegasus Boots']],
["Swamp Palace - Entrance", True, ['Magic Mirror', 'Flippers', 'Beat Agahnim 1']],
["Skull Woods - Compass Chest", True, []],
["Skull Woods - Big Chest", False, []],
["Skull Woods - Big Chest", False, [], ['Big Key (Skull Woods)']],
["Skull Woods - Big Chest", True, ['Big Key (Skull Woods)']],
["Skull Woods - Big Key Chest", True, []],
["Skull Woods - Bridge Room", False, []],
["Skull Woods - Bridge Room", False, [], ['Fire Rod']],
["Skull Woods - Bridge Room", True, ['Fire Rod']],
["Thieves' Town - Map Chest", True, []],
["Ice Palace - Compass Chest", False, []],
["Ice Palace - Compass Chest", False, [], ['Fire Rod', 'Bombos', 'Progressive Sword']],
#todo: Qirn Jump
#["Ice Palace - Compass Chest", True, ['Fire Rod']],
#["Ice Palace - Compass Chest", True, ['Bombos', 'Progressive Sword']],
["Ice Palace - Compass Chest", True, ['Pegasus Boots', 'Fire Rod']],
["Ice Palace - Compass Chest", True, ['Pegasus Boots', 'Bombos', 'Progressive Sword', 'Small Key (Ice Palace)']],
["Misery Mire - Bridge Chest", False, []],
["Misery Mire - Bridge Chest", False, [], ['Ether']],
["Misery Mire - Bridge Chest", False, [], ['Progressive Sword']],
["Misery Mire - Bridge Chest", True, ['Pegasus Boots', 'Ether', 'Progressive Sword']],
["Turtle Rock - Compass Chest", False, []],
["Turtle Rock - Compass Chest", False, [], ['Cane of Somaria']],
["Turtle Rock - Compass Chest", True, ['Pegasus Boots', 'Magic Mirror', 'Moon Pearl', 'Cane of Somaria', 'Small Key (Turtle Rock)', 'Small Key (Turtle Rock)', 'Small Key (Turtle Rock)', 'Small Key (Turtle Rock)']],
["Turtle Rock - Compass Chest", True, ['Pegasus Boots', 'Quake', 'Progressive Sword', 'Cane of Somaria']],
["Turtle Rock - Chain Chomps", False, []],
["Turtle Rock - Chain Chomps", True, ['Pegasus Boots', 'Magic Mirror', 'Moon Pearl']],
["Turtle Rock - Crystaroller Room", False, []],
["Turtle Rock - Crystaroller Room", True, ['Pegasus Boots', 'Magic Mirror', 'Moon Pearl', 'Big Key (Turtle Rock)']],
["Turtle Rock - Crystaroller Room", True, ['Pegasus Boots', 'Magic Mirror', 'Moon Pearl', 'Lamp', 'Cane of Somaria']],
["Ganons Tower - Hope Room - Left", False, []],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 1']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 2']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 3']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 4']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 5']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 6']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 7']],
#todo: smarter dungeon revive logic
#["Ganons Tower - Hope Room - Left", True, ['Beat Agahnim 1', 'Hookshot', 'Crystal 1', 'Crystal 2', 'Crystal 3', 'Crystal 4', 'Crystal 5', 'Crystal 6', 'Crystal 7']],
#["Ganons Tower - Hope Room - Left", True, ['Pegasus Boots', 'Magic Mirror', 'Hookshot', 'Crystal 1', 'Crystal 2', 'Crystal 3', 'Crystal 4', 'Crystal 5', 'Crystal 6', 'Crystal 7']],
["Ganons Tower - Hope Room - Left", True, ['Pegasus Boots', 'Moon Pearl', 'Hookshot', 'Crystal 1', 'Crystal 2', 'Crystal 3', 'Crystal 4', 'Crystal 5', 'Crystal 6', 'Crystal 7']],
]) | from test.inverted_owg.TestInvertedOWG import TestInvertedOWG
class TestDungeons(TestInvertedOWG):
def testFirstDungeonChests(self):
self.run_location_tests([
["Hyrule Castle - Map Chest", False, []],
["Hyrule Castle - Map Chest", True, ['Beat Agahnim 1']],
["Hyrule Castle - Map Chest", True, ['Moon Pearl', 'Pegasus Boots']],
["Hyrule Castle - Map Chest", True, ['Magic Mirror', 'Pegasus Boots']],
["Sanctuary", False, []],
["Sanctuary", False, ['Beat Agahnim 1']],
["Sanctuary", True, ['Magic Mirror', 'Beat Agahnim 1']],
["Sanctuary", True, ['Lamp', 'Beat Agahnim 1', 'Small Key (Escape)']],
["Sanctuary", True, ['Moon Pearl', 'Pegasus Boots']],
["Sanctuary", True, ['Magic Mirror', 'Pegasus Boots']],
["Sewers - Secret Room - Left", False, []],
["Sewers - Secret Room - Left", True, ['Moon Pearl', 'Progressive Glove', 'Pegasus Boots']],
["Sewers - Secret Room - Left", True, ['Moon Pearl', 'Pegasus Boots', 'Lamp', 'Small Key (Escape)']],
["Sewers - Secret Room - Left", True,
['Magic Mirror', 'Pegasus Boots', 'Lamp', 'Small Key (Escape)']],
["Sewers - Secret Room - Left", True, ['Beat Agahnim 1', 'Lamp', 'Small Key (Escape)']],
["Eastern Palace - Compass Chest", False, []],
["Eastern Palace - Compass Chest", True, ['Moon Pearl', 'Pegasus Boots']],
["Eastern Palace - Compass Chest", True, ['Magic Mirror', 'Pegasus Boots']],
["Eastern Palace - Compass Chest", True, ['Beat Agahnim 1']],
["Desert Palace - Map Chest", False, []],
["Desert Palace - Map Chest", True, ['Moon Pearl', 'Pegasus Boots']],
["Desert Palace - Map Chest", True, ['Book of Mudora', 'Magic Mirror', 'Pegasus Boots']],
["Desert Palace - Boss", False, []],
["Desert Palace - Boss", False, [], ['Small Key (Desert Palace)']],
["Desert Palace - Boss", False, [], ['Big Key (Desert Palace)']],
["Desert Palace - Boss", False, [], ['Lamp', 'Fire Rod']],
["Desert Palace - Boss", True, ['Progressive Sword', 'Small Key (Desert Palace)', 'Big Key (Desert Palace)', 'Moon Pearl', 'Pegasus Boots', 'Lamp']],
["Desert Palace - Boss", True, ['Progressive Sword', 'Small Key (Desert Palace)', 'Big Key (Desert Palace)', 'Moon Pearl', 'Pegasus Boots', 'Fire Rod']],
["Tower of Hera - Basement Cage", False, []],
["Tower of Hera - Basement Cage", False, [], ['Moon Pearl']],
["Tower of Hera - Basement Cage", True, ['Pegasus Boots', 'Moon Pearl']],
["Castle Tower - Room 03", False, []],
["Castle Tower - Room 03", False, [], ['Progressive Sword', 'Hammer', 'Progressive Bow', 'Fire Rod', 'Ice Rod', 'Cane of Somaria', 'Cane of Byrna']],
["Castle Tower - Room 03", True, ['Pegasus Boots', 'Progressive Sword']],
["Castle Tower - Room 03", True, ['Pegasus Boots', 'Progressive Bow']],
#todo: Qirn Jump
#["Palace of Darkness - Shooter Room", True, []],
["Palace of Darkness - Shooter Room", True, ['Pegasus Boots']],
["Palace of Darkness - Shooter Room", True, ['Hammer']],
["Palace of Darkness - Shooter Room", True, ['Flippers']],
["Palace of Darkness - Shooter Room", True, ['Pegasus Boots', 'Progressive Glove']],
["Palace of Darkness - Shooter Room", True, ['Pegasus Boots', 'Magic Mirror']],
["Swamp Palace - Entrance", False, []],
["Swamp Palace - Entrance", False, [], ['Magic Mirror']],
["Swamp Palace - Entrance", False, [], ['Flippers']],
["Swamp Palace - Entrance", True, ['Magic Mirror', 'Flippers', 'Pegasus Boots']],
["Swamp Palace - Entrance", True, ['Magic Mirror', 'Flippers', 'Beat Agahnim 1']],
["Skull Woods - Compass Chest", True, []],
["Skull Woods - Big Chest", False, []],
["Skull Woods - Big Chest", False, [], ['Big Key (Skull Woods)']],
["Skull Woods - Big Chest", True, ['Big Key (Skull Woods)']],
["Skull Woods - Big Key Chest", True, []],
["Skull Woods - Bridge Room", False, []],
["Skull Woods - Bridge Room", False, [], ['Fire Rod']],
["Skull Woods - Bridge Room", True, ['Fire Rod']],
["Thieves' Town - Map Chest", True, []],
["Ice Palace - Compass Chest", False, []],
["Ice Palace - Compass Chest", False, [], ['Fire Rod', 'Bombos', 'Progressive Sword']],
#todo: Qirn Jump
#["Ice Palace - Compass Chest", True, ['Fire Rod']],
#["Ice Palace - Compass Chest", True, ['Bombos', 'Progressive Sword']],
["Ice Palace - Compass Chest", True, ['Pegasus Boots', 'Fire Rod']],
["Ice Palace - Compass Chest", True, ['Pegasus Boots', 'Bombos', 'Progressive Sword', 'Small Key (Ice Palace)']],
["Misery Mire - Bridge Chest", False, []],
["Misery Mire - Bridge Chest", False, [], ['Ether']],
["Misery Mire - Bridge Chest", False, [], ['Progressive Sword']],
["Misery Mire - Bridge Chest", True, ['Pegasus Boots', 'Ether', 'Progressive Sword']],
["Turtle Rock - Compass Chest", False, []],
["Turtle Rock - Compass Chest", False, [], ['Cane of Somaria']],
["Turtle Rock - Compass Chest", True, ['Pegasus Boots', 'Magic Mirror', 'Moon Pearl', 'Cane of Somaria', 'Small Key (Turtle Rock)', 'Small Key (Turtle Rock)', 'Small Key (Turtle Rock)', 'Small Key (Turtle Rock)']],
["Turtle Rock - Compass Chest", True, ['Pegasus Boots', 'Quake', 'Progressive Sword', 'Cane of Somaria']],
["Turtle Rock - Chain Chomps", False, []],
["Turtle Rock - Chain Chomps", True, ['Pegasus Boots', 'Magic Mirror', 'Moon Pearl']],
["Turtle Rock - Crystaroller Room", False, []],
["Turtle Rock - Crystaroller Room", True, ['Pegasus Boots', 'Magic Mirror', 'Moon Pearl', 'Big Key (Turtle Rock)']],
["Turtle Rock - Crystaroller Room", True, ['Pegasus Boots', 'Magic Mirror', 'Moon Pearl', 'Lamp', 'Cane of Somaria']],
["Ganons Tower - Hope Room - Left", False, []],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 1']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 2']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 3']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 4']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 5']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 6']],
["Ganons Tower - Hope Room - Left", False, [], ['Crystal 7']],
#todo: smarter dungeon revive logic
#["Ganons Tower - Hope Room - Left", True, ['Beat Agahnim 1', 'Hookshot', 'Crystal 1', 'Crystal 2', 'Crystal 3', 'Crystal 4', 'Crystal 5', 'Crystal 6', 'Crystal 7']],
#["Ganons Tower - Hope Room - Left", True, ['Pegasus Boots', 'Magic Mirror', 'Hookshot', 'Crystal 1', 'Crystal 2', 'Crystal 3', 'Crystal 4', 'Crystal 5', 'Crystal 6', 'Crystal 7']],
["Ganons Tower - Hope Room - Left", True, ['Pegasus Boots', 'Moon Pearl', 'Hookshot', 'Crystal 1', 'Crystal 2', 'Crystal 3', 'Crystal 4', 'Crystal 5', 'Crystal 6', 'Crystal 7']],
]) | en | 0.323858 | #todo: Qirn Jump #["Palace of Darkness - Shooter Room", True, []], #todo: Qirn Jump #["Ice Palace - Compass Chest", True, ['Fire Rod']], #["Ice Palace - Compass Chest", True, ['Bombos', 'Progressive Sword']], #todo: smarter dungeon revive logic #["Ganons Tower - Hope Room - Left", True, ['Beat Agahnim 1', 'Hookshot', 'Crystal 1', 'Crystal 2', 'Crystal 3', 'Crystal 4', 'Crystal 5', 'Crystal 6', 'Crystal 7']], #["Ganons Tower - Hope Room - Left", True, ['Pegasus Boots', 'Magic Mirror', 'Hookshot', 'Crystal 1', 'Crystal 2', 'Crystal 3', 'Crystal 4', 'Crystal 5', 'Crystal 6', 'Crystal 7']], | 2.054377 | 2 |
code/model/nell_eval.py | yuan-pku/reasoning-gan | 0 | 6631873 | from __future__ import division
import csv
from collections import defaultdict
import random
import numpy as np
def nell_eval(model_answers, correct_answers):
test_data_path = correct_answers
test_prediction_path = model_answers
f = open(test_data_path)
test_data = f.readlines()
f.close()
# load prediction scores
preds = {}
with open(test_prediction_path) as f:
for line in f:
e1, e2, score = line.strip().split()
score = float(score)
if (e1, e2) not in preds:
preds[(e1, e2)] = score
else:
if preds[(e1, e2)] < score:
preds[(e1, e2)] = score
def get_pred_score(e1, e2):
if (e1, e2) in preds:
return preds[(e1,e2)]
else:
return -np.inf
test_pairs = defaultdict(lambda : defaultdict(int))
for line in test_data:
e1 = line.split(',')[0].replace('thing$', '')
e2 = line.split(',')[1].split(':')[0].replace('thing$', '')
label = 1 if line[-2] == '+' else 0
test_pairs[e1][e2] = label
aps = []
score_all = []
# calculate MAP
for e1 in test_pairs:
y_true = []
y_score = []
for e2 in test_pairs[e1]:
score = get_pred_score(e1, e2)
score_all.append(score)
y_score.append(score)
y_true.append(test_pairs[e1][e2])
count = list(zip(y_score, y_true))
count.sort(key=lambda x: x[0], reverse=True)
ranks = []
correct = 0
for idx_, item in enumerate(count):
if item[1] == 1:
correct += 1
ranks.append(correct / (1.0 + idx_))
if len(ranks) == 0:
ranks.append(0)
aps.append(np.mean(ranks))
mean_ap = np.mean(aps)
print('MINERVA MAP: {} ({} queries evaluated)'.format(mean_ap, len(aps)))
return mean_ap
| from __future__ import division
import csv
from collections import defaultdict
import random
import numpy as np
def nell_eval(model_answers, correct_answers):
test_data_path = correct_answers
test_prediction_path = model_answers
f = open(test_data_path)
test_data = f.readlines()
f.close()
# load prediction scores
preds = {}
with open(test_prediction_path) as f:
for line in f:
e1, e2, score = line.strip().split()
score = float(score)
if (e1, e2) not in preds:
preds[(e1, e2)] = score
else:
if preds[(e1, e2)] < score:
preds[(e1, e2)] = score
def get_pred_score(e1, e2):
if (e1, e2) in preds:
return preds[(e1,e2)]
else:
return -np.inf
test_pairs = defaultdict(lambda : defaultdict(int))
for line in test_data:
e1 = line.split(',')[0].replace('thing$', '')
e2 = line.split(',')[1].split(':')[0].replace('thing$', '')
label = 1 if line[-2] == '+' else 0
test_pairs[e1][e2] = label
aps = []
score_all = []
# calculate MAP
for e1 in test_pairs:
y_true = []
y_score = []
for e2 in test_pairs[e1]:
score = get_pred_score(e1, e2)
score_all.append(score)
y_score.append(score)
y_true.append(test_pairs[e1][e2])
count = list(zip(y_score, y_true))
count.sort(key=lambda x: x[0], reverse=True)
ranks = []
correct = 0
for idx_, item in enumerate(count):
if item[1] == 1:
correct += 1
ranks.append(correct / (1.0 + idx_))
if len(ranks) == 0:
ranks.append(0)
aps.append(np.mean(ranks))
mean_ap = np.mean(aps)
print('MINERVA MAP: {} ({} queries evaluated)'.format(mean_ap, len(aps)))
return mean_ap
| en | 0.625658 | # load prediction scores # calculate MAP | 2.768452 | 3 |
gala-spider/spider/data_process/processor.py | seandong37tt4qu/jeszhengq | 0 | 6631874 | from typing import List
from abc import ABCMeta
from abc import abstractmethod
from spider.entity_mgt.models import ObserveEntity
class DataProcessor(metaclass=ABCMeta):
@abstractmethod
def get_observe_entities(self, timestamp: float = None) -> List[ObserveEntity]:
pass
| from typing import List
from abc import ABCMeta
from abc import abstractmethod
from spider.entity_mgt.models import ObserveEntity
class DataProcessor(metaclass=ABCMeta):
@abstractmethod
def get_observe_entities(self, timestamp: float = None) -> List[ObserveEntity]:
pass
| none | 1 | 2.508327 | 3 |
|
TrimStreamlinesCF.py | stardustscience/ParaviewCustomModules | 1 | 6631875 | <reponame>stardustscience/ParaviewCustomModules
## ========================================================================== ##
## Copyright (c) 2019 The University of Texas at Austin. ##
## All rights reserved. ##
## ##
## Licensed under the Apache License, Version 2.0 (the "License"); ##
## you may not use this file except in compliance with the License. ##
## A copy of the License is included with this software in the file LICENSE. ##
## If your copy does not contain the License, you may obtain a copy of the ##
## License at: ##
## ##
## https://www.apache.org/licenses/LICENSE-2.0 ##
## ##
## Unless required by applicable law or agreed to in writing, software ##
## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT ##
## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ##
## See the License for the specific language governing permissions and ##
## limitations under the License. ##
## ##
## ========================================================================== ##
Name = 'TrimStreamlines'
Label = 'Trim Streamlines'
Help = 'Trim streamlines based to a range of IntegrationTime'
NumberOfInputs = 1
InputDataType = 'vtkUnstructuredGrid'
OutputDataType = 'vtkUnstructuredGrid'
ExtraXml = ''
Properties = dict(
start = -1,
end = -1
)
def RequestData():
import numpy as np
from vtk.numpy_interface import dataset_adapter as dsa
sl = self.GetPolyDataInput()
nsl = dsa.WrapDataObject(sl)
itime = nsl.PointData['IntegrationTime']
nv = nsl.Cells[nsl.CellLocations] # number of verts in each line
ns = nsl.CellLocations + 1 # index of first vertex in each line
ne = ns + nv # index one past the last vertex
olines = [nsl.Cells[i:j] for i,j in zip(ns, ne)]
nlines = []
iarrays = {'points': nsl.Points} # initialize source arrays with input points
oarrays = {'points': []}
for n in nsl.PointData.keys():
iarrays[n] = nsl.PointData[n] # add input point data arrays to source arrays
oarrays[n] = [] # add empty destination arrays
knt = 0
for line in olines:
if start != -1: line = [l for l in line if itime[l] > start]
if end != -1: line = [l for l in line if itime[l] < end]
for n in iarrays.keys(): oarrays[n].append(iarrays[n][line])
nlines.append(range(knt, knt+len(line)))
knt = knt + len(line)
tsl = vtk.vtkUnstructuredGrid()
line_lengths = [len(l)+1 for l in nlines] # number of vertices + 1 for count
ct = dsa.numpyTovtkDataArray(np.array([vtk.VTK_POLY_LINE]*len(nlines)).astype('u1'))
co = dsa.numpy_support.numpy_to_vtkIdTypeArray(np.hstack(([0], np.cumsum(line_lengths)[:-1])))
ca = vtk.vtkCellArray()
for l in nlines:
ca.InsertNextCell(len(l), l)
tsl.SetCells(ct, co, ca)
cid = dsa.numpyTovtkDataArray(np.arange(len(nlines)).astype('i4'))
cid.SetName('cell id')
tsl.GetCellData().AddArray(cid)
ptsa = np.concatenate(oarrays['points']).reshape((-1, 3)).astype('f4')
op = vtk.vtkPoints()
op.SetNumberOfPoints(ptsa.shape[0])
for i, p in enumerate(ptsa):
op.InsertPoint(i, p[0], p[1], p[2])
tsl.SetPoints(op)
for n in oarrays:
if n != 'points':
a = dsa.numpyTovtkDataArray(np.concatenate(oarrays[n]))
a.SetName(n)
tsl.GetPointData().AddArray(a)
self.GetUnstructuredGridOutput().ShallowCopy(tsl)
| ## ========================================================================== ##
## Copyright (c) 2019 The University of Texas at Austin. ##
## All rights reserved. ##
## ##
## Licensed under the Apache License, Version 2.0 (the "License"); ##
## you may not use this file except in compliance with the License. ##
## A copy of the License is included with this software in the file LICENSE. ##
## If your copy does not contain the License, you may obtain a copy of the ##
## License at: ##
## ##
## https://www.apache.org/licenses/LICENSE-2.0 ##
## ##
## Unless required by applicable law or agreed to in writing, software ##
## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT ##
## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ##
## See the License for the specific language governing permissions and ##
## limitations under the License. ##
## ##
## ========================================================================== ##
Name = 'TrimStreamlines'
Label = 'Trim Streamlines'
Help = 'Trim streamlines based to a range of IntegrationTime'
NumberOfInputs = 1
InputDataType = 'vtkUnstructuredGrid'
OutputDataType = 'vtkUnstructuredGrid'
ExtraXml = ''
Properties = dict(
start = -1,
end = -1
)
def RequestData():
import numpy as np
from vtk.numpy_interface import dataset_adapter as dsa
sl = self.GetPolyDataInput()
nsl = dsa.WrapDataObject(sl)
itime = nsl.PointData['IntegrationTime']
nv = nsl.Cells[nsl.CellLocations] # number of verts in each line
ns = nsl.CellLocations + 1 # index of first vertex in each line
ne = ns + nv # index one past the last vertex
olines = [nsl.Cells[i:j] for i,j in zip(ns, ne)]
nlines = []
iarrays = {'points': nsl.Points} # initialize source arrays with input points
oarrays = {'points': []}
for n in nsl.PointData.keys():
iarrays[n] = nsl.PointData[n] # add input point data arrays to source arrays
oarrays[n] = [] # add empty destination arrays
knt = 0
for line in olines:
if start != -1: line = [l for l in line if itime[l] > start]
if end != -1: line = [l for l in line if itime[l] < end]
for n in iarrays.keys(): oarrays[n].append(iarrays[n][line])
nlines.append(range(knt, knt+len(line)))
knt = knt + len(line)
tsl = vtk.vtkUnstructuredGrid()
line_lengths = [len(l)+1 for l in nlines] # number of vertices + 1 for count
ct = dsa.numpyTovtkDataArray(np.array([vtk.VTK_POLY_LINE]*len(nlines)).astype('u1'))
co = dsa.numpy_support.numpy_to_vtkIdTypeArray(np.hstack(([0], np.cumsum(line_lengths)[:-1])))
ca = vtk.vtkCellArray()
for l in nlines:
ca.InsertNextCell(len(l), l)
tsl.SetCells(ct, co, ca)
cid = dsa.numpyTovtkDataArray(np.arange(len(nlines)).astype('i4'))
cid.SetName('cell id')
tsl.GetCellData().AddArray(cid)
ptsa = np.concatenate(oarrays['points']).reshape((-1, 3)).astype('f4')
op = vtk.vtkPoints()
op.SetNumberOfPoints(ptsa.shape[0])
for i, p in enumerate(ptsa):
op.InsertPoint(i, p[0], p[1], p[2])
tsl.SetPoints(op)
for n in oarrays:
if n != 'points':
a = dsa.numpyTovtkDataArray(np.concatenate(oarrays[n]))
a.SetName(n)
tsl.GetPointData().AddArray(a)
self.GetUnstructuredGridOutput().ShallowCopy(tsl) | en | 0.684769 | ## ========================================================================== ## ## Copyright (c) 2019 The University of Texas at Austin. ## ## All rights reserved. ## ## ## ## Licensed under the Apache License, Version 2.0 (the "License"); ## ## you may not use this file except in compliance with the License. ## ## A copy of the License is included with this software in the file LICENSE. ## ## If your copy does not contain the License, you may obtain a copy of the ## ## License at: ## ## ## ## https://www.apache.org/licenses/LICENSE-2.0 ## ## ## ## Unless required by applicable law or agreed to in writing, software ## ## distributed under the License is distributed on an "AS IS" BASIS, WITHOUT ## ## WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. ## ## See the License for the specific language governing permissions and ## ## limitations under the License. ## ## ## ## ========================================================================== ## # number of verts in each line # index of first vertex in each line # index one past the last vertex # initialize source arrays with input points # add input point data arrays to source arrays # add empty destination arrays # number of vertices + 1 for count | 1.912005 | 2 |
paddlenlp/transformers/nezha/modeling.py | pangyoki/PaddleNLP | 1 | 6631876 | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Copyright 2020 Huawei Technologies Co., Ltd.
# Copyright 2018 The Google AI Language Team Authors, The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import math
import numpy as np
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from paddlenlp.transformers import PretrainedModel, register_base_model
__all__ = [
'NeZhaModel', "NeZhaPretrainedModel", 'NeZhaForPretraining',
'NeZhaForSequenceClassification', 'NeZhaPretrainingHeads',
'NeZhaForTokenClassification', 'NeZhaForQuestionAnswering',
'NeZhaForMultipleChoice'
]
def get_activation(activation_string):
if activation_string in ACT2FN:
return ACT2FN[activation_string]
else:
raise KeyError("function {} not found in ACT2FN mapping {}".format(
activation_string, list(ACT2FN.keys())))
def mish(x):
return x * F.tanh(F.softplus(x))
def linear_act(x):
return x
def swish(x):
return x * F.sigmoid(x)
def gelu_new(x):
"""
Implementation of the GELU activation function currently in Google BERT repo (identical to OpenAI GPT). Also see
the Gaussian Error Linear Units paper: https://arxiv.org/abs/1606.08415
"""
return 0.5 * x * (1.0 + paddle.tanh(
math.sqrt(2.0 / math.pi) * (x + 0.044715 * paddle.pow(x, 3.0))))
ACT2FN = {
"relu": F.relu,
"gelu": F.gelu,
"gelu_new": gelu_new,
"tanh": F.tanh,
"sigmoid": F.sigmoid,
"mish": mish,
"linear": linear_act,
"swish": swish,
}
class NeZhaAttention(nn.Layer):
def __init__(self,
hidden_size=768,
num_attention_heads=12,
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_relative_position=64,
layer_norm_eps=1e-12):
super(NeZhaAttention, self).__init__()
if hidden_size % num_attention_heads != 0:
raise ValueError(
"The hidden size (%d) is not a multiple of the number of attention "
"heads (%d)" % (hidden_size, num_attention_heads))
self.num_attention_heads = num_attention_heads
self.attention_head_size = int(hidden_size / num_attention_heads)
self.all_head_size = self.num_attention_heads * self.attention_head_size
self.query = nn.Linear(hidden_size, self.all_head_size)
self.key = nn.Linear(hidden_size, self.all_head_size)
self.value = nn.Linear(hidden_size, self.all_head_size)
self.relative_positions_embeddings = self.generate_relative_positions_embeddings(
length=512,
depth=self.attention_head_size,
max_relative_position=max_relative_position)
self.attention_dropout = nn.Dropout(attention_probs_dropout_prob)
self.dense = nn.Linear(hidden_size, hidden_size)
self.layer_norm = nn.LayerNorm(hidden_size, epsilon=layer_norm_eps)
self.output_dropout = nn.Dropout(hidden_dropout_prob)
def generate_relative_positions_embeddings(self,
length,
depth,
max_relative_position=127):
vocab_size = max_relative_position * 2 + 1
range_vec = paddle.arange(length)
range_mat = paddle.tile(
range_vec, repeat_times=[length]).reshape((length, length))
distance_mat = range_mat - paddle.t(range_mat)
distance_mat_clipped = paddle.clip(
distance_mat.astype('float32'), -max_relative_position,
max_relative_position)
final_mat = distance_mat_clipped + max_relative_position
embeddings_table = np.zeros([vocab_size, depth])
for pos in range(vocab_size):
for i in range(depth // 2):
embeddings_table[pos, 2 * i] = np.sin(pos / np.power(10000, 2 *
i / depth))
embeddings_table[pos, 2 * i + 1] = np.cos(pos / np.power(
10000, 2 * i / depth))
embeddings_table_tensor = paddle.to_tensor(
embeddings_table, dtype='float32')
flat_relative_positions_matrix = final_mat.reshape((-1, ))
one_hot_relative_positions_matrix = paddle.nn.functional.one_hot(
flat_relative_positions_matrix.astype('int64'),
num_classes=vocab_size)
embeddings = paddle.matmul(one_hot_relative_positions_matrix,
embeddings_table_tensor)
my_shape = final_mat.shape
my_shape.append(depth)
embeddings = embeddings.reshape(my_shape)
return embeddings
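    # Shape sketch (illustrative): with length=512 and depth=attention_head_size,
    # the returned tensor has shape [512, 512, depth]; entry [i, j] is the
    # sinusoidal embedding of clip(j - i, -max_relative_position,
    # max_relative_position) shifted into [0, 2 * max_relative_position].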
def transpose_for_scores(self, x):
new_x_shape = x.shape[:-1] + [
self.num_attention_heads, self.attention_head_size
]
x = x.reshape(new_x_shape)
return x.transpose((0, 2, 1, 3))
def forward(self, hidden_states, attention_mask):
mixed_query_layer = self.query(hidden_states)
mixed_key_layer = self.key(hidden_states)
mixed_value_layer = self.value(hidden_states)
query_layer = self.transpose_for_scores(mixed_query_layer)
key_layer = self.transpose_for_scores(mixed_key_layer)
value_layer = self.transpose_for_scores(mixed_value_layer)
# Take the dot product between "query" and "key" to get the raw attention scores.
attention_scores = paddle.matmul(query_layer,
key_layer.transpose((0, 1, 3, 2)))
batch_size, num_attention_heads, from_seq_length, to_seq_length = attention_scores.shape
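        # Functional relative attention, part 1: add content-to-position scores by
        # projecting each query onto the sinusoidal relative-position key embeddings.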
relations_keys = self.relative_positions_embeddings.detach().clone(
)[:to_seq_length, :to_seq_length, :]
query_layer_t = query_layer.transpose((2, 0, 1, 3))
query_layer_r = query_layer_t.reshape(
(from_seq_length, batch_size * num_attention_heads,
self.attention_head_size))
key_position_scores = paddle.matmul(query_layer_r,
relations_keys.transpose((0, 2, 1)))
key_position_scores_r = key_position_scores.reshape(
(from_seq_length, batch_size, num_attention_heads, from_seq_length))
key_position_scores_r_t = key_position_scores_r.transpose((1, 2, 0, 3))
attention_scores = attention_scores + key_position_scores_r_t
attention_scores = attention_scores / math.sqrt(
self.attention_head_size)
attention_scores = attention_scores + attention_mask
# Normalize the attention scores to probabilities.
attention_probs = nn.Softmax(axis=-1)(attention_scores)
# This is actually dropping out entire tokens to attend to, which might
# seem a bit unusual, but is taken from the original Transformer paper.
attention_probs = self.attention_dropout(attention_probs)
context_layer = paddle.matmul(attention_probs, value_layer)
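        # Functional relative attention, part 2: mix relative-position value
        # embeddings into the context, weighted by the same attention probabilities.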
relations_values = self.relative_positions_embeddings.clone(
)[:to_seq_length, :to_seq_length, :]
attention_probs_t = attention_probs.transpose((2, 0, 1, 3))
attentions_probs_r = attention_probs_t.reshape(
(from_seq_length, batch_size * num_attention_heads, to_seq_length))
value_position_scores = paddle.matmul(attentions_probs_r,
relations_values)
value_position_scores_r = value_position_scores.reshape(
(from_seq_length, batch_size, num_attention_heads,
self.attention_head_size))
value_position_scores_r_t = value_position_scores_r.transpose(
(1, 2, 0, 3))
context_layer = context_layer + value_position_scores_r_t
context_layer = context_layer.transpose((0, 2, 1, 3))
new_context_layer_shape = context_layer.shape[:-2] + [
self.all_head_size
]
context_layer = context_layer.reshape(new_context_layer_shape)
projected_context_layer = self.dense(context_layer)
projected_context_layer_dropout = self.output_dropout(
projected_context_layer)
layer_normed_context_layer = self.layer_norm(
hidden_states + projected_context_layer_dropout)
return layer_normed_context_layer, attention_scores
class NeZhaLayer(nn.Layer):
def __init__(self,
hidden_size=768,
num_attention_heads=12,
intermediate_size=3072,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_relative_position=64,
layer_norm_eps=1e-12):
super(NeZhaLayer, self).__init__()
self.seq_len_dim = 1
self.layer_norm = nn.LayerNorm(hidden_size, epsilon=layer_norm_eps)
self.attention = NeZhaAttention(
hidden_size=hidden_size,
num_attention_heads=num_attention_heads,
hidden_dropout_prob=hidden_dropout_prob,
attention_probs_dropout_prob=attention_probs_dropout_prob,
max_relative_position=max_relative_position,
layer_norm_eps=layer_norm_eps)
self.ffn = nn.Linear(hidden_size, intermediate_size)
self.ffn_output = nn.Linear(intermediate_size, hidden_size)
self.activation = ACT2FN[hidden_act]
self.dropout = nn.Dropout(hidden_dropout_prob)
def forward(self, hidden_states, attention_mask=None):
attention_output, layer_att = self.attention(hidden_states,
attention_mask)
ffn_output = self.ffn(attention_output)
ffn_output = self.activation(ffn_output)
ffn_output = self.ffn_output(ffn_output)
ffn_output_dropout = self.dropout(ffn_output)
hidden_states = self.layer_norm(ffn_output_dropout + attention_output)
return hidden_states, layer_att
class NeZhaEncoder(nn.Layer):
def __init__(self,
hidden_size=768,
num_hidden_layers=12,
num_attention_heads=12,
intermediate_size=3072,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_relative_position=64,
                 layer_norm_eps=1e-12):
super(NeZhaEncoder, self).__init__()
layer = NeZhaLayer(
hidden_size=hidden_size,
num_attention_heads=num_attention_heads,
intermediate_size=intermediate_size,
hidden_act=hidden_act,
hidden_dropout_prob=hidden_dropout_prob,
attention_probs_dropout_prob=attention_probs_dropout_prob,
max_relative_position=max_relative_position,
layer_norm_eps=layer_norm_eps)
self.layer = nn.LayerList(
[copy.deepcopy(layer) for _ in range(num_hidden_layers)])
def forward(self, hidden_states, attention_mask):
all_encoder_layers = []
all_encoder_att = []
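        # Record the input to each layer; together with the final append below,
        # all_encoder_layers ends up holding num_hidden_layers + 1 tensors.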
for i, layer_module in enumerate(self.layer):
all_encoder_layers.append(hidden_states)
hidden_states, layer_att = layer_module(all_encoder_layers[i],
attention_mask)
all_encoder_att.append(layer_att)
all_encoder_layers.append(hidden_states)
return all_encoder_layers, all_encoder_att
class NeZhaEmbeddings(nn.Layer):
def __init__(self,
vocab_size,
hidden_size=768,
hidden_dropout_prob=0.1,
max_position_embeddings=512,
type_vocab_size=16,
use_relative_position=True):
super(NeZhaEmbeddings, self).__init__()
self.use_relative_position = use_relative_position
self.word_embeddings = nn.Embedding(vocab_size, hidden_size)
if not use_relative_position:
self.position_embeddings = nn.Embedding(max_position_embeddings,
hidden_size)
self.token_type_embeddings = nn.Embedding(type_vocab_size, hidden_size)
self.layer_norm = nn.LayerNorm(hidden_size)
self.dropout = nn.Dropout(hidden_dropout_prob)
def forward(self, input_ids, token_type_ids=None):
seq_length = input_ids.shape[1]
position_ids = paddle.arange(seq_length, dtype='int64')
position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
if token_type_ids is None:
token_type_ids = paddle.zeros_like(input_ids, dtype="int64")
words_embeddings = self.word_embeddings(input_ids)
embeddings = words_embeddings
if not self.use_relative_position:
position_embeddings = self.position_embeddings(position_ids)
embeddings += position_embeddings
token_type_embeddings = self.token_type_embeddings(token_type_ids)
embeddings += token_type_embeddings
embeddings = self.layer_norm(embeddings)
embeddings = self.dropout(embeddings)
return embeddings
class NeZhaPooler(nn.Layer):
def __init__(self, hidden_size):
super(NeZhaPooler, self).__init__()
self.dense = nn.Linear(hidden_size, hidden_size)
self.activation = nn.Tanh()
def forward(self, hidden_states):
# We "pool" the model by simply taking the hidden state corresponding
# to the first token.
first_token_tensor = hidden_states[:, 0]
pooled_output = self.dense(first_token_tensor)
pooled_output = self.activation(pooled_output)
return pooled_output
class NeZhaPretrainedModel(PretrainedModel):
"""
An abstract class for pretrained NeZha models. It provides NeZha related
`model_config_file`, `pretrained_init_configuration`, `resource_files_names`,
`pretrained_resource_files_map`, `base_model_prefix` for downloading and
loading pretrained models.
See :class:`~paddlenlp.transformers.model_utils.PretrainedModel` for more details.
"""
model_config_file = "model_config.json"
pretrained_init_configuration = {
"nezha-base-chinese": {
"vocab_size": 21128,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"attention_probs_dropout_prob": 0.1,
"max_position_embeddings": 512,
"max_relative_position": 64,
"type_vocab_size": 2,
"initializer_range": 0.02,
"use_relative_position": True
},
"nezha-large-chinese": {
"vocab_size": 21128,
"hidden_size": 1024,
"num_hidden_layers": 24,
"num_attention_heads": 16,
"intermediate_size": 4096,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"attention_probs_dropout_prob": 0.1,
"max_position_embeddings": 512,
"max_relative_position": 64,
"type_vocab_size": 2,
"initializer_range": 0.02,
"use_relative_position": True
},
"nezha-base-wwm-chinese": {
"vocab_size": 21128,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"attention_probs_dropout_prob": 0.1,
"max_position_embeddings": 512,
"max_relative_position": 64,
"type_vocab_size": 2,
"initializer_range": 0.02,
"use_relative_position": True
},
"nezha-large-wwm-chinese": {
"vocab_size": 21128,
"hidden_size": 1024,
"num_hidden_layers": 24,
"num_attention_heads": 16,
"intermediate_size": 4096,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"attention_probs_dropout_prob": 0.1,
"max_position_embeddings": 512,
"max_relative_position": 64,
"type_vocab_size": 2,
"initializer_range": 0.02,
"use_relative_position": True
},
}
resource_files_names = {"model_state": "model_state.pdparams"}
pretrained_resource_files_map = {
"model_state": {
"nezha-base-chinese":
"https://paddlenlp.bj.bcebos.com/models/transformers/nezha/nezha-base-chinese.pdparams",
"nezha-large-chinese":
"https://paddlenlp.bj.bcebos.com/models/transformers/nezha/nezha-large-chinese.pdparams",
"nezha-base-wwm-chinese":
"https://paddlenlp.bj.bcebos.com/models/transformers/nezha/nezha-base-wwm-chinese.pdparams",
"nezha-large-wwm-chinese":
"https://paddlenlp.bj.bcebos.com/models/transformers/nezha/nezha-large-wwm-chinese.pdparams",
}
}
base_model_prefix = "nezha"
def init_weights(self, layer):
""" Initialization hook """
if isinstance(layer, (nn.Linear, nn.Embedding)):
# In the dygraph mode, use the `set_value` to reset the parameter directly,
# and reset the `state_dict` to update parameter in static mode.
if isinstance(layer.weight, paddle.Tensor):
layer.weight.set_value(
paddle.tensor.normal(
mean=0.0,
std=self.initializer_range
if hasattr(self, "initializer_range") else
self.nezha.config["initializer_range"],
shape=layer.weight.shape))
elif isinstance(layer, nn.LayerNorm):
layer._epsilon = 1e-12
@register_base_model
class NeZhaModel(NeZhaPretrainedModel):
"""
The bare NeZha Model transformer outputting raw hidden-states.
This model inherits from :class:`~paddlenlp.transformers.model_utils.PretrainedModel`.
Refer to the superclass documentation for the generic methods.
This model is also a Paddle `paddle.nn.Layer <https://www.paddlepaddle.org.cn/documentation
/docs/en/api/paddle/fluid/dygraph/layers/Layer_en.html>`__ subclass. Use it as a regular Paddle Layer
and refer to the Paddle documentation for all matter related to general usage and behavior.
Args:
        vocab_size (int):
            Vocabulary size of `input_ids` in `NeZhaModel`. Defines the number of different tokens that can
            be represented by the `input_ids` passed when calling `NeZhaModel`.
hidden_size (int, optional):
Dimensionality of the embedding layer, encoder layers and the pooler layer. Defaults to `768`.
num_hidden_layers (int, optional):
Number of hidden layers in the Transformer encoder. Defaults to `12`.
num_attention_heads (int, optional):
Number of attention heads for each attention layer in the Transformer encoder.
Defaults to `12`.
intermediate_size (int, optional):
Dimensionality of the feed-forward (ff) layer in the encoder. Input tensors
to ff layers are firstly projected from `hidden_size` to `intermediate_size`,
and then projected back to `hidden_size`. Typically `intermediate_size` is larger than `hidden_size`.
Defaults to `3072`.
hidden_act (str, optional):
The non-linear activation function in the feed-forward layer.
``"gelu"``, ``"relu"`` and any other paddle supported activation functions
are supported. Defaults to `"gelu"`.
hidden_dropout_prob (float, optional):
The dropout probability for all fully connected layers in the embeddings and encoder.
Defaults to `0.1`.
attention_probs_dropout_prob (float, optional):
The dropout probability used in MultiHeadAttention in all encoder layers to drop some attention target.
Defaults to `0.1`.
max_position_embeddings (int, optional):
The maximum value of the dimensionality of position encoding, which dictates the maximum supported length of an input
sequence. Defaults to `512`.
        type_vocab_size (int, optional):
            The vocabulary size of `token_type_ids`.
            Defaults to `2`.
initializer_range (float, optional):
The standard deviation of the normal initializer.
Defaults to `0.02`.
.. note::
A normal_initializer initializes weight matrices as normal distributions.
See :meth:`NeZhaPretrainedModel.init_weights()` for how weights are initialized in `NeZhaModel`.
        max_relative_position (int, optional):
            The maximum relative distance encoded by the relative position embeddings;
            longer distances are clipped to this value.
            Defaults to `64`.
layer_norm_eps (float, optional):
The small value added to the variance in `LayerNorm` to prevent division by zero.
Defaults to `1e-12`.
use_relative_position (bool, optional):
Whether or not to use relative position embedding. Defaults to `True`.
"""
def __init__(self,
vocab_size,
hidden_size=768,
num_hidden_layers=12,
num_attention_heads=12,
intermediate_size=3072,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_position_embeddings=512,
type_vocab_size=2,
initializer_range=0.02,
max_relative_position=64,
layer_norm_eps=1e-12,
use_relative_position=True):
super(NeZhaModel, self).__init__()
self.initializer_range = initializer_range
self.embeddings = NeZhaEmbeddings(
vocab_size=vocab_size,
hidden_size=hidden_size,
hidden_dropout_prob=hidden_dropout_prob,
max_position_embeddings=max_position_embeddings,
type_vocab_size=type_vocab_size,
use_relative_position=use_relative_position)
self.encoder = NeZhaEncoder(
hidden_size=hidden_size,
num_hidden_layers=num_hidden_layers,
num_attention_heads=num_attention_heads,
intermediate_size=intermediate_size,
hidden_act=hidden_act,
hidden_dropout_prob=hidden_dropout_prob,
attention_probs_dropout_prob=attention_probs_dropout_prob,
max_relative_position=max_relative_position,
layer_norm_eps=layer_norm_eps)
self.pooler = NeZhaPooler(hidden_size)
self.apply(self.init_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
r'''
The NeZhaModel forward method, overrides the `__call__()` special method.
Args:
input_ids (Tensor):
Indices of input sequence tokens in the vocabulary. They are
numerical representations of tokens that build the input sequence.
Its data type should be `int64` and it has a shape of [batch_size, sequence_length].
token_type_ids (Tensor, optional):
Segment token indices to indicate different portions of the inputs.
Selected in the range ``[0, type_vocab_size - 1]``.
                If `type_vocab_size` is 2, the inputs have two portions.
Indices can either be 0 or 1:
- 0 corresponds to a *sentence A* token,
- 1 corresponds to a *sentence B* token.
Its data type should be `int64` and it has a shape of [batch_size, sequence_length].
Defaults to `None`, which means we don't add segment embeddings.
attention_mask (Tensor, optional):
Mask used in multi-head attention to avoid performing attention to some unwanted positions,
usually the paddings or the subsequent positions.
Its data type can be int, float and bool.
When the data type is bool, the `masked` tokens have `False` values and the others have `True` values.
When the data type is int, the `masked` tokens have `0` values and the others have `1` values.
When the data type is float, the `masked` tokens have `-INF` values and the others have `0` values.
It is a tensor with shape broadcasted to `[batch_size, num_attention_heads, sequence_length, sequence_length]`.
For example, its shape can be [batch_size, sequence_length], [batch_size, sequence_length, sequence_length],
[batch_size, num_attention_heads, sequence_length, sequence_length].
                NeZha uses whole-word masking, so every character of a word shares the same mask value. For
                example, if "使用" is one word, "使" and "用" get the same value.
                Defaults to `None`, which means no positions are masked out.
Returns:
tuple: Returns tuple (`sequence_output`, `pooled_output`).
With the fields:
- `sequence_output` (Tensor):
Sequence of hidden-states at the last layer of the model.
                    Its data type should be float32 and its shape is [batch_size, sequence_length, hidden_size].
- `pooled_output` (Tensor):
The output of first token (`[CLS]`) in sequence.
We "pool" the model by simply taking the hidden state corresponding to the first token.
Its data type should be float32 and its shape is [batch_size, hidden_size].
Example:
.. code-block::
import paddle
from paddlenlp.transformers import NeZhaModel, NeZhaTokenizer
tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
model = NeZhaModel.from_pretrained('nezha-base-chinese')
inputs = tokenizer("欢迎使用百度飞浆!")
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
output = model(**inputs)
'''
if attention_mask is None:
attention_mask = paddle.ones_like(input_ids)
if token_type_ids is None:
token_type_ids = paddle.zeros_like(input_ids)
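        # Broadcast the [batch_size, seq_len] mask to [batch_size, 1, 1, seq_len]
        # and turn it into an additive bias: kept positions contribute 0,
        # masked positions -10000 (effectively -inf before the softmax).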
extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2)
extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
embedding_output = self.embeddings(input_ids, token_type_ids)
encoder_outputs, _ = self.encoder(embedding_output,
extended_attention_mask)
sequence_output = encoder_outputs[-1]
pooled_output = self.pooler(sequence_output)
return sequence_output, pooled_output
class NeZhaLMPredictionHead(nn.Layer):
def __init__(self,
hidden_size,
vocab_size,
hidden_act,
embedding_weights=None,
layer_norm_eps=1e-12):
super(NeZhaLMPredictionHead, self).__init__()
self.dense = nn.Linear(hidden_size, hidden_size)
self.activation = ACT2FN[hidden_act]
self.layer_norm = nn.LayerNorm(hidden_size, epsilon=layer_norm_eps)
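        # Weight tying: NeZhaForPretraining passes the word-embedding matrix as
        # `embedding_weights`, so the LM head decodes with the transposed input embeddings.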
self.decoder_weight = embedding_weights
self.decoder_bias = self.create_parameter(
shape=[vocab_size], dtype=self.decoder_weight.dtype, is_bias=True)
def forward(self, hidden_states):
hidden_states = self.dense(hidden_states)
hidden_states = self.activation(hidden_states)
hidden_states = self.layer_norm(hidden_states)
hidden_states = paddle.tensor.matmul(
hidden_states, self.decoder_weight,
transpose_y=True) + self.decoder_bias
return hidden_states
class NeZhaPretrainingHeads(nn.Layer):
"""
Perform language modeling task and next sentence classification task.
Args:
hidden_size (int):
See :class:`NeZhaModel`.
vocab_size (int):
See :class:`NeZhaModel`.
hidden_act (str):
Activation function used in the language modeling task.
embedding_weights (Tensor, optional):
Decoding weights used to map hidden_states to logits of the masked token prediction.
Its data type should be float32 and its shape is [vocab_size, hidden_size].
Defaults to `None`, which means use the same weights of the embedding layer.
"""
def __init__(self,
hidden_size,
vocab_size,
hidden_act,
embedding_weights=None):
super(NeZhaPretrainingHeads, self).__init__()
self.predictions = NeZhaLMPredictionHead(hidden_size, vocab_size,
hidden_act, embedding_weights)
self.seq_relationship = nn.Linear(hidden_size, 2)
def forward(self, sequence_output, pooled_output):
"""
Args:
sequence_output(Tensor):
Sequence of hidden-states at the last layer of the model.
                Its data type should be float32 and its shape is [batch_size, sequence_length, hidden_size].
pooled_output(Tensor):
The output of first token (`[CLS]`) in sequence.
We "pool" the model by simply taking the hidden state corresponding to the first token.
Its data type should be float32 and its shape is [batch_size, hidden_size].
Returns:
tuple: Returns tuple (``prediction_scores``, ``seq_relationship_score``).
With the fields:
            - `prediction_scores` (Tensor):
                The scores of masked token prediction. Its data type should be float32
                and its shape is [batch_size, sequence_length, vocab_size].
- `seq_relationship_score` (Tensor):
The scores of next sentence prediction.
Its data type should be float32 and its shape is [batch_size, 2].
"""
prediction_scores = self.predictions(sequence_output)
seq_relationship_score = self.seq_relationship(pooled_output)
return prediction_scores, seq_relationship_score
class NeZhaForPretraining(NeZhaPretrainedModel):
"""
NeZha Model with pretraining tasks on top.
Args:
nezha (:class:`NeZhaModel`):
An instance of :class:`NeZhaModel`.
"""
def __init__(self, nezha):
super(NeZhaForPretraining, self).__init__()
self.nezha = nezha
self.cls = NeZhaPretrainingHeads(
self.nezha.config["hidden_size"], self.nezha.config["vocab_size"],
self.nezha.config["hidden_act"],
self.nezha.embeddings.word_embeddings.weight)
self.apply(self.init_weights)
def forward(self,
input_ids,
token_type_ids=None,
attention_mask=None,
masked_lm_labels=None,
next_sentence_label=None):
r"""
Args:
input_ids (Tensor):
See :class:`NeZhaModel`.
token_type_ids (Tensor, optional):
See :class:`NeZhaModel`.
attention_mask (Tensor, optional):
See :class:`NeZhaModel`.
            masked_lm_labels (Tensor, optional):
                The labels for the masked language modeling task, aligned with `prediction_scores`.
                Its data type should be int64 and its shape is [batch_size, sequence_length, 1].
            next_sentence_label (Tensor, optional):
                The labels for the next sentence prediction task, aligned with `seq_relationship_score`.
                Its data type should be int64 and its shape is [batch_size, 1].
Returns:
Tensor or tuple: Returns Tensor ``total_loss`` if `masked_lm_labels` is not None.
Returns tuple (``prediction_scores``, ``seq_relationship_score``) if `masked_lm_labels` is None.
With the fields:
            - `total_loss` (Tensor):
                The masked language modeling loss, plus the next sentence prediction
                loss when `next_sentence_label` is also given.
            - `prediction_scores` (Tensor):
                The scores of masked token prediction. Its data type should be float32
                and its shape is [batch_size, sequence_length, vocab_size].
- `seq_relationship_score` (Tensor):
The scores of next sentence prediction.
Its data type should be float32 and its shape is [batch_size, 2].
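        Example (an illustrative sketch; without labels the model returns the raw score tuple):
            .. code-block::
                import paddle
                from paddlenlp.transformers import NeZhaForPretraining, NeZhaTokenizer
                tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
                model = NeZhaForPretraining.from_pretrained('nezha-base-chinese')
                inputs = tokenizer("欢迎使用百度飞桨!")
                inputs = {k: paddle.to_tensor([v]) for (k, v) in inputs.items()}
                prediction_scores, seq_relationship_score = model(**inputs)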
"""
sequence_output, pooled_output = self.nezha(input_ids, token_type_ids,
attention_mask)
prediction_scores, seq_relationship_score = self.cls(sequence_output,
pooled_output)
if masked_lm_labels is not None and next_sentence_label is not None:
loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
masked_lm_loss = loss_fct(
prediction_scores.reshape(
(-1, self.nezha.config["vocab_size"])),
masked_lm_labels.reshape((-1, )))
next_sentence_loss = loss_fct(
seq_relationship_score.reshape((-1, 2)),
next_sentence_label.reshape((-1, )))
total_loss = masked_lm_loss + next_sentence_loss
return total_loss
elif masked_lm_labels is not None:
loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
masked_lm_loss = loss_fct(
prediction_scores.reshape(
(-1, self.nezha.config["vocab_size"])),
masked_lm_labels.reshape((-1, )))
total_loss = masked_lm_loss
return total_loss
else:
return prediction_scores, seq_relationship_score
class NeZhaForQuestionAnswering(NeZhaPretrainedModel):
"""
NeZha with a linear layer on top of the hidden-states output to compute `span_start_logits`
and `span_end_logits`, designed for question-answering tasks like SQuAD.
Args:
nezha (:class:`NeZhaModel`):
An instance of NeZhaModel.
dropout (float, optional):
The dropout probability for output of NeZha.
If None, use the same value as `hidden_dropout_prob` of `NeZhaModel`
instance `nezha`. Defaults to `None`.
"""
def __init__(self, nezha, dropout=None):
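        # Note: `dropout` is accepted for API compatibility but is not used by this head.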
super(NeZhaForQuestionAnswering, self).__init__()
self.nezha = nezha
self.classifier = nn.Linear(self.nezha.config["hidden_size"], 2)
self.apply(self.init_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
r"""
The NeZhaForQuestionAnswering forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`NeZhaModel`.
token_type_ids (Tensor, optional):
See :class:`NeZhaModel`.
attention_mask (Tensor, optional):
See :class:`NeZhaModel`.
Returns:
tuple: Returns tuple (`start_logits`, `end_logits`).
With the fields:
                - `start_logits` (Tensor):
                    A tensor of the input token classification logits, indicating the start position of the labelled span.
                    Its data type should be float32 and its shape is [batch_size, sequence_length].
                - `end_logits` (Tensor):
                    A tensor of the input token classification logits, indicating the end position of the labelled span.
                    Its data type should be float32 and its shape is [batch_size, sequence_length].
Example:
.. code-block::
import paddle
from paddlenlp.transformers import NeZhaForQuestionAnswering
from paddlenlp.transformers import NeZhaTokenizer
tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
model = NeZhaForQuestionAnswering.from_pretrained('nezha-base-chinese')
inputs = tokenizer("欢迎使用百度飞桨!")
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
outputs = model(**inputs)
start_logits = outputs[0]
                end_logits = outputs[1]
"""
sequence_output, _ = self.nezha(input_ids, token_type_ids,
attention_mask)
logits = self.classifier(sequence_output)
logits = paddle.transpose(logits, perm=[2, 0, 1])
start_logits, end_logits = paddle.unstack(x=logits, axis=0)
return start_logits, end_logits
class NeZhaForSequenceClassification(NeZhaPretrainedModel):
"""
NeZha Model with a linear layer on top of the output layer, designed for
sequence classification/regression tasks like GLUE tasks.
Args:
nezha (:class:`NeZhaModel`):
An instance of NeZhaModel.
num_classes (int, optional):
The number of classes. Defaults to `2`.
dropout (float, optional):
The dropout probability for output of NeZha.
If None, use the same value as `hidden_dropout_prob` of `NeZhaModel`
instance `nezha`. Defaults to None.
"""
def __init__(self, nezha, num_classes=2, dropout=None):
super(NeZhaForSequenceClassification, self).__init__()
self.num_classes = num_classes
self.nezha = nezha
self.dropout = nn.Dropout(dropout if dropout is not None else
self.nezha.config["hidden_dropout_prob"])
self.classifier = nn.Linear(self.nezha.config["hidden_size"],
num_classes)
self.apply(self.init_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
r"""
The NeZhaForSequenceClassification forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`NeZhaModel`.
token_type_ids (Tensor, optional):
See :class:`NeZhaModel`.
attention_mask (Tensor, optional):
See :class:`NeZhaModel`.
Returns:
Tensor: Returns tensor `logits`, a tensor of the input text classification logits.
Shape as `[batch_size, num_classes]` and dtype as float32.
Example:
.. code-block::
import paddle
from paddlenlp.transformers import NeZhaForSequenceClassification
from paddlenlp.transformers import NeZhaTokenizer
tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
model = NeZhaForSequenceClassification.from_pretrained('nezha-base-chinese')
inputs = tokenizer("欢迎使用百度飞桨!")
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
outputs = model(**inputs)
                logits = outputs[0]
"""
_, pooled_output = self.nezha(input_ids, token_type_ids, attention_mask)
pooled_output = self.dropout(pooled_output)
logits = self.classifier(pooled_output)
return logits
class NeZhaForTokenClassification(NeZhaPretrainedModel):
"""
NeZha Model with a linear layer on top of the hidden-states output layer,
designed for token classification tasks like NER tasks.
Args:
nezha (:class:`NeZhaModel`):
An instance of NeZhaModel.
num_classes (int, optional):
The number of classes. Defaults to `2`.
dropout (float, optional):
The dropout probability for output of NeZha.
If None, use the same value as `hidden_dropout_prob` of `NeZhaModel`
instance `nezha`. Defaults to `None`.
"""
def __init__(self, nezha, num_classes=2, dropout=None):
super(NeZhaForTokenClassification, self).__init__()
self.num_classes = num_classes
self.nezha = nezha
self.dropout = nn.Dropout(dropout if dropout is not None else
self.nezha.config["hidden_dropout_prob"])
self.classifier = nn.Linear(self.nezha.config["hidden_size"],
num_classes)
self.apply(self.init_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
r"""
The NeZhaForTokenClassification forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`NeZhaModel`.
token_type_ids (Tensor, optional):
See :class:`NeZhaModel`.
            attention_mask (Tensor, optional):
See :class:`NeZhaModel`.
Returns:
Tensor: Returns tensor `logits`, a tensor of the input token classification logits.
Shape as `[batch_size, sequence_length, num_classes]` and dtype as `float32`.
Example:
.. code-block::
import paddle
from paddlenlp.transformers import NeZhaForTokenClassification
from paddlenlp.transformers import NeZhaTokenizer
tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
model = NeZhaForTokenClassification.from_pretrained('nezha-base-chinese')
inputs = tokenizer("欢迎使用百度飞桨!")
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
outputs = model(**inputs)
logits = outputs[0]
"""
sequence_output, _ = self.nezha(input_ids, token_type_ids,
attention_mask)
sequence_output = self.dropout(sequence_output)
logits = self.classifier(sequence_output)
return logits
class NeZhaForMultipleChoice(NeZhaPretrainedModel):
"""
NeZha Model with a linear layer on top of the hidden-states output layer,
designed for multiple choice tasks like RocStories/SWAG tasks.
Args:
nezha (:class:`NeZhaModel`):
An instance of NeZhaModel.
num_choices (int, optional):
The number of choices. Defaults to `2`.
dropout (float, optional):
The dropout probability for output of NeZha.
If None, use the same value as `hidden_dropout_prob` of `NeZhaModel`
instance `nezha`. Defaults to `None`.
"""
def __init__(self, nezha, num_choices=2, dropout=None):
super(NeZhaForMultipleChoice, self).__init__()
self.num_choices = num_choices
self.nezha = nezha
self.dropout = nn.Dropout(dropout if dropout is not None else
self.nezha.config["hidden_dropout_prob"])
self.classifier = nn.Linear(self.nezha.config["hidden_size"], 1)
self.apply(self.init_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
r"""
The NeZhaForMultipleChoice forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`NeZhaModel`.
token_type_ids (Tensor, optional):
See :class:`NeZhaModel`.
            attention_mask (Tensor, optional):
See :class:`NeZhaModel`.
Returns:
            Tensor: Returns tensor `reshaped_logits`, a tensor of the input multiple choice classification logits.
            Shape as `[batch_size, num_choices]` and dtype as `float32`.
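        Example (an illustrative sketch; the toy question and choices below are made up,
        and it assumes the usual `from_pretrained` keyword passthrough for `num_choices`):
            .. code-block::
                import paddle
                from paddlenlp.transformers import NeZhaForMultipleChoice, NeZhaTokenizer
                tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
                model = NeZhaForMultipleChoice.from_pretrained('nezha-base-chinese', num_choices=2)
                # Encode one (question, choice) pair per choice; the two choices are
                # the same length, so the pairs stack without padding into shape
                # [batch_size=1, num_choices=2, seq_len].
                encoded = [tokenizer("天空是什么颜色?", choice)["input_ids"]
                           for choice in ["蓝色", "绿色"]]
                input_ids = paddle.to_tensor([encoded])
                reshaped_logits = model(input_ids)  # shape: [1, 2]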
"""
# input_ids: [bs, num_choice, seq_l]
input_ids = input_ids.reshape(
(-1, input_ids.shape[-1])) # flat_input_ids: [bs*num_choice,seq_l]
        if token_type_ids is not None:
            token_type_ids = token_type_ids.reshape(
                (-1, token_type_ids.shape[-1]))
        if attention_mask is not None:
            attention_mask = attention_mask.reshape(
                (-1, attention_mask.shape[-1]))
_, pooled_output = self.nezha(input_ids, token_type_ids, attention_mask)
pooled_output = self.dropout(pooled_output)
        logits = self.classifier(pooled_output)  # logits: (bs * num_choices, 1)
        reshaped_logits = logits.reshape(
            (-1, self.num_choices))  # reshaped_logits: (bs, num_choices)
return reshaped_logits
| # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
# Copyright 2020 Huawei Technologies Co., Ltd.
# Copyright 2018 The Google AI Language Team Authors, The HuggingFace Inc. team.
# Copyright (c) 2018, NVIDIA CORPORATION. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import math
import numpy as np
import paddle
import paddle.nn as nn
import paddle.nn.functional as F
from paddlenlp.transformers import PretrainedModel, register_base_model
__all__ = [
'NeZhaModel', "NeZhaPretrainedModel", 'NeZhaForPretraining',
'NeZhaForSequenceClassification', 'NeZhaPretrainingHeads',
'NeZhaForTokenClassification', 'NeZhaForQuestionAnswering',
'NeZhaForMultipleChoice'
]
def get_activation(activation_string):
if activation_string in ACT2FN:
return ACT2FN[activation_string]
else:
raise KeyError("function {} not found in ACT2FN mapping {}".format(
activation_string, list(ACT2FN.keys())))
def mish(x):
return x * F.tanh(F.softplus(x))
def linear_act(x):
return x
def swish(x):
return x * F.sigmoid(x)
def gelu_new(x):
"""
Implementation of the GELU activation function currently in Google BERT repo (identical to OpenAI GPT). Also see
the Gaussian Error Linear Units paper: https://arxiv.org/abs/1606.08415
"""
return 0.5 * x * (1.0 + paddle.tanh(
math.sqrt(2.0 / math.pi) * (x + 0.044715 * paddle.pow(x, 3.0))))
ACT2FN = {
"relu": F.relu,
"gelu": F.gelu,
"gelu_new": gelu_new,
"tanh": F.tanh,
"sigmoid": F.sigmoid,
"mish": mish,
"linear": linear_act,
"swish": swish,
}
class NeZhaAttention(nn.Layer):
def __init__(self,
hidden_size=768,
num_attention_heads=12,
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_relative_position=64,
layer_norm_eps=1e-12):
super(NeZhaAttention, self).__init__()
if hidden_size % num_attention_heads != 0:
raise ValueError(
"The hidden size (%d) is not a multiple of the number of attention "
"heads (%d)" % (hidden_size, num_attention_heads))
self.num_attention_heads = num_attention_heads
self.attention_head_size = int(hidden_size / num_attention_heads)
self.all_head_size = self.num_attention_heads * self.attention_head_size
self.query = nn.Linear(hidden_size, self.all_head_size)
self.key = nn.Linear(hidden_size, self.all_head_size)
self.value = nn.Linear(hidden_size, self.all_head_size)
self.relative_positions_embeddings = self.generate_relative_positions_embeddings(
length=512,
depth=self.attention_head_size,
max_relative_position=max_relative_position)
self.attention_dropout = nn.Dropout(attention_probs_dropout_prob)
self.dense = nn.Linear(hidden_size, hidden_size)
self.layer_norm = nn.LayerNorm(hidden_size, epsilon=layer_norm_eps)
self.output_dropout = nn.Dropout(hidden_dropout_prob)
def generate_relative_positions_embeddings(self,
length,
depth,
max_relative_position=127):
vocab_size = max_relative_position * 2 + 1
range_vec = paddle.arange(length)
range_mat = paddle.tile(
range_vec, repeat_times=[length]).reshape((length, length))
distance_mat = range_mat - paddle.t(range_mat)
distance_mat_clipped = paddle.clip(
distance_mat.astype('float32'), -max_relative_position,
max_relative_position)
final_mat = distance_mat_clipped + max_relative_position
embeddings_table = np.zeros([vocab_size, depth])
for pos in range(vocab_size):
for i in range(depth // 2):
embeddings_table[pos, 2 * i] = np.sin(pos / np.power(10000, 2 *
i / depth))
embeddings_table[pos, 2 * i + 1] = np.cos(pos / np.power(
10000, 2 * i / depth))
embeddings_table_tensor = paddle.to_tensor(
embeddings_table, dtype='float32')
flat_relative_positions_matrix = final_mat.reshape((-1, ))
one_hot_relative_positions_matrix = paddle.nn.functional.one_hot(
flat_relative_positions_matrix.astype('int64'),
num_classes=vocab_size)
embeddings = paddle.matmul(one_hot_relative_positions_matrix,
embeddings_table_tensor)
my_shape = final_mat.shape
my_shape.append(depth)
embeddings = embeddings.reshape(my_shape)
return embeddings
def transpose_for_scores(self, x):
new_x_shape = x.shape[:-1] + [
self.num_attention_heads, self.attention_head_size
]
x = x.reshape(new_x_shape)
return x.transpose((0, 2, 1, 3))
def forward(self, hidden_states, attention_mask):
mixed_query_layer = self.query(hidden_states)
mixed_key_layer = self.key(hidden_states)
mixed_value_layer = self.value(hidden_states)
query_layer = self.transpose_for_scores(mixed_query_layer)
key_layer = self.transpose_for_scores(mixed_key_layer)
value_layer = self.transpose_for_scores(mixed_value_layer)
# Take the dot product between "query" and "key" to get the raw attention scores.
attention_scores = paddle.matmul(query_layer,
key_layer.transpose((0, 1, 3, 2)))
batch_size, num_attention_heads, from_seq_length, to_seq_length = attention_scores.shape
relations_keys = self.relative_positions_embeddings.detach().clone(
)[:to_seq_length, :to_seq_length, :]
query_layer_t = query_layer.transpose((2, 0, 1, 3))
query_layer_r = query_layer_t.reshape(
(from_seq_length, batch_size * num_attention_heads,
self.attention_head_size))
key_position_scores = paddle.matmul(query_layer_r,
relations_keys.transpose((0, 2, 1)))
key_position_scores_r = key_position_scores.reshape(
(from_seq_length, batch_size, num_attention_heads, from_seq_length))
key_position_scores_r_t = key_position_scores_r.transpose((1, 2, 0, 3))
attention_scores = attention_scores + key_position_scores_r_t
attention_scores = attention_scores / math.sqrt(
self.attention_head_size)
attention_scores = attention_scores + attention_mask
# Normalize the attention scores to probabilities.
attention_probs = nn.Softmax(axis=-1)(attention_scores)
# This is actually dropping out entire tokens to attend to, which might
# seem a bit unusual, but is taken from the original Transformer paper.
attention_probs = self.attention_dropout(attention_probs)
context_layer = paddle.matmul(attention_probs, value_layer)
relations_values = self.relative_positions_embeddings.clone(
)[:to_seq_length, :to_seq_length, :]
attention_probs_t = attention_probs.transpose((2, 0, 1, 3))
attentions_probs_r = attention_probs_t.reshape(
(from_seq_length, batch_size * num_attention_heads, to_seq_length))
value_position_scores = paddle.matmul(attentions_probs_r,
relations_values)
value_position_scores_r = value_position_scores.reshape(
(from_seq_length, batch_size, num_attention_heads,
self.attention_head_size))
value_position_scores_r_t = value_position_scores_r.transpose(
(1, 2, 0, 3))
context_layer = context_layer + value_position_scores_r_t
context_layer = context_layer.transpose((0, 2, 1, 3))
new_context_layer_shape = context_layer.shape[:-2] + [
self.all_head_size
]
context_layer = context_layer.reshape(new_context_layer_shape)
projected_context_layer = self.dense(context_layer)
projected_context_layer_dropout = self.output_dropout(
projected_context_layer)
layer_normed_context_layer = self.layer_norm(
hidden_states + projected_context_layer_dropout)
return layer_normed_context_layer, attention_scores
class NeZhaLayer(nn.Layer):
def __init__(self,
hidden_size=768,
num_attention_heads=12,
intermediate_size=3072,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_relative_position=64,
layer_norm_eps=1e-12):
super(NeZhaLayer, self).__init__()
self.seq_len_dim = 1
self.layer_norm = nn.LayerNorm(hidden_size, epsilon=layer_norm_eps)
self.attention = NeZhaAttention(
hidden_size=hidden_size,
num_attention_heads=num_attention_heads,
hidden_dropout_prob=hidden_dropout_prob,
attention_probs_dropout_prob=attention_probs_dropout_prob,
max_relative_position=max_relative_position,
layer_norm_eps=layer_norm_eps)
self.ffn = nn.Linear(hidden_size, intermediate_size)
self.ffn_output = nn.Linear(intermediate_size, hidden_size)
self.activation = ACT2FN[hidden_act]
self.dropout = nn.Dropout(hidden_dropout_prob)
def forward(self, hidden_states, attention_mask=None):
attention_output, layer_att = self.attention(hidden_states,
attention_mask)
ffn_output = self.ffn(attention_output)
ffn_output = self.activation(ffn_output)
ffn_output = self.ffn_output(ffn_output)
ffn_output_dropout = self.dropout(ffn_output)
hidden_states = self.layer_norm(ffn_output_dropout + attention_output)
return hidden_states, layer_att
class NeZhaEncoder(nn.Layer):
def __init__(self,
hidden_size=768,
num_hidden_layers=12,
num_attention_heads=12,
intermediate_size=3072,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_relative_position=64,
layer_norm_eps='1e-12'):
super(NeZhaEncoder, self).__init__()
layer = NeZhaLayer(
hidden_size=hidden_size,
num_attention_heads=num_attention_heads,
intermediate_size=intermediate_size,
hidden_act=hidden_act,
hidden_dropout_prob=hidden_dropout_prob,
attention_probs_dropout_prob=attention_probs_dropout_prob,
max_relative_position=max_relative_position,
layer_norm_eps=layer_norm_eps)
self.layer = nn.LayerList(
[copy.deepcopy(layer) for _ in range(num_hidden_layers)])
def forward(self, hidden_states, attention_mask):
all_encoder_layers = []
all_encoder_att = []
for i, layer_module in enumerate(self.layer):
all_encoder_layers.append(hidden_states)
hidden_states, layer_att = layer_module(all_encoder_layers[i],
attention_mask)
all_encoder_att.append(layer_att)
all_encoder_layers.append(hidden_states)
return all_encoder_layers, all_encoder_att
class NeZhaEmbeddings(nn.Layer):
def __init__(self,
vocab_size,
hidden_size=768,
hidden_dropout_prob=0.1,
max_position_embeddings=512,
type_vocab_size=16,
use_relative_position=True):
super(NeZhaEmbeddings, self).__init__()
self.use_relative_position = use_relative_position
self.word_embeddings = nn.Embedding(vocab_size, hidden_size)
if not use_relative_position:
self.position_embeddings = nn.Embedding(max_position_embeddings,
hidden_size)
self.token_type_embeddings = nn.Embedding(type_vocab_size, hidden_size)
self.layer_norm = nn.LayerNorm(hidden_size)
self.dropout = nn.Dropout(hidden_dropout_prob)
def forward(self, input_ids, token_type_ids=None):
seq_length = input_ids.shape[1]
position_ids = paddle.arange(seq_length, dtype='int64')
position_ids = position_ids.unsqueeze(0).expand_as(input_ids)
if token_type_ids is None:
token_type_ids = paddle.zeros_like(input_ids, dtype="int64")
words_embeddings = self.word_embeddings(input_ids)
embeddings = words_embeddings
if not self.use_relative_position:
position_embeddings = self.position_embeddings(position_ids)
embeddings += position_embeddings
token_type_embeddings = self.token_type_embeddings(token_type_ids)
embeddings += token_type_embeddings
embeddings = self.layer_norm(embeddings)
embeddings = self.dropout(embeddings)
return embeddings
class NeZhaPooler(nn.Layer):
def __init__(self, hidden_size):
super(NeZhaPooler, self).__init__()
self.dense = nn.Linear(hidden_size, hidden_size)
self.activation = nn.Tanh()
def forward(self, hidden_states):
# We "pool" the model by simply taking the hidden state corresponding
# to the first token.
first_token_tensor = hidden_states[:, 0]
pooled_output = self.dense(first_token_tensor)
pooled_output = self.activation(pooled_output)
return pooled_output
class NeZhaPretrainedModel(PretrainedModel):
"""
An abstract class for pretrained NeZha models. It provides NeZha related
`model_config_file`, `pretrained_init_configuration`, `resource_files_names`,
`pretrained_resource_files_map`, `base_model_prefix` for downloading and
loading pretrained models.
See :class:`~paddlenlp.transformers.model_utils.PretrainedModel` for more details.
"""
model_config_file = "model_config.json"
pretrained_init_configuration = {
"nezha-base-chinese": {
"vocab_size": 21128,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"attention_probs_dropout_prob": 0.1,
"max_position_embeddings": 512,
"max_relative_position": 64,
"type_vocab_size": 2,
"initializer_range": 0.02,
"use_relative_position": True
},
"nezha-large-chinese": {
"vocab_size": 21128,
"hidden_size": 1024,
"num_hidden_layers": 24,
"num_attention_heads": 16,
"intermediate_size": 4096,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"attention_probs_dropout_prob": 0.1,
"max_position_embeddings": 512,
"max_relative_position": 64,
"type_vocab_size": 2,
"initializer_range": 0.02,
"use_relative_position": True
},
"nezha-base-wwm-chinese": {
"vocab_size": 21128,
"hidden_size": 768,
"num_hidden_layers": 12,
"num_attention_heads": 12,
"intermediate_size": 3072,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"attention_probs_dropout_prob": 0.1,
"max_position_embeddings": 512,
"max_relative_position": 64,
"type_vocab_size": 2,
"initializer_range": 0.02,
"use_relative_position": True
},
"nezha-large-wwm-chinese": {
"vocab_size": 21128,
"hidden_size": 1024,
"num_hidden_layers": 24,
"num_attention_heads": 16,
"intermediate_size": 4096,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"attention_probs_dropout_prob": 0.1,
"max_position_embeddings": 512,
"max_relative_position": 64,
"type_vocab_size": 2,
"initializer_range": 0.02,
"use_relative_position": True
},
}
resource_files_names = {"model_state": "model_state.pdparams"}
pretrained_resource_files_map = {
"model_state": {
"nezha-base-chinese":
"https://paddlenlp.bj.bcebos.com/models/transformers/nezha/nezha-base-chinese.pdparams",
"nezha-large-chinese":
"https://paddlenlp.bj.bcebos.com/models/transformers/nezha/nezha-large-chinese.pdparams",
"nezha-base-wwm-chinese":
"https://paddlenlp.bj.bcebos.com/models/transformers/nezha/nezha-base-wwm-chinese.pdparams",
"nezha-large-wwm-chinese":
"https://paddlenlp.bj.bcebos.com/models/transformers/nezha/nezha-large-wwm-chinese.pdparams",
}
}
base_model_prefix = "nezha"
def init_weights(self, layer):
""" Initialization hook """
if isinstance(layer, (nn.Linear, nn.Embedding)):
# In the dygraph mode, use the `set_value` to reset the parameter directly,
# and reset the `state_dict` to update parameter in static mode.
if isinstance(layer.weight, paddle.Tensor):
layer.weight.set_value(
paddle.tensor.normal(
mean=0.0,
std=self.initializer_range
if hasattr(self, "initializer_range") else
self.nezha.config["initializer_range"],
shape=layer.weight.shape))
elif isinstance(layer, nn.LayerNorm):
layer._epsilon = 1e-12
@register_base_model
class NeZhaModel(NeZhaPretrainedModel):
"""
The bare NeZha Model transformer outputting raw hidden-states.
This model inherits from :class:`~paddlenlp.transformers.model_utils.PretrainedModel`.
Refer to the superclass documentation for the generic methods.
This model is also a Paddle `paddle.nn.Layer <https://www.paddlepaddle.org.cn/documentation
/docs/en/api/paddle/fluid/dygraph/layers/Layer_en.html>`__ subclass. Use it as a regular Paddle Layer
and refer to the Paddle documentation for all matter related to general usage and behavior.
Args:
vocab_size (int):
Vocabulary size of `inputs_ids` in `DistilBertModel`. Defines the number of different tokens that can
be represented by the `inputs_ids` passed when calling `DistilBertModel`.
hidden_size (int, optional):
Dimensionality of the embedding layer, encoder layers and the pooler layer. Defaults to `768`.
num_hidden_layers (int, optional):
Number of hidden layers in the Transformer encoder. Defaults to `12`.
num_attention_heads (int, optional):
Number of attention heads for each attention layer in the Transformer encoder.
Defaults to `12`.
intermediate_size (int, optional):
Dimensionality of the feed-forward (ff) layer in the encoder. Input tensors
to ff layers are firstly projected from `hidden_size` to `intermediate_size`,
and then projected back to `hidden_size`. Typically `intermediate_size` is larger than `hidden_size`.
Defaults to `3072`.
hidden_act (str, optional):
The non-linear activation function in the feed-forward layer.
``"gelu"``, ``"relu"`` and any other paddle supported activation functions
are supported. Defaults to `"gelu"`.
hidden_dropout_prob (float, optional):
The dropout probability for all fully connected layers in the embeddings and encoder.
Defaults to `0.1`.
attention_probs_dropout_prob (float, optional):
The dropout probability used in MultiHeadAttention in all encoder layers to drop some attention target.
Defaults to `0.1`.
max_position_embeddings (int, optional):
The maximum value of the dimensionality of position encoding, which dictates the maximum supported length of an input
sequence. Defaults to `512`.
type_vocab_size (int, optional):
The vocabulary size of `token_type_ids`.
Defaults to `16`.
initializer_range (float, optional):
The standard deviation of the normal initializer.
Defaults to `0.02`.
.. note::
A normal_initializer initializes weight matrices as normal distributions.
See :meth:`NeZhaPretrainedModel.init_weights()` for how weights are initialized in `NeZhaModel`.
max_relative_embeddings (int, optional):
The maximum value of the dimensionality of relative encoding, which dictates the maximum supported
relative distance of two sentences.
Defaults to `64`.
layer_norm_eps (float, optional):
The small value added to the variance in `LayerNorm` to prevent division by zero.
Defaults to `1e-12`.
use_relative_position (bool, optional):
Whether or not to use relative position embedding. Defaults to `True`.
"""
def __init__(self,
vocab_size,
hidden_size=768,
num_hidden_layers=12,
num_attention_heads=12,
intermediate_size=3072,
hidden_act="gelu",
hidden_dropout_prob=0.1,
attention_probs_dropout_prob=0.1,
max_position_embeddings=512,
type_vocab_size=2,
initializer_range=0.02,
max_relative_position=64,
layer_norm_eps=1e-12,
use_relative_position=True):
super(NeZhaModel, self).__init__()
self.initializer_range = initializer_range
self.embeddings = NeZhaEmbeddings(
vocab_size=vocab_size,
hidden_size=hidden_size,
hidden_dropout_prob=hidden_dropout_prob,
max_position_embeddings=max_position_embeddings,
type_vocab_size=type_vocab_size,
use_relative_position=use_relative_position)
self.encoder = NeZhaEncoder(
hidden_size=hidden_size,
num_hidden_layers=num_hidden_layers,
num_attention_heads=num_attention_heads,
intermediate_size=intermediate_size,
hidden_act=hidden_act,
hidden_dropout_prob=hidden_dropout_prob,
attention_probs_dropout_prob=attention_probs_dropout_prob,
max_relative_position=max_relative_position,
layer_norm_eps=layer_norm_eps)
self.pooler = NeZhaPooler(hidden_size)
self.apply(self.init_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
r'''
The NeZhaModel forward method, overrides the `__call__()` special method.
Args:
input_ids (Tensor):
Indices of input sequence tokens in the vocabulary. They are
numerical representations of tokens that build the input sequence.
Its data type should be `int64` and it has a shape of [batch_size, sequence_length].
token_type_ids (Tensor, optional):
Segment token indices to indicate different portions of the inputs.
Selected in the range ``[0, type_vocab_size - 1]``.
If `type_vocab_size` is 2, which means the inputs have two portions.
Indices can either be 0 or 1:
- 0 corresponds to a *sentence A* token,
- 1 corresponds to a *sentence B* token.
Its data type should be `int64` and it has a shape of [batch_size, sequence_length].
Defaults to `None`, which means we don't add segment embeddings.
attention_mask (Tensor, optional):
Mask used in multi-head attention to avoid performing attention to some unwanted positions,
usually the paddings or the subsequent positions.
Its data type can be int, float and bool.
When the data type is bool, the `masked` tokens have `False` values and the others have `True` values.
When the data type is int, the `masked` tokens have `0` values and the others have `1` values.
When the data type is float, the `masked` tokens have `-INF` values and the others have `0` values.
It is a tensor with shape broadcasted to `[batch_size, num_attention_heads, sequence_length, sequence_length]`.
For example, its shape can be [batch_size, sequence_length], [batch_size, sequence_length, sequence_length],
[batch_size, num_attention_heads, sequence_length, sequence_length].
We use whole-word-mask in NeZha, so the whole word will have the same value. For example, "使用" as a word,
"使" and "用" will have the same value.
Defaults to `None`, which means nothing needed to be prevented attention to.
Returns:
tuple: Returns tuple (`sequence_output`, `pooled_output`).
With the fields:
- `sequence_output` (Tensor):
Sequence of hidden-states at the last layer of the model.
It's data type should be float32 and its shape is [batch_size, sequence_length, hidden_size].
- `pooled_output` (Tensor):
The output of first token (`[CLS]`) in sequence.
We "pool" the model by simply taking the hidden state corresponding to the first token.
Its data type should be float32 and its shape is [batch_size, hidden_size].
Example:
.. code-block::
import paddle
from paddlenlp.transformers import NeZhaModel, NeZhaTokenizer
tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
model = NeZhaModel.from_pretrained('nezha-base-chinese')
inputs = tokenizer("欢迎使用百度飞浆!")
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
output = model(**inputs)
'''
if attention_mask is None:
attention_mask = paddle.ones_like(input_ids)
if token_type_ids is None:
token_type_ids = paddle.zeros_like(input_ids)
extended_attention_mask = attention_mask.unsqueeze(1).unsqueeze(2)
extended_attention_mask = (1.0 - extended_attention_mask) * -10000.0
embedding_output = self.embeddings(input_ids, token_type_ids)
encoder_outputs, _ = self.encoder(embedding_output,
extended_attention_mask)
sequence_output = encoder_outputs[-1]
pooled_output = self.pooler(sequence_output)
return sequence_output, pooled_output
class NeZhaLMPredictionHead(nn.Layer):
def __init__(self,
hidden_size,
vocab_size,
hidden_act,
embedding_weights=None,
layer_norm_eps=1e-12):
super(NeZhaLMPredictionHead, self).__init__()
self.dense = nn.Linear(hidden_size, hidden_size)
self.activation = ACT2FN[hidden_act]
self.layer_norm = nn.LayerNorm(hidden_size, epsilon=layer_norm_eps)
self.decoder_weight = embedding_weights
self.decoder_bias = self.create_parameter(
shape=[vocab_size], dtype=self.decoder_weight.dtype, is_bias=True)
def forward(self, hidden_states):
hidden_states = self.dense(hidden_states)
hidden_states = self.activation(hidden_states)
hidden_states = self.layer_norm(hidden_states)
hidden_states = paddle.tensor.matmul(
hidden_states, self.decoder_weight,
transpose_y=True) + self.decoder_bias
return hidden_states
class NeZhaPretrainingHeads(nn.Layer):
"""
Perform language modeling task and next sentence classification task.
Args:
hidden_size (int):
See :class:`NeZhaModel`.
vocab_size (int):
See :class:`NeZhaModel`.
hidden_act (str):
Activation function used in the language modeling task.
embedding_weights (Tensor, optional):
Decoding weights used to map hidden_states to logits of the masked token prediction.
Its data type should be float32 and its shape is [vocab_size, hidden_size].
Defaults to `None`, which means use the same weights of the embedding layer.
"""
def __init__(self,
hidden_size,
vocab_size,
hidden_act,
embedding_weights=None):
super(NeZhaPretrainingHeads, self).__init__()
self.predictions = NeZhaLMPredictionHead(hidden_size, vocab_size,
hidden_act, embedding_weights)
self.seq_relationship = nn.Linear(hidden_size, 2)
def forward(self, sequence_output, pooled_output):
"""
Args:
sequence_output(Tensor):
Sequence of hidden-states at the last layer of the model.
It's data type should be float32 and its shape is [batch_size, sequence_length, hidden_size].
pooled_output(Tensor):
The output of first token (`[CLS]`) in sequence.
We "pool" the model by simply taking the hidden state corresponding to the first token.
Its data type should be float32 and its shape is [batch_size, hidden_size].
Returns:
tuple: Returns tuple (``prediction_scores``, ``seq_relationship_score``).
With the fields:
- `prediction_scores` (Tensor):
The scores of masked token prediction. Its data type should be float32.
If `masked_positions` is None, its shape is [batch_size, sequence_length, vocab_size].
Otherwise, its shape is [batch_size, mask_token_num, vocab_size].
- `seq_relationship_score` (Tensor):
The scores of next sentence prediction.
Its data type should be float32 and its shape is [batch_size, 2].
"""
prediction_scores = self.predictions(sequence_output)
seq_relationship_score = self.seq_relationship(pooled_output)
return prediction_scores, seq_relationship_score
class NeZhaForPretraining(NeZhaPretrainedModel):
"""
NeZha Model with pretraining tasks on top.
Args:
nezha (:class:`NeZhaModel`):
An instance of :class:`NeZhaModel`.
"""
def __init__(self, nezha):
super(NeZhaForPretraining, self).__init__()
self.nezha = nezha
self.cls = NeZhaPretrainingHeads(
self.nezha.config["hidden_size"], self.nezha.config["vocab_size"],
self.nezha.config["hidden_act"],
self.nezha.embeddings.word_embeddings.weight)
self.apply(self.init_weights)
def forward(self,
input_ids,
token_type_ids=None,
attention_mask=None,
masked_lm_labels=None,
next_sentence_label=None):
r"""
Args:
input_ids (Tensor):
See :class:`NeZhaModel`.
token_type_ids (Tensor, optional):
See :class:`NeZhaModel`.
attention_mask (Tensor, optional):
See :class:`NeZhaModel`.
            masked_lm_labels (Tensor, optional):
                The labels for the masked language modeling task, aligned with `prediction_scores`.
                Its data type should be int64 and its shape is [batch_size, sequence_length, 1].
            next_sentence_label (Tensor, optional):
                The labels for the next sentence prediction task, aligned with `seq_relationship_score`.
                Its data type should be int64 and its shape is [batch_size, 1].
Returns:
Tensor or tuple: Returns Tensor ``total_loss`` if `masked_lm_labels` is not None.
Returns tuple (``prediction_scores``, ``seq_relationship_score``) if `masked_lm_labels` is None.
With the fields:
            - `total_loss` (Tensor):
                The sum of the masked language modeling loss and the next sentence
                prediction loss. Its data type should be float32.
- `prediction_scores` (Tensor):
The scores of masked token prediction. Its data type should be float32.
If `masked_positions` is None, its shape is [batch_size, sequence_length, vocab_size].
Otherwise, its shape is [batch_size, mask_token_num, vocab_size].
- `seq_relationship_score` (Tensor):
The scores of next sentence prediction.
Its data type should be float32 and its shape is [batch_size, 2].
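        Example:
            .. code-block::

                # An illustrative sketch following the other examples in this
                # file; with both label arguments omitted, the forward pass
                # returns the raw (prediction_scores, seq_relationship_score).
                import paddle
                from paddlenlp.transformers import NeZhaForPretraining
                from paddlenlp.transformers import NeZhaTokenizer

                tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
                model = NeZhaForPretraining.from_pretrained('nezha-base-chinese')

                inputs = tokenizer("欢迎使用百度飞桨!")
                inputs = {k: paddle.to_tensor([v]) for (k, v) in inputs.items()}
                prediction_scores, seq_relationship_score = model(**inputs)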
"""
sequence_output, pooled_output = self.nezha(input_ids, token_type_ids,
attention_mask)
prediction_scores, seq_relationship_score = self.cls(sequence_output,
pooled_output)
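        # When both label sets are given, the pretraining loss is the sum of the
        # masked-LM loss and the next-sentence-prediction loss.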
if masked_lm_labels is not None and next_sentence_label is not None:
loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
masked_lm_loss = loss_fct(
prediction_scores.reshape(
(-1, self.nezha.config["vocab_size"])),
masked_lm_labels.reshape((-1, )))
next_sentence_loss = loss_fct(
seq_relationship_score.reshape((-1, 2)),
next_sentence_label.reshape((-1, )))
total_loss = masked_lm_loss + next_sentence_loss
return total_loss
elif masked_lm_labels is not None:
loss_fct = nn.CrossEntropyLoss(ignore_index=-1)
masked_lm_loss = loss_fct(
prediction_scores.reshape(
(-1, self.nezha.config["vocab_size"])),
masked_lm_labels.reshape((-1, )))
total_loss = masked_lm_loss
return total_loss
else:
return prediction_scores, seq_relationship_score
class NeZhaForQuestionAnswering(NeZhaPretrainedModel):
"""
NeZha with a linear layer on top of the hidden-states output to compute `span_start_logits`
and `span_end_logits`, designed for question-answering tasks like SQuAD.
Args:
nezha (:class:`NeZhaModel`):
An instance of NeZhaModel.
dropout (float, optional):
The dropout probability for output of NeZha.
If None, use the same value as `hidden_dropout_prob` of `NeZhaModel`
instance `nezha`. Defaults to `None`.
"""
def __init__(self, nezha, dropout=None):
super(NeZhaForQuestionAnswering, self).__init__()
self.nezha = nezha
self.classifier = nn.Linear(self.nezha.config["hidden_size"], 2)
self.apply(self.init_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
r"""
The NeZhaForQuestionAnswering forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`NeZhaModel`.
token_type_ids (Tensor, optional):
See :class:`NeZhaModel`.
attention_mask (Tensor, optional):
See :class:`NeZhaModel`.
Returns:
tuple: Returns tuple (`start_logits`, `end_logits`).
With the fields:
- `start_logits` (Tensor):
A tensor of the input token classification logits, indicates the start position of the labelled span.
Its data type should be float32 and its shape is [batch_size, sequence_length].
- `end_logits` (Tensor):
A tensor of the input token classification logits, indicates the end position of the labelled span.
Its data type should be float32 and its shape is [batch_size, sequence_length].
Example:
.. code-block::
import paddle
from paddlenlp.transformers import NeZhaForQuestionAnswering
from paddlenlp.transformers import NeZhaTokenizer
tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
model = NeZhaForQuestionAnswering.from_pretrained('nezha-base-chinese')
inputs = tokenizer("欢迎使用百度飞桨!")
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
outputs = model(**inputs)
start_logits = outputs[0]
                end_logits = outputs[1]
"""
sequence_output, _ = self.nezha(input_ids, token_type_ids,
attention_mask)
logits = self.classifier(sequence_output)
logits = paddle.transpose(logits, perm=[2, 0, 1])
start_logits, end_logits = paddle.unstack(x=logits, axis=0)
return start_logits, end_logits
class NeZhaForSequenceClassification(NeZhaPretrainedModel):
"""
NeZha Model with a linear layer on top of the output layer, designed for
sequence classification/regression tasks like GLUE tasks.
Args:
nezha (:class:`NeZhaModel`):
An instance of NeZhaModel.
num_classes (int, optional):
The number of classes. Defaults to `2`.
dropout (float, optional):
The dropout probability for output of NeZha.
If None, use the same value as `hidden_dropout_prob` of `NeZhaModel`
instance `nezha`. Defaults to None.
"""
def __init__(self, nezha, num_classes=2, dropout=None):
super(NeZhaForSequenceClassification, self).__init__()
self.num_classes = num_classes
self.nezha = nezha
self.dropout = nn.Dropout(dropout if dropout is not None else
self.nezha.config["hidden_dropout_prob"])
self.classifier = nn.Linear(self.nezha.config["hidden_size"],
num_classes)
self.apply(self.init_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
r"""
The NeZhaForSequenceClassification forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`NeZhaModel`.
token_type_ids (Tensor, optional):
See :class:`NeZhaModel`.
attention_mask (Tensor, optional):
See :class:`NeZhaModel`.
Returns:
Tensor: Returns tensor `logits`, a tensor of the input text classification logits.
Shape as `[batch_size, num_classes]` and dtype as float32.
Example:
.. code-block::
import paddle
from paddlenlp.transformers import NeZhaForSequenceClassification
from paddlenlp.transformers import NeZhaTokenizer
tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
model = NeZhaForSequenceClassification.from_pretrained('nezha-base-chinese')
inputs = tokenizer("欢迎使用百度飞桨!")
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
outputs = model(**inputs)
                logits = outputs[0]
"""
_, pooled_output = self.nezha(input_ids, token_type_ids, attention_mask)
pooled_output = self.dropout(pooled_output)
logits = self.classifier(pooled_output)
return logits
class NeZhaForTokenClassification(NeZhaPretrainedModel):
"""
NeZha Model with a linear layer on top of the hidden-states output layer,
designed for token classification tasks like NER tasks.
Args:
nezha (:class:`NeZhaModel`):
An instance of NeZhaModel.
num_classes (int, optional):
The number of classes. Defaults to `2`.
dropout (float, optional):
The dropout probability for output of NeZha.
If None, use the same value as `hidden_dropout_prob` of `NeZhaModel`
instance `nezha`. Defaults to `None`.
"""
def __init__(self, nezha, num_classes=2, dropout=None):
super(NeZhaForTokenClassification, self).__init__()
self.num_classes = num_classes
self.nezha = nezha
self.dropout = nn.Dropout(dropout if dropout is not None else
self.nezha.config["hidden_dropout_prob"])
self.classifier = nn.Linear(self.nezha.config["hidden_size"],
num_classes)
self.apply(self.init_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
r"""
The NeZhaForTokenClassification forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`NeZhaModel`.
token_type_ids (Tensor, optional):
See :class:`NeZhaModel`.
attention_mask (list, optional):
See :class:`NeZhaModel`.
Returns:
Tensor: Returns tensor `logits`, a tensor of the input token classification logits.
Shape as `[batch_size, sequence_length, num_classes]` and dtype as `float32`.
Example:
.. code-block::
import paddle
from paddlenlp.transformers import NeZhaForTokenClassification
from paddlenlp.transformers import NeZhaTokenizer
tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
model = NeZhaForTokenClassification.from_pretrained('nezha-base-chinese')
inputs = tokenizer("欢迎使用百度飞桨!")
inputs = {k:paddle.to_tensor([v]) for (k, v) in inputs.items()}
outputs = model(**inputs)
logits = outputs[0]
"""
sequence_output, _ = self.nezha(input_ids, token_type_ids,
attention_mask)
sequence_output = self.dropout(sequence_output)
logits = self.classifier(sequence_output)
return logits
class NeZhaForMultipleChoice(NeZhaPretrainedModel):
"""
NeZha Model with a linear layer on top of the hidden-states output layer,
designed for multiple choice tasks like RocStories/SWAG tasks.
Args:
nezha (:class:`NeZhaModel`):
An instance of NeZhaModel.
num_choices (int, optional):
The number of choices. Defaults to `2`.
dropout (float, optional):
The dropout probability for output of NeZha.
If None, use the same value as `hidden_dropout_prob` of `NeZhaModel`
instance `nezha`. Defaults to `None`.
"""
def __init__(self, nezha, num_choices=2, dropout=None):
super(NeZhaForMultipleChoice, self).__init__()
self.num_choices = num_choices
self.nezha = nezha
self.dropout = nn.Dropout(dropout if dropout is not None else
self.nezha.config["hidden_dropout_prob"])
self.classifier = nn.Linear(self.nezha.config["hidden_size"], 1)
self.apply(self.init_weights)
def forward(self, input_ids, token_type_ids=None, attention_mask=None):
r"""
The NeZhaForMultipleChoice forward method, overrides the __call__() special method.
Args:
input_ids (Tensor):
See :class:`NeZhaModel`.
token_type_ids (Tensor, optional):
See :class:`NeZhaModel`.
attention_mask (list, optional):
See :class:`NeZhaModel`.
Returns:
Tensor: Returns tensor `reshaped_logits`, a tensor of the input multiple choice classification logits.
Shape as `[batch_size, num_classes]` and dtype as `float32`.
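        Example:
            .. code-block::

                # An illustrative sketch: two candidate sentences are padded by
                # hand (pad token id 0 assumed) so they can be stacked into one
                # tensor of shape [batch_size, num_choices, seq_len].
                import paddle
                from paddlenlp.transformers import NeZhaForMultipleChoice
                from paddlenlp.transformers import NeZhaTokenizer

                tokenizer = NeZhaTokenizer.from_pretrained('nezha-base-chinese')
                model = NeZhaForMultipleChoice.from_pretrained('nezha-base-chinese')

                choices = ["今天天气很好", "今天天气不好"]
                encoded = [tokenizer(text)["input_ids"] for text in choices]
                max_len = max(len(ids) for ids in encoded)
                encoded = [ids + [0] * (max_len - len(ids)) for ids in encoded]
                input_ids = paddle.to_tensor([encoded])  # [1, num_choices, seq_len]
                reshaped_logits = model(input_ids)       # [1, num_choices]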
"""
# input_ids: [bs, num_choice, seq_l]
input_ids = input_ids.reshape(
(-1, input_ids.shape[-1])) # flat_input_ids: [bs*num_choice,seq_l]
        if token_type_ids is not None:
token_type_ids = token_type_ids.reshape(
(-1, token_type_ids.shape[-1]))
        if attention_mask is not None:
attention_mask = attention_mask.reshape(
(-1, attention_mask.shape[-1]))
_, pooled_output = self.nezha(input_ids, token_type_ids, attention_mask)
pooled_output = self.dropout(pooled_output)
logits = self.classifier(pooled_output) # logits: (bs*num_choice,1)
reshaped_logits = logits.reshape(
(-1, self.num_choices)) # logits: (bs, num_choice)
return reshaped_logits
app.py | mohammed-aladi/PyLocalHost | 0 | 6631877 | <reponame>mohammed-aladi/PyLocalHost<filename>app.py
from flask import Flask, render_template, request, url_for, session, redirect, flash
from werkzeug.utils import secure_filename
from functools import wraps
from markupsafe import escape
import os
import secrets
app = Flask(__name__)
app.secret_key = secrets.token_hex()
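# NOTE: a fresh random secret key is generated on every start-up, so all
# existing client sessions are invalidated whenever the server restarts.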
app.config['client_password'] = ''
# ---------------------------------------------------------------
# Decorators
def setup_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
client_password = app.config['client_password']
if not client_password:
return redirect(url_for('setup_page'))
return f(*args, **kwargs)
return decorated_function
def is_setup(f):
@wraps(f)
def decorated_function(*args, **kwargs):
        if 'admin_password' in session and 'client_password' in session:
return redirect(url_for('admin_page'))
return f(*args, **kwargs)
return decorated_function
def client_login_required(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'login' in session:
return f(*args, **kwargs)
elif 'admin' in session:
return redirect(url_for('admin_page'))
return redirect(url_for('login_page'))
return decorated_function
def is_admin(f):
@wraps(f)
def decorated_function(*args, **kwargs):
if 'admin' in session:
return f(*args, **kwargs)
        return redirect(url_for('index_page'))
return decorated_function
# ---------------------------------------------------------------
# ADMIN SECTION
# ---------------------------------------------------------------
# SETUP PAGE
@app.route('/setup', methods=['GET', 'POST'])
@is_setup
def setup_page():
if request.method == 'POST':
client_password = escape(request.form['client_password'])
        app.config['client_password'] = client_password
session['admin'] = True
return redirect(url_for('admin_page'))
return render_template('setup.html')
# -------------------------------------------------------------
# ADMIN PAGE
@app.route('/admin')
@setup_required
@is_admin
def admin_page():
client_password = app.config['client_password']
file_list = os.listdir(f'{os.path.abspath(os.getcwd())}\\static\\uploads')
return render_template('admin.html',
file_list=file_list,
client_password=client_password)
# --------------------------------------------------------------
@app.route('/admin/delete/file/<name>', methods=['GET'])
@is_admin
def delete_file(name):
os.remove(f'{os.path.abspath(os.getcwd())}\\static\\uploads\\{name}')
return redirect(url_for('admin_page'))
# --------------------------------------------------------------
@app.route('/admin/add/file', methods=['POST'])
@is_admin
def add_file():
file = request.files['file']
filename = secure_filename(file.filename)
file.save(os.path.join(f'{os.path.abspath(os.getcwd())}\\static\\uploads', filename))
return redirect(url_for('admin_page'))
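# Illustrative upload request against this endpoint (assumes the server listens
# on localhost:5000 and an authenticated session cookie is stored in cookies.txt):
#
#   curl -b cookies.txt -F "file=@report.pdf" http://localhost:5000/admin/add/file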
# -------------------------------------------------------------
# ERROR HANDLING
@app.errorhandler(404)
def page_not_found(e):
return render_template('404.html'), 404
# ---------------------------------------------------------------
# ---------------------------------------------------------------
# CLIENT SECTION
# ---------------------------------------------------------------
# ---------------------------------------------------------------
# CLIENT LOGIN
@app.route('/client/login', methods=['POST', 'GET'])
def login_page():
if request.method == 'POST':
password = escape(request.form['client_password'])
if not password:
flash('الرجاء إدخال كلمة المرور')
return redirect(url_for('login_page'))
if password == app.config['client_password']:
session['login'] = True
return redirect(url_for('index_page'))
return render_template('client_login.html')
# ---------------------------------------------------------------
# CLIENT INDEX PAGE
@app.route('/')
@setup_required
@client_login_required
def index_page():
file_list = os.listdir(f'{os.path.abspath(os.getcwd())}\\static\\uploads')
return render_template('index.html', file_list=file_list)
# --------------------------------------------------------------
if __name__ == '__main__':
app.run(host='0.0.0.0')
pyequion2/gui/seqsolution.py | pyequion/pyequion | 0 | 6631878 | # -*- coding: utf-8 -*-
import sys
import matplotlib
matplotlib.use('Qt5Agg')
import itertools
from PyQt5.QtWidgets import (QApplication, QWidget, QLabel,
QTextEdit, QLineEdit,
QPushButton, QCheckBox,
QGridLayout, QVBoxLayout,
QHBoxLayout, QMessageBox,
QComboBox, QScrollArea,
QFrame, QFileDialog)
from PyQt5.QtGui import QFont
from PyQt5.QtCore import Qt
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg
from matplotlib.figure import Figure
import numpy as np
HEADER_COLOR = "cyan"
class SeqSolutionGUI(QWidget):
def __init__(self, solutions, solver_log, type_eq, pairs, parent=None):
super().__init__(parent)
self.parent_ = parent #TODO: There must be some PyQt actual solution
self.pairs = pairs
self.solutions = solutions
self.base_solution = solutions[0]
self.solver_log = solver_log
self.type_eq = type_eq
self.plot_windows = []
self.initializeUI()
def initializeUI(self):
if not self.has_parent:
self.setGeometry(100, 100, 300, 300)
self.setWindowTitle("PyEquion GUI Solver")
self.setupWidgets()
self.show()
def setupWidgets(self):
self.main_layout = QGridLayout()
save_log_button = QPushButton("Save log (header)")
save_log_button.clicked.connect(self.save_log_to_file)
properties_vbox = self.make_properties_vbox()
properties_vbox.setContentsMargins(5, 5, 25, 5)
species_grid = self.make_species_grid()
species_grid.setContentsMargins(25, 5, 25, 5)
if self.type_eq == "aqueous":
phases_grid = self.make_saturation_indexes()
elif self.type_eq == "phase":
phases_grid = self.make_phase_molals()
phases_grid.setContentsMargins(25, 5, 5, 5)
self.main_layout.addLayout(properties_vbox, 0, 0)
self.main_layout.addLayout(species_grid, 0, 1)
self.main_layout.addLayout(phases_grid, 0, 2)
self.main_layout.addWidget(save_log_button, 1, 0)
self.setLayout(self.main_layout)
def make_species_grid(self):
sorted_species = sorted(self.base_solution.molals,
key=lambda k : self.base_solution.molals[k],
reverse=True)
species_grid = QGridLayout()
title_species_label = QLabel("Component")
title_molals_label = QLabel("Molal")
title_act_label = QLabel("Activity")
title_fraction_label = QLabel("Mole fraction")
species_grid.addWidget(title_species_label, 0, 0)
species_grid.addWidget(title_molals_label, 0, 1)
species_grid.addWidget(title_act_label, 0, 2)
species_grid.addWidget(title_fraction_label, 0, 3)
for i, specie in enumerate(sorted_species, 1):
# specie_hbox = QHBoxLayout()
specie_label = QLabel(specie)
molals_label = self.make_value_plot(specie, 'molals')
# conc_label = self.show_value_label(specie, self.base_solution.concentrations)
act_label = self.make_value_plot(specie, 'activities')
fraction_label = self.make_value_plot(specie, 'mole_fractions')
species_grid.addWidget(specie_label, i, 0)
species_grid.addWidget(molals_label, i, 1)
species_grid.addWidget(act_label, i, 2)
species_grid.addWidget(fraction_label, i, 3)
sorted_elements = sorted(self.base_solution.elements_molals,
key=lambda k : self.base_solution.elements_molals[k],
reverse=True)
for j, element in enumerate(sorted_elements, i+1):
specie_label = QLabel(element)
molals_label = self.make_value_plot(element, 'elements_molals')
act_label = QLabel("")
fraction_label = QLabel("")
species_grid.addWidget(specie_label, j, 0)
species_grid.addWidget(molals_label, j, 1)
species_grid.addWidget(act_label, j, 2)
species_grid.addWidget(fraction_label, j, 3)
species_grid.setRowStretch(species_grid.rowCount(), 1)
# species_grid.setSpacing(0)
# items = (species_grid.itemAt(i) for i in range(species_grid.count()))
# for item in items:
# item.widget().setStyleSheet("border: 1px solid black;")
# for i in range(species_grid.columnCount()):
# species_grid.setColumnStretch(i, 1)
return species_grid
def make_saturation_indexes(self):
phases = sorted(self.base_solution.saturation_indexes,
key = lambda k : self.base_solution.saturation_indexes[k],
reverse=True)
phases_grid = QGridLayout()
title_phase = QLabel("Phase")
title_si = QLabel("SI")
title_satur = QLabel("Saturation")
phases_grid.addWidget(title_phase, 0, 0)
phases_grid.addWidget(title_si, 0, 1)
phases_grid.addWidget(title_satur, 0, 2)
for i, phase in enumerate(phases, 1):
phase_label = QLabel(phase)
si_label = self.make_value_plot(phase, 'saturation_indexes')
satur_label = self.make_value_plot(phase, 'saturations')
phases_grid.addWidget(phase_label, i, 0)
phases_grid.addWidget(si_label, i, 1)
phases_grid.addWidget(satur_label, i, 2)
phases_grid.setRowStretch(phases_grid.rowCount(), 1)
return phases_grid
def make_phase_molals(self):
phases_grid = QGridLayout()
solid_phases = sorted(self.base_solution.solid_molals,
key = lambda k : self.base_solution.solid_molals[k],
reverse=True)
gas_phases = sorted(self.base_solution.gas_molals,
key = lambda k : self.base_solution.gas_molals[k],
reverse=True)
title_phase = QLabel("Phase")
title_molal = QLabel("Molals")
phases_grid.addWidget(title_phase, 0, 0)
phases_grid.addWidget(title_molal, 0, 1)
i = 0
for i, solid_phase in enumerate(solid_phases, 1):
phase_label = QLabel(solid_phase)
molal_label = self.make_value_plot(solid_phase, 'solid_molals')
phases_grid.addWidget(phase_label, i, 0)
phases_grid.addWidget(molal_label, i, 1)
for j, gas_phase in enumerate(gas_phases, i+1):
phase_label = QLabel(gas_phase)
molal_label = self.make_value_plot(gas_phase, 'gas_molals')
phases_grid.addWidget(phase_label, j, 0)
phases_grid.addWidget(molal_label, j, 1)
phases_grid.setRowStretch(phases_grid.rowCount(), 1)
# phases_grid.setSpacing(0)
# items = (phases_grid.itemAt(i) for i in range(phases_grid.count()))
# for item in items:
# item.widget().setStyleSheet("border: 1px solid black;")
# for i in range(phases_grid.columnCount()):
# phases_grid.setColumnStretch(i, 1)
return phases_grid
def make_properties_vbox(self):
properties_vbox = QVBoxLayout()
ph_button = QPushButton("pH")
ph_button.clicked.connect(lambda : self.plot_single_property("ph"))
ionic_strength_button = QPushButton("I")
ionic_strength_button.clicked.connect(lambda : self.plot_single_property("ionic_strength", "mol/kg H2O"))
conductivity_button = QPushButton("\u03C3")
conductivity_button.clicked.connect(lambda : self.plot_single_property("electrical_conductivity", "S/m"))
type_equilibrium_marker = "aqueous only" if (self.type_eq == "aqueous") else "phases precipitation"
type_equilibrium_string = "Equilibrium type: {0}".format(type_equilibrium_marker)
properties_vbox.addWidget(QLabel("Properties:"))
properties_vbox.addWidget(ph_button)
properties_vbox.addWidget(ionic_strength_button)
properties_vbox.addWidget(conductivity_button)
properties_vbox.addWidget(QLabel(" "))
properties_vbox.addWidget(QLabel("Balance conditions"))
properties_vbox.addWidget(QLabel(self.solver_log))
properties_vbox.addWidget(QLabel(type_equilibrium_string))
properties_vbox.addStretch()
return properties_vbox
def show_value_label(self, val, d):
if val in d:
label_str = "{:.2e}".format(d[val])
else:
label_str = ""
label = QLabel(label_str)
return label
def make_value_plot(self, val, property_name):
if val in getattr(self.base_solution, property_name):
button_str = "Plot"
button = QPushButton(button_str)
button.clicked.connect(lambda : self.plot_dict_property(val, property_name))
else:
button = QPushButton("")
return button
def plot_dict_property(self, val, property_name, unit="mol/kg H2O"):
properties = np.array([getattr(solution, property_name)[val]
for solution in self.solutions])
plot_window = PlotWindow()
plot_window.plot_widget.plot_pairs(properties, self.pairs)
plot_window.plot_widget.axes.set_ylabel("{0}:{1} [{2}]".format(property_name, val, unit))
plot_window.show()
self.plot_windows.append(plot_window)
def plot_single_property(self, property_name, unit=" "):
properties = np.array([getattr(solution, property_name)
for solution in self.solutions])
plot_window = PlotWindow(self)
plot_widget = plot_window.plot_widget
plot_widget.plot_pairs(properties, self.pairs)
plot_widget.axes.set_ylabel("{0} [{1}]".format(property_name, unit))
plot_window.show()
self.plot_windows.append(plot_window)
def save_log_to_file(self):
file_name, _ = QFileDialog.getSaveFileName(self, 'Save File',
"","Text File (*.txt)")
try:
with open(file_name, 'w') as f:
f.write(self.solver_log)
except:
QMessageBox.information(self, "Error",
"Unable to save file.", QMessageBox.Ok)
@property
def has_parent(self):
return self.parent_ is not None
class PlotWindow(QWidget):
def __init__(self, width=8, height=8, dpi=100):
super().__init__()
self.plot_widget = PlotWidget(width, height, dpi)
self.initializeUI()
def initializeUI(self):
self.setGeometry(100, 100, 600, 600)
self.setWindowTitle("Plot")
self.setupWidgets()
def setupWidgets(self):
layout = QVBoxLayout()
self.save_button = QPushButton("Save figure")
self.save_button.clicked.connect(self.savefig)
self.export_button = QPushButton("Export")
self.export_button.clicked.connect(self.export)
layout.addWidget(self.plot_widget)
layout.addWidget(self.save_button)
layout.addWidget(self.export_button)
self.setLayout(layout)
def savefig(self):
file_name, _ = QFileDialog.getSaveFileName(self, 'Save File',
"","PNG file (*.png)")
try:
self.plot_widget.fig.savefig(file_name)
except:
QMessageBox.information(self, "Error",
"Unable to save file.", QMessageBox.Ok)
def export(self):
file_name, _ = QFileDialog.getSaveFileName(self, 'Save File',
"","Text file (*.txt)")
try:
xy = self.plot_widget.axes.lines[0].get_xydata()
header = "{%s} {%s}"%(self.plot_widget.axes.get_xlabel(), self.plot_widget.axes.get_ylabel())
np.savetxt(file_name, xy, header=header)
except:
QMessageBox.information(self, "Error",
"Unable to save file.", QMessageBox.Ok)
class PlotWidget(FigureCanvasQTAgg):
def __init__(self, parent, width=8, height=8, dpi=100):
self.parent = parent
self.fig = Figure(figsize=(width, height), dpi=dpi)
self.axes = self.fig.add_subplot(111)
super().__init__(self.fig)
def plot(self, x, y):
self.axes.cla()
self.axes.plot(x, y)
def plot_single(self, x):
self.axes.cla()
self.axes.plot(x)
def plot_pairs(self, x, pairs):
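        # Plot x against the first parameter pair; every remaining pair is
        # rendered as an additional, offset x-axis below the figure.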
if len(pairs) == 0:
self.plot_single(x)
else:
base_pair, other_pairs = pairs[0], pairs[1:]
n = len(x)
xbase = np.linspace(base_pair.bounds[0], base_pair.bounds[1], n)
self.axes.cla()
self.axes.plot(xbase, x, 'o')
self.axes.set_xlabel("{0} [{1}]".format(base_pair.name, base_pair.unit))
for i, pair in enumerate(other_pairs, start=2):
name = "{0} [{1}]".format(pair.name, pair.unit)
bounds = pair.bounds
self.add_secondary_axis(bounds, name, i)
def add_secondary_axis(self, bounds, name=None, n=2):
axnew = self.axes.twiny()
newlabel = np.linspace(bounds[0], bounds[1], len(self.axes.get_xticks()))
newlabel = np.round(newlabel, 5)
axnew.set_xticks(self.axes.get_xticks())
axnew.set_xticklabels(newlabel)
axnew.xaxis.set_ticks_position('bottom') # set the position of the second x-axis to bottom
axnew.xaxis.set_label_position('bottom') # set the position of the second x-axis to bottom
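        # Offset each additional axis 36 points further below the previous one
        # so that stacked parameter scales do not overlap.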
axnew.spines['bottom'].set_position(('outward', 36*(n-1)))
if name is not None:
axnew.set_xlabel(name)
axnew.set_xlim(self.axes.get_xlim())
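

# A minimal illustrative launcher for this widget (a sketch under assumptions:
# `solutions` must be a non-empty sequence of solution objects exposing the
# attributes read above, and `pairs` must match the interface consumed by
# PlotWidget.plot_pairs):
#
#   if __name__ == "__main__":
#       app = QApplication(sys.argv)
#       gui = SeqSolutionGUI(solutions, solver_log="", type_eq="aqueous", pairs=[])
#       sys.exit(app.exec_())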
junit_reporting/migrations/0010_JUnitReport_project_not_null.py | nashif/django-junit-reporting | 0 | 6631879 | <reponame>nashif/django-junit-reporting<filename>junit_reporting/migrations/0010_JUnitReport_project_not_null.py
# -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-14 15:09
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('junit_reporting', '0009_wildcard_project'),
]
operations = [
migrations.AlterField(
model_name='junitreport',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='junit_reporting.JUnitProject'),
),
]
| # -*- coding: utf-8 -*-
# Generated by Django 1.11.6 on 2017-10-14 15:09
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('junit_reporting', '0009_wildcard_project'),
]
operations = [
migrations.AlterField(
model_name='junitreport',
name='project',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='junit_reporting.JUnitProject'),
),
] | en | 0.739132 | # -*- coding: utf-8 -*- # Generated by Django 1.11.6 on 2017-10-14 15:09 | 1.344611 | 1 |
src/models/tests/reach_point_test.py | pasin30055/planning-evaluation-framework | 0 | 6631880 | # Copyright 2021 The Private Cardinality Estimation Framework Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for reach_point.py."""
from absl.testing import absltest
from wfa_planning_evaluation_framework.models.reach_point import ReachPoint
class ReachPointTest(absltest.TestCase):
def test_impressions(self):
point = ReachPoint([200, 300], [150, 100])
self.assertEqual(point.impressions[0], 200)
self.assertEqual(point.impressions[1], 300)
def test_reach(self):
point = ReachPoint([200, 300], [100, 50])
self.assertEqual(point.reach(1), 100)
self.assertEqual(point.reach(2), 50)
self.assertRaises(ValueError, point.reach, 3)
def test_frequency(self):
point = ReachPoint([200, 300], [200, 125, 75])
self.assertEqual(point.frequency(1), 75)
self.assertEqual(point.frequency(2), 50)
self.assertRaises(ValueError, point.frequency, 3)
def test_spends(self):
point = ReachPoint([200, 300], [100, 50])
self.assertIsNone(point.spends)
point2 = ReachPoint([200, 300], [100, 50], [10.0, 20.0])
self.assertEqual(point2.spends[0], 10.0)
self.assertEqual(point2.spends[1], 20.0)
def test_frequencies_to_kplus_reaches(self):
self.assertEqual(ReachPoint.frequencies_to_kplus_reaches([]), [])
self.assertEqual(ReachPoint.frequencies_to_kplus_reaches([1]), [1])
self.assertEqual(ReachPoint.frequencies_to_kplus_reaches([2, 1]), [3, 1])
def test_user_counts_to_frequencies(self):
self.assertEqual(ReachPoint.user_counts_to_frequencies({}, 3), [0, 0, 0])
self.assertEqual(ReachPoint.user_counts_to_frequencies({3: 1}, 3), [1, 0, 0])
self.assertEqual(
ReachPoint.user_counts_to_frequencies({3: 1, 2: 1}, 3), [2, 0, 0]
)
self.assertEqual(
ReachPoint.user_counts_to_frequencies({3: 1, 2: 1, 1: 2, 4: 3, 5: 4}, 3),
[2, 1, 2],
)
def test_user_counts_to_kplus_reaches(self):
self.assertEqual(ReachPoint.user_counts_to_kplus_reaches({}, 3), [0, 0, 0])
self.assertEqual(ReachPoint.user_counts_to_kplus_reaches({3: 1}, 3), [1, 0, 0])
self.assertEqual(ReachPoint.user_counts_to_kplus_reaches({3: 2}, 3), [1, 1, 0])
self.assertEqual(
ReachPoint.user_counts_to_kplus_reaches({3: 1, 2: 1}, 3), [2, 0, 0]
)
self.assertEqual(
ReachPoint.user_counts_to_kplus_reaches({3: 1, 2: 1, 1: 2, 4: 3, 5: 4}, 3),
[5, 3, 2],
)
if __name__ == "__main__":
absltest.main()
| # Copyright 2021 The Private Cardinality Estimation Framework Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for reach_point.py."""
from absl.testing import absltest
from wfa_planning_evaluation_framework.models.reach_point import ReachPoint
class ReachPointTest(absltest.TestCase):
def test_impressions(self):
point = ReachPoint([200, 300], [150, 100])
self.assertEqual(point.impressions[0], 200)
self.assertEqual(point.impressions[1], 300)
def test_reach(self):
point = ReachPoint([200, 300], [100, 50])
self.assertEqual(point.reach(1), 100)
self.assertEqual(point.reach(2), 50)
self.assertRaises(ValueError, point.reach, 3)
def test_frequency(self):
point = ReachPoint([200, 300], [200, 125, 75])
self.assertEqual(point.frequency(1), 75)
self.assertEqual(point.frequency(2), 50)
self.assertRaises(ValueError, point.frequency, 3)
def test_spends(self):
point = ReachPoint([200, 300], [100, 50])
self.assertIsNone(point.spends)
point2 = ReachPoint([200, 300], [100, 50], [10.0, 20.0])
self.assertEqual(point2.spends[0], 10.0)
self.assertEqual(point2.spends[1], 20.0)
def test_frequencies_to_kplus_reaches(self):
self.assertEqual(ReachPoint.frequencies_to_kplus_reaches([]), [])
self.assertEqual(ReachPoint.frequencies_to_kplus_reaches([1]), [1])
self.assertEqual(ReachPoint.frequencies_to_kplus_reaches([2, 1]), [3, 1])
def test_user_counts_to_frequencies(self):
self.assertEqual(ReachPoint.user_counts_to_frequencies({}, 3), [0, 0, 0])
self.assertEqual(ReachPoint.user_counts_to_frequencies({3: 1}, 3), [1, 0, 0])
self.assertEqual(
ReachPoint.user_counts_to_frequencies({3: 1, 2: 1}, 3), [2, 0, 0]
)
self.assertEqual(
ReachPoint.user_counts_to_frequencies({3: 1, 2: 1, 1: 2, 4: 3, 5: 4}, 3),
[2, 1, 2],
)
def test_user_counts_to_kplus_reaches(self):
self.assertEqual(ReachPoint.user_counts_to_kplus_reaches({}, 3), [0, 0, 0])
self.assertEqual(ReachPoint.user_counts_to_kplus_reaches({3: 1}, 3), [1, 0, 0])
self.assertEqual(ReachPoint.user_counts_to_kplus_reaches({3: 2}, 3), [1, 1, 0])
self.assertEqual(
ReachPoint.user_counts_to_kplus_reaches({3: 1, 2: 1}, 3), [2, 0, 0]
)
self.assertEqual(
ReachPoint.user_counts_to_kplus_reaches({3: 1, 2: 1, 1: 2, 4: 3, 5: 4}, 3),
[5, 3, 2],
)
if __name__ == "__main__":
absltest.main()
| en | 0.843606 | # Copyright 2021 The Private Cardinality Estimation Framework Authors # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Tests for reach_point.py. | 2.092627 | 2 |
test/utils/test_feasible_volume.py | talesa/botorch | 0 | 6631881 | #! /usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
from botorch.utils.feasible_volume import (
estimate_feasible_volume,
get_feasible_samples,
get_outcome_feasibility_probability,
)
from botorch.utils.testing import BotorchTestCase, MockModel, MockPosterior
class TestFeasibleVolumeEstimates(BotorchTestCase):
def test_feasible_samples(self):
# -X[0]+X[1]>=1
inequality_constraints = [(torch.tensor([0, 1]), torch.tensor([-1.0, 1.0]), 1)]
box_samples = torch.tensor([[1.1, 2.0], [0.9, 2.1], [1.5, 2], [1.8, 2.2]])
feasible_samples, p_linear = get_feasible_samples(
samples=box_samples, inequality_constraints=inequality_constraints
)
feasible = box_samples[:, 1] - box_samples[:, 0] >= 1
self.assertTrue(
torch.all(torch.eq(feasible_samples, box_samples[feasible])).item()
)
self.assertEqual(p_linear, feasible.sum(0).float().item() / feasible.size(0))
def test_outcome_feasibility_probability(self):
for dtype in (torch.float, torch.double):
samples = torch.zeros(1, 1, 1, device=self.device, dtype=dtype)
mm = MockModel(MockPosterior(samples=samples))
X = torch.zeros(1, 1, device=self.device, dtype=torch.double)
for outcome_constraints in [
[lambda y: y[..., 0] - 0.5],
[lambda y: y[..., 0] + 1.0],
]:
p_outcome = get_outcome_feasibility_probability(
model=mm,
X=X,
outcome_constraints=outcome_constraints,
nsample_outcome=2,
)
feasible = outcome_constraints[0](samples) <= 0
self.assertEqual(p_outcome, feasible)
def test_estimate_feasible_volume(self):
for dtype in (torch.float, torch.double):
for samples in (
torch.zeros(1, 2, 1, device=self.device, dtype=dtype),
torch.ones(1, 1, 1, device=self.device, dtype=dtype),
):
mm = MockModel(MockPosterior(samples=samples))
bounds = torch.ones((2, 1))
outcome_constraints = [lambda y: y[..., 0] - 0.5]
p_linear, p_outcome = estimate_feasible_volume(
bounds=bounds,
model=mm,
outcome_constraints=outcome_constraints,
nsample_feature=2,
nsample_outcome=1,
dtype=dtype,
)
self.assertEqual(p_linear, 1.0)
self.assertEqual(p_outcome, 1.0 - samples[0, 0].item())
p_linear, p_outcome = estimate_feasible_volume(
bounds=bounds,
model=mm,
outcome_constraints=None,
nsample_feature=2,
nsample_outcome=1,
dtype=dtype,
)
self.assertEqual(p_linear, 1.0)
self.assertEqual(p_outcome, 1.0)
| #! /usr/bin/env python3
# Copyright (c) Meta Platforms, Inc. and affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import torch
from botorch.utils.feasible_volume import (
estimate_feasible_volume,
get_feasible_samples,
get_outcome_feasibility_probability,
)
from botorch.utils.testing import BotorchTestCase, MockModel, MockPosterior
class TestFeasibleVolumeEstimates(BotorchTestCase):
def test_feasible_samples(self):
# -X[0]+X[1]>=1
inequality_constraints = [(torch.tensor([0, 1]), torch.tensor([-1.0, 1.0]), 1)]
box_samples = torch.tensor([[1.1, 2.0], [0.9, 2.1], [1.5, 2], [1.8, 2.2]])
feasible_samples, p_linear = get_feasible_samples(
samples=box_samples, inequality_constraints=inequality_constraints
)
feasible = box_samples[:, 1] - box_samples[:, 0] >= 1
self.assertTrue(
torch.all(torch.eq(feasible_samples, box_samples[feasible])).item()
)
self.assertEqual(p_linear, feasible.sum(0).float().item() / feasible.size(0))
def test_outcome_feasibility_probability(self):
for dtype in (torch.float, torch.double):
samples = torch.zeros(1, 1, 1, device=self.device, dtype=dtype)
mm = MockModel(MockPosterior(samples=samples))
X = torch.zeros(1, 1, device=self.device, dtype=torch.double)
for outcome_constraints in [
[lambda y: y[..., 0] - 0.5],
[lambda y: y[..., 0] + 1.0],
]:
p_outcome = get_outcome_feasibility_probability(
model=mm,
X=X,
outcome_constraints=outcome_constraints,
nsample_outcome=2,
)
feasible = outcome_constraints[0](samples) <= 0
self.assertEqual(p_outcome, feasible)
def test_estimate_feasible_volume(self):
for dtype in (torch.float, torch.double):
for samples in (
torch.zeros(1, 2, 1, device=self.device, dtype=dtype),
torch.ones(1, 1, 1, device=self.device, dtype=dtype),
):
mm = MockModel(MockPosterior(samples=samples))
bounds = torch.ones((2, 1))
outcome_constraints = [lambda y: y[..., 0] - 0.5]
p_linear, p_outcome = estimate_feasible_volume(
bounds=bounds,
model=mm,
outcome_constraints=outcome_constraints,
nsample_feature=2,
nsample_outcome=1,
dtype=dtype,
)
self.assertEqual(p_linear, 1.0)
self.assertEqual(p_outcome, 1.0 - samples[0, 0].item())
p_linear, p_outcome = estimate_feasible_volume(
bounds=bounds,
model=mm,
outcome_constraints=None,
nsample_feature=2,
nsample_outcome=1,
dtype=dtype,
)
self.assertEqual(p_linear, 1.0)
self.assertEqual(p_outcome, 1.0)
| en | 0.83541 | #! /usr/bin/env python3 # Copyright (c) Meta Platforms, Inc. and affiliates. # # This source code is licensed under the MIT license found in the # LICENSE file in the root directory of this source tree. # -X[0]+X[1]>=1 | 1.925522 | 2 |
examples/callbacks.py | JacobKosowski/mpl-point-clicker | 3 | 6631882 | """
---------
Callbacks
---------
Demonstration of how to set up callback functions.
"""
from typing import Tuple
import matplotlib.pyplot as plt
import numpy as np
from mpl_point_clicker import clicker
image = np.load("example_image.npy")
fig, ax = plt.subplots()
ax.imshow(image, cmap='gray')
klicker = clicker(ax, ['cells', 'pdms', 'media'], markers=['o', 'x', '*'])
def class_changed_cb(new_class: str):
print(f'The newly selected class is {new_class}')
def point_added_cb(position: Tuple[float, float], klass: str):
x, y = position
print(f"New point of class {klass} added at {x=}, {y=}")
def point_removed_cb(position: Tuple[float, float], klass: str, idx):
x, y = position
suffix = {1: 'st', 2: 'nd', 3: 'rd'}.get(idx, 'th')
print(
f"The {idx}{suffix} point of class {klass} with position {x=:.2f}, {y=:.2f} was removed"
)
klicker.on_class_changed(class_changed_cb)
klicker.on_point_added(point_added_cb)
klicker.on_point_removed(point_removed_cb)
plt.show()
print(klicker.get_positions())
| """
---------
Callbacks
---------
Demonstration of how to set up callback functions.
"""
from typing import Tuple
import matplotlib.pyplot as plt
import numpy as np
from mpl_point_clicker import clicker
image = np.load("example_image.npy")
fig, ax = plt.subplots()
ax.imshow(image, cmap='gray')
klicker = clicker(ax, ['cells', 'pdms', 'media'], markers=['o', 'x', '*'])
def class_changed_cb(new_class: str):
print(f'The newly selected class is {new_class}')
def point_added_cb(position: Tuple[float, float], klass: str):
x, y = position
print(f"New point of class {klass} added at {x=}, {y=}")
def point_removed_cb(position: Tuple[float, float], klass: str, idx):
x, y = position
suffix = {1: 'st', 2: 'nd', 3: 'rd'}.get(idx, 'th')
print(
f"The {idx}{suffix} point of class {klass} with position {x=:.2f}, {y=:.2f} was removed"
)
klicker.on_class_changed(class_changed_cb)
klicker.on_point_added(point_added_cb)
klicker.on_point_removed(point_removed_cb)
plt.show()
print(klicker.get_positions())
| en | 0.628585 | --------- Callbacks --------- Demonstration of how to set up callback functions. | 3.163352 | 3 |
appengine/app/shared/components/user/user_manager.py | zugaldia/capitalbikeshare | 0 | 6631883 | '''
Here we wrap any operations that go beyond the UserModel. For example,
launching tasks when users get added/deleted, or the process to
reset/change a password. This logic could change from app to app.
'''
from appython.components.user.base_manager import BaseManager
from shared.components.queue.queue_manager import QueueManager
from shared.config import Config
from shared.models.user_model import UserModel
import logging
class UserManager(BaseManager):
@classmethod
def create_from_email(cls, email, password):
# First we check if the user already exists. (This method is safe to
# call even on existing users, it won't create duplicates.)
user_model = UserModel.get_by_email(email=email)
if user_model:
return False
# Is admin?
email_ready = cls.prepare_email(email=email)
is_admin = (email_ready == Config.SUPERUSER_EMAIL)
# Create the user
user_model = UserModel.create_from_email(email=email, password=password)
user_model.is_admin = is_admin
user_model.api_key = cls.generate_api_key()
user_model.put()
# Launch the welcome task
QueueManager.launch_user_registration(user_id=user_model.get_id())
# Done
return user_model
| '''
Here we wrap any operations that go beyond the UserModel. For example,
launching tasks when users get added/deleted, or the process to
reset/change a password. This logic could change from app to app.
'''
from appython.components.user.base_manager import BaseManager
from shared.components.queue.queue_manager import QueueManager
from shared.config import Config
from shared.models.user_model import UserModel
import logging
class UserManager(BaseManager):
@classmethod
def create_from_email(cls, email, password):
# First we check if the user already exists. (This method is safe to
# call even on existing users, it won't create duplicates.)
user_model = UserModel.get_by_email(email=email)
if user_model:
return False
# Is admin?
email_ready = cls.prepare_email(email=email)
is_admin = (email_ready == Config.SUPERUSER_EMAIL)
# Create the user
user_model = UserModel.create_from_email(email=email, password=password)
user_model.is_admin = is_admin
user_model.api_key = cls.generate_api_key()
user_model.put()
# Launch the welcome task
QueueManager.launch_user_registration(user_id=user_model.get_id())
# Done
return user_model
| en | 0.868906 | Here we wrap any operations that go beyond the UserModel. For example, launching tasks when users get added/deleted, or the process to reset/change a password. This logic could change from app to app. # First we check if the user already exists. (This method is safe to # call even on existing users, it won't create duplicates.) # Is admin? # Create the user # Launch the welcome task # Done | 2.540791 | 3 |
ko_model/start.py | Ohjiwoo-lab/News-Articles-Recommendation | 0 | 6631884 | <gh_stars>0
from recommend_engine import Process, Cluster, Sentiment, Content
import pandas as pd
from konlpy.tag import Okt
# 이 파일을 실행하세요
turn = 10 # 이것이 사용자가 본 뉴스 개수
# 사용자 클러스터화
data = pd.read_excel('news_user1.xlsx')
data = data[data['valid']==1]
lines = data['text'].tolist()
df = pd.read_csv('../output/Article_IT과학_201701_201804.csv', header=None, names=['publish_year', 'catagory', 'publish', 'title', 'content', 'url'])
index_list = list(range(0, 20, 1))
df = pd.DataFrame(df[:20], index=index_list) # 500개의 뉴스만 가져옴
docs = list(df['content']) # 본 뉴스만 docs에 넣어서 작업 수행.
if (0 <= turn) & (turn < 10): # 본 뉴스의 개수가 10개가 되지 않으면 content만으로 추천이 이루어진다.
Ct = Content(df)
else:
pc = Process()
Str = pc.go(lines)
Clu = Cluster(Str)
clusters = Clu.division()
df = pd.DataFrame(columns=['cluster', 'texts', 'ptexts', 'corpus', 'docs'])
df['cluster'] = clusters
df['texts'] = lines
df['ptexts'] = Str
Sent = Sentiment(df['ptexts'])
corpus, docs, dictionary = Sent.Corpus()
df['corpus'] = corpus
df['docs'] = docs
UniqueNames = df['cluster'].unique()
# 클러스터화 한 뒤 분석
import numpy as np
from gensim.corpora.dictionary import Dictionary
DataFrameDict = {elem : pd.DataFrame for elem in UniqueNames}
docscluster={elem : pd.DataFrame for elem in UniqueNames}
corpuscluster={elem : pd.DataFrame for elem in UniqueNames}
dictionarycluster={elem : pd.DataFrame for elem in UniqueNames}
for key in DataFrameDict.keys():
DataFrameDict[key] = df[:][df.cluster == key]
docscluster[key]=df['docs'][df.cluster == key]
corpuscluster[key]=df['corpus'][df.cluster == key]
dictionarycluster[key]=Dictionary(docscluster[key])
# polarity(긍정적/부정적 감정 : -1~1 사이의 값), subjectivity(객관성/주관성 : 0~1 사이의 값)
from textblob import TextBlob
polaritycluster={elem : pd.DataFrame for elem in UniqueNames}
subjectivitycluster={elem : pd.DataFrame for elem in UniqueNames}
for i in DataFrameDict.keys():
polaritycluster[i]=TextBlob(' '.join(DataFrameDict[i]['texts'].astype('str'))).sentiment.polarity
subjectivitycluster[i]=TextBlob(' '.join(DataFrameDict[i]['texts'].astype('str'))).sentiment.subjectivity
# 사용자 데이터프레임 출력
with pd.option_context('display.max_rows', None, 'display.max_columns', None):
print('-'*100)
print('User가 본 뉴스에 대한 정보를 담고 있는 df 데이터프레임')
print('-'*100)
print(df)
# 토픽 모델링
import tomotopy as tp
model = tp.LDAModel(k=5, alpha=0.1, eta=0.01, min_cf=4)
# 전체 말뭉치에 4회 미만 등장한 단어들은 제거할 겁니다.
for line in df['ptexts']:
model.add_doc(line.strip().split(' ')) # 공백 기준으로 단어를 나누어 model에 추가합니다.
# model의 num_words나 num_vocabs 등은 train을 시작해야 확정됩니다.
# 따라서 이 값을 확인하기 위해서 train(0)을 하여 실제 train은 하지 않고 학습 준비만 시킵니다.
# num_words, num_vocabs에 관심 없다면 이부분은 생략해도 됩니다.
model.train(0)
# print('Total docs:', len(model.docs))
# print('Total words:', model.num_words)
# print('Vocab size:', model.num_vocabs)
# 다음 구문은 train을 총 200회 반복하면서, 매 단계별로 로그 가능도 값을 출력해줍니다.
# 혹은 단순히 model.train(200)으로 200회 반복도 가능합니다.
for i in range(200):
model.train(1)
# 학습된 토픽들을 출력해보도록 합시다.
for i in range(model.k):
# 토픽별 상위 단어 10개를 뽑아봅시다.
res = model.get_topic_words(i, top_n=10)
print('Topic #{}'.format(i), end='\t')
print(', '.join(w for w, p in res))
for i in range(len(model.docs)):
print('doc #{}'.format(i), end='\t')
print(model.docs[i].get_topics(top_n=2))
#-----------------------------------------------------------------
# 새로운 뉴스, 추천할 뉴스 골라내기
data2 = pd.read_excel('news_user1.xlsx')
data = data[data['valid']==0]
lines2 = data2['text'].tolist()
pc2 = Process()
Str2 = pc2.go(lines2)
df_text = pd.DataFrame(columns=['text'])
df_text['text'] = Str2
Sent2 = Sentiment(df_text['text'])
corpus2, docs2, dictionary2 = Sent.Corpus()
pol=[TextBlob(' '.join(df_text.iloc[i,0])).sentiment.polarity for i in range(df_text.shape[0])]
sub=[TextBlob(' '.join(df_text.iloc[i,0])).sentiment.subjectivity for i in range(df_text.shape[0])]
df_text['pol']=pol
df_text['sub']=sub
def format_topics_sentences(num, ldamodel):
Topic_num_ls = []
Perc_Contribution_ls = []
Topic_Keywords_ls = []
for j in range(len(ldamodel.docs)):
topic = list(ldamodel.docs[j].get_topics(top_n=1)[0])
if topic[0] == num:
topic_keywords = []
res = ldamodel.get_topic_words(num, top_n=10)
topic_keywords.append(', '.join(w for w, p in res))
Topic_num_ls.append(int(topic[0]))
Perc_Contribution_ls.append(round(topic[1],4))
Topic_Keywords_ls.append(topic_keywords)
sent_topics_df = pd.DataFrame({'Topic_num': Topic_num_ls,
'Perc_Contribution': Perc_Contribution_ls,
'Topic_Keywords': Topic_Keywords_ls})
return sent_topics_df
from sklearn.metrics.pairwise import cosine_similarity
# The percent contribution of each topic model considered as metric to assign topic score
df_topic_sents_keywords={elem : pd.DataFrame for elem in UniqueNames}
for i in range(len(UniqueNames)):
df_topic_sents_keywords[i] = format_topics_sentences(i, model)
df_topic_sents_keywords[i]['Diff']= df_topic_sents_keywords[i]['Perc_Contribution']-np.mean(df_topic_sents_keywords[i]['Perc_Contribution'])
sentiment={elem: pd.DataFrame for elem in UniqueNames}
subjectivit={elem: pd.DataFrame for elem in UniqueNames}
w1=0.8 # Topic modelling weight
w2=0.2 # Sentiment score weight
for i in range(len(UniqueNames)):
sentiment[i] = cosine_similarity(np.array(df_text.iloc[:, 2]).reshape(-1, 1), np.array([polaritycluster[i]]).reshape(-1, 1))
subjectivit[i] = cosine_similarity(np.array(df_text.iloc[:, 2]).reshape(-1, 1), np.array([subjectivitycluster[i]]).reshape(-1, 1))
sentiment[i] = np.array(sentiment[i]).flatten().tolist()
subjectivit[i] = np.array(subjectivit[i]).flatten().tolist()
a = []
b = []
for j in range(len(model.docs)):
topic = list(model.docs[j].get_topics(top_n=1)[0])
if topic[0] == i:
a.append(sentiment[i][j])
b.append(subjectivit[i][j])
df_topic_sents_keywords[i]['Polarity'] = a
df_topic_sents_keywords[i]['Subjectivity'] = b
df_topic_sents_keywords[i]['Metric']=w1*df_topic_sents_keywords[i]['Diff']+w2/2*(df_topic_sents_keywords[i]['Polarity']+df_topic_sents_keywords[i]['Subjectivity'])
print(df_topic_sents_keywords[0])
print(df_topic_sents_keywords[1])
# 추천 메트릭스
recommend=pd.DataFrame()
recommender=pd.DataFrame()
metric_value=pd.DataFrame()
rec=np.array([])
for j in range(len(model.docs)):
count = 0
topic = list(model.docs[j].get_topics(top_n=1)[0])
for i in range(len(UniqueNames)):
if topic[0] == i:
rec=np.append(rec, df_topic_sents_keywords[i].iloc[count,6])
count += 1
recommender=recommender.append(pd.Series(np.argmax(rec)),ignore_index=True)
metric_value=metric_value.append(pd.Series(np.amax(rec)),ignore_index=True)
rec=np.array([])
recommend['cluster']=recommender
recommend['metric']=metric_value
recommend['article_text']=df_text['text']
recommend.to_csv('recommend.csv',index=None)
| from recommend_engine import Process, Cluster, Sentiment, Content
import pandas as pd
from konlpy.tag import Okt
# 이 파일을 실행하세요
turn = 10 # 이것이 사용자가 본 뉴스 개수
# 사용자 클러스터화
data = pd.read_excel('news_user1.xlsx')
data = data[data['valid']==1]
lines = data['text'].tolist()
df = pd.read_csv('../output/Article_IT과학_201701_201804.csv', header=None, names=['publish_year', 'catagory', 'publish', 'title', 'content', 'url'])
index_list = list(range(0, 20, 1))
df = pd.DataFrame(df[:20], index=index_list) # 500개의 뉴스만 가져옴
docs = list(df['content']) # 본 뉴스만 docs에 넣어서 작업 수행.
if (0 <= turn) & (turn < 10): # 본 뉴스의 개수가 10개가 되지 않으면 content만으로 추천이 이루어진다.
Ct = Content(df)
else:
pc = Process()
Str = pc.go(lines)
Clu = Cluster(Str)
clusters = Clu.division()
df = pd.DataFrame(columns=['cluster', 'texts', 'ptexts', 'corpus', 'docs'])
df['cluster'] = clusters
df['texts'] = lines
df['ptexts'] = Str
Sent = Sentiment(df['ptexts'])
corpus, docs, dictionary = Sent.Corpus()
df['corpus'] = corpus
df['docs'] = docs
UniqueNames = df['cluster'].unique()
# 클러스터화 한 뒤 분석
import numpy as np
from gensim.corpora.dictionary import Dictionary
DataFrameDict = {elem : pd.DataFrame for elem in UniqueNames}
docscluster={elem : pd.DataFrame for elem in UniqueNames}
corpuscluster={elem : pd.DataFrame for elem in UniqueNames}
dictionarycluster={elem : pd.DataFrame for elem in UniqueNames}
for key in DataFrameDict.keys():
DataFrameDict[key] = df[:][df.cluster == key]
docscluster[key]=df['docs'][df.cluster == key]
corpuscluster[key]=df['corpus'][df.cluster == key]
dictionarycluster[key]=Dictionary(docscluster[key])
# polarity(긍정적/부정적 감정 : -1~1 사이의 값), subjectivity(객관성/주관성 : 0~1 사이의 값)
from textblob import TextBlob
polaritycluster={elem : pd.DataFrame for elem in UniqueNames}
subjectivitycluster={elem : pd.DataFrame for elem in UniqueNames}
for i in DataFrameDict.keys():
polaritycluster[i]=TextBlob(' '.join(DataFrameDict[i]['texts'].astype('str'))).sentiment.polarity
subjectivitycluster[i]=TextBlob(' '.join(DataFrameDict[i]['texts'].astype('str'))).sentiment.subjectivity
# 사용자 데이터프레임 출력
with pd.option_context('display.max_rows', None, 'display.max_columns', None):
print('-'*100)
print('User가 본 뉴스에 대한 정보를 담고 있는 df 데이터프레임')
print('-'*100)
print(df)
# 토픽 모델링
import tomotopy as tp
model = tp.LDAModel(k=5, alpha=0.1, eta=0.01, min_cf=4)
# 전체 말뭉치에 4회 미만 등장한 단어들은 제거할 겁니다.
for line in df['ptexts']:
model.add_doc(line.strip().split(' ')) # 공백 기준으로 단어를 나누어 model에 추가합니다.
# model의 num_words나 num_vocabs 등은 train을 시작해야 확정됩니다.
# 따라서 이 값을 확인하기 위해서 train(0)을 하여 실제 train은 하지 않고 학습 준비만 시킵니다.
# num_words, num_vocabs에 관심 없다면 이부분은 생략해도 됩니다.
model.train(0)
# print('Total docs:', len(model.docs))
# print('Total words:', model.num_words)
# print('Vocab size:', model.num_vocabs)
# 다음 구문은 train을 총 200회 반복하면서, 매 단계별로 로그 가능도 값을 출력해줍니다.
# 혹은 단순히 model.train(200)으로 200회 반복도 가능합니다.
for i in range(200):
model.train(1)
# 학습된 토픽들을 출력해보도록 합시다.
for i in range(model.k):
# 토픽별 상위 단어 10개를 뽑아봅시다.
res = model.get_topic_words(i, top_n=10)
print('Topic #{}'.format(i), end='\t')
print(', '.join(w for w, p in res))
for i in range(len(model.docs)):
print('doc #{}'.format(i), end='\t')
print(model.docs[i].get_topics(top_n=2))
#-----------------------------------------------------------------
# 새로운 뉴스, 추천할 뉴스 골라내기
data2 = pd.read_excel('news_user1.xlsx')
data = data[data['valid']==0]
lines2 = data2['text'].tolist()
pc2 = Process()
Str2 = pc2.go(lines2)
df_text = pd.DataFrame(columns=['text'])
df_text['text'] = Str2
Sent2 = Sentiment(df_text['text'])
corpus2, docs2, dictionary2 = Sent.Corpus()
pol=[TextBlob(' '.join(df_text.iloc[i,0])).sentiment.polarity for i in range(df_text.shape[0])]
sub=[TextBlob(' '.join(df_text.iloc[i,0])).sentiment.subjectivity for i in range(df_text.shape[0])]
df_text['pol']=pol
df_text['sub']=sub
def format_topics_sentences(num, ldamodel):
Topic_num_ls = []
Perc_Contribution_ls = []
Topic_Keywords_ls = []
for j in range(len(ldamodel.docs)):
topic = list(ldamodel.docs[j].get_topics(top_n=1)[0])
if topic[0] == num:
topic_keywords = []
res = ldamodel.get_topic_words(num, top_n=10)
topic_keywords.append(', '.join(w for w, p in res))
Topic_num_ls.append(int(topic[0]))
Perc_Contribution_ls.append(round(topic[1],4))
Topic_Keywords_ls.append(topic_keywords)
sent_topics_df = pd.DataFrame({'Topic_num': Topic_num_ls,
'Perc_Contribution': Perc_Contribution_ls,
'Topic_Keywords': Topic_Keywords_ls})
return sent_topics_df
from sklearn.metrics.pairwise import cosine_similarity
# The percent contribution of each topic model considered as metric to assign topic score
df_topic_sents_keywords={elem : pd.DataFrame for elem in UniqueNames}
for i in range(len(UniqueNames)):
df_topic_sents_keywords[i] = format_topics_sentences(i, model)
df_topic_sents_keywords[i]['Diff']= df_topic_sents_keywords[i]['Perc_Contribution']-np.mean(df_topic_sents_keywords[i]['Perc_Contribution'])
sentiment={elem: pd.DataFrame for elem in UniqueNames}
subjectivit={elem: pd.DataFrame for elem in UniqueNames}
w1=0.8 # Topic modelling weight
w2=0.2 # Sentiment score weight
for i in range(len(UniqueNames)):
sentiment[i] = cosine_similarity(np.array(df_text.iloc[:, 2]).reshape(-1, 1), np.array([polaritycluster[i]]).reshape(-1, 1))
subjectivit[i] = cosine_similarity(np.array(df_text.iloc[:, 2]).reshape(-1, 1), np.array([subjectivitycluster[i]]).reshape(-1, 1))
sentiment[i] = np.array(sentiment[i]).flatten().tolist()
subjectivit[i] = np.array(subjectivit[i]).flatten().tolist()
a = []
b = []
for j in range(len(model.docs)):
topic = list(model.docs[j].get_topics(top_n=1)[0])
if topic[0] == i:
a.append(sentiment[i][j])
b.append(subjectivit[i][j])
df_topic_sents_keywords[i]['Polarity'] = a
df_topic_sents_keywords[i]['Subjectivity'] = b
df_topic_sents_keywords[i]['Metric']=w1*df_topic_sents_keywords[i]['Diff']+w2/2*(df_topic_sents_keywords[i]['Polarity']+df_topic_sents_keywords[i]['Subjectivity'])
print(df_topic_sents_keywords[0])
print(df_topic_sents_keywords[1])
# 추천 메트릭스
recommend=pd.DataFrame()
recommender=pd.DataFrame()
metric_value=pd.DataFrame()
rec=np.array([])
for j in range(len(model.docs)):
count = 0
topic = list(model.docs[j].get_topics(top_n=1)[0])
for i in range(len(UniqueNames)):
if topic[0] == i:
rec=np.append(rec, df_topic_sents_keywords[i].iloc[count,6])
count += 1
recommender=recommender.append(pd.Series(np.argmax(rec)),ignore_index=True)
metric_value=metric_value.append(pd.Series(np.amax(rec)),ignore_index=True)
rec=np.array([])
recommend['cluster']=recommender
recommend['metric']=metric_value
recommend['article_text']=df_text['text']
recommend.to_csv('recommend.csv',index=None) | ko | 0.99743 | # 이 파일을 실행하세요 # 이것이 사용자가 본 뉴스 개수 # 사용자 클러스터화 # 500개의 뉴스만 가져옴 # 본 뉴스만 docs에 넣어서 작업 수행. # 본 뉴스의 개수가 10개가 되지 않으면 content만으로 추천이 이루어진다. # 클러스터화 한 뒤 분석 # polarity(긍정적/부정적 감정 : -1~1 사이의 값), subjectivity(객관성/주관성 : 0~1 사이의 값) # 사용자 데이터프레임 출력 # 토픽 모델링 # 전체 말뭉치에 4회 미만 등장한 단어들은 제거할 겁니다. # 공백 기준으로 단어를 나누어 model에 추가합니다. # model의 num_words나 num_vocabs 등은 train을 시작해야 확정됩니다. # 따라서 이 값을 확인하기 위해서 train(0)을 하여 실제 train은 하지 않고 학습 준비만 시킵니다. # num_words, num_vocabs에 관심 없다면 이부분은 생략해도 됩니다. # print('Total docs:', len(model.docs)) # print('Total words:', model.num_words) # print('Vocab size:', model.num_vocabs) # 다음 구문은 train을 총 200회 반복하면서, 매 단계별로 로그 가능도 값을 출력해줍니다. # 혹은 단순히 model.train(200)으로 200회 반복도 가능합니다. # 학습된 토픽들을 출력해보도록 합시다. # 토픽별 상위 단어 10개를 뽑아봅시다. #{}'.format(i), end='\t') #{}'.format(i), end='\t') #----------------------------------------------------------------- # 새로운 뉴스, 추천할 뉴스 골라내기 # The percent contribution of each topic model considered as metric to assign topic score # Topic modelling weight # Sentiment score weight # 추천 메트릭스 | 2.692791 | 3 |
machine_learning_as_a_service/lepo/operation.py | apinf/ml-rest | 69 | 6631885 | from collections import OrderedDict
from django.utils.functional import cached_property
from lepo.utils import maybe_resolve
class Operation:
def __init__(self, router, path, method, data):
"""
:type router: lepo.router.Router
:type path: lepo.path.Path
:type method: str
:type data: dict
"""
self.router = router
self.path = path
self.method = method
self.data = data
@property
def id(self):
return self.data['operationId']
@cached_property
def parameters(self):
"""
Combined path-level and operation-level parameters.
Any $refs are resolved here.
Note that this implementation differs from the spec in that we only use
the _name_ of a parameter to consider its uniqueness, not the name and location.
This is because we end up passing parameters to the handler by name anyway,
so any duplicate names, even if they had different locations, would be horribly mangled.
:rtype: list[dict]
"""
parameters = OrderedDict()
for source in (
self.path.mapping.get('parameters', ()),
self.data.get('parameters', {}),
):
source = maybe_resolve(source, self.router.resolve_reference)
for parameter in source:
parameter = maybe_resolve(parameter, self.router.resolve_reference)
parameters[parameter['name']] = parameter
return list(parameters.values())
def _get_overridable(self, key, default=None):
# TODO: This probes a little too deeply into the specifics of these objects, I think...
for obj in (
self.data,
self.path.mapping,
self.router.api,
):
if key in obj:
return obj[key]
return default
@cached_property
def consumes(self):
value = self._get_overridable('consumes', [])
if not isinstance(value, (list, tuple)):
raise TypeError('`consumes` must be a list, got %r' % value) # pragma: no cover
return value
@cached_property
def produces(self):
value = self._get_overridable('produces', [])
if not isinstance(value, (list, tuple)):
raise TypeError('`produces` must be a list, got %r' % value) # pragma: no cover
return value
| from collections import OrderedDict
from django.utils.functional import cached_property
from lepo.utils import maybe_resolve
class Operation:
def __init__(self, router, path, method, data):
"""
:type router: lepo.router.Router
:type path: lepo.path.Path
:type method: str
:type data: dict
"""
self.router = router
self.path = path
self.method = method
self.data = data
@property
def id(self):
return self.data['operationId']
@cached_property
def parameters(self):
"""
Combined path-level and operation-level parameters.
Any $refs are resolved here.
Note that this implementation differs from the spec in that we only use
the _name_ of a parameter to consider its uniqueness, not the name and location.
This is because we end up passing parameters to the handler by name anyway,
so any duplicate names, even if they had different locations, would be horribly mangled.
:rtype: list[dict]
"""
parameters = OrderedDict()
for source in (
self.path.mapping.get('parameters', ()),
self.data.get('parameters', {}),
):
source = maybe_resolve(source, self.router.resolve_reference)
for parameter in source:
parameter = maybe_resolve(parameter, self.router.resolve_reference)
parameters[parameter['name']] = parameter
return list(parameters.values())
def _get_overridable(self, key, default=None):
# TODO: This probes a little too deeply into the specifics of these objects, I think...
for obj in (
self.data,
self.path.mapping,
self.router.api,
):
if key in obj:
return obj[key]
return default
@cached_property
def consumes(self):
value = self._get_overridable('consumes', [])
if not isinstance(value, (list, tuple)):
raise TypeError('`consumes` must be a list, got %r' % value) # pragma: no cover
return value
@cached_property
def produces(self):
value = self._get_overridable('produces', [])
if not isinstance(value, (list, tuple)):
raise TypeError('`produces` must be a list, got %r' % value) # pragma: no cover
return value
| en | 0.854597 | :type router: lepo.router.Router :type path: lepo.path.Path :type method: str :type data: dict Combined path-level and operation-level parameters. Any $refs are resolved here. Note that this implementation differs from the spec in that we only use the _name_ of a parameter to consider its uniqueness, not the name and location. This is because we end up passing parameters to the handler by name anyway, so any duplicate names, even if they had different locations, would be horribly mangled. :rtype: list[dict] # TODO: This probes a little too deeply into the specifics of these objects, I think... # pragma: no cover # pragma: no cover | 2.054106 | 2 |
GenomicConsensus/quiver/__init__.py | RADnovogene/GenomicConsensus | 96 | 6631886 | # Author: <NAME>
from __future__ import absolute_import, division, print_function
from . import utils
from . import model
| # Author: <NAME>
from __future__ import absolute_import, division, print_function
from . import utils
from . import model
| en | 0.831508 | # Author: <NAME> | 1.085547 | 1 |
rules/repositories.bzl | cdsurfer0212/rules_ios | 0 | 6631887 | """Definitions for handling Bazel repositories used by the Apple rules."""
load(
"@bazel_tools//tools/build_defs/repo:http.bzl",
"http_archive",
)
def _maybe(repo_rule, name, **kwargs):
"""Executes the given repository rule if it hasn't been executed already.
Args:
repo_rule: The repository rule to be executed (e.g.,
`http_archive`.)
name: The name of the repository to be defined by the rule.
**kwargs: Additional arguments passed directly to the repository rule.
"""
if not native.existing_rule(name):
repo_rule(name = name, **kwargs)
def github_repo(name, project, repo, ref, sha256 = None):
"""Downloads a repository from GitHub as a tarball.
Args:
name: The name of the repository.
project: The project (user or organization) on GitHub that hosts the repository.
repo: The name of the repository on GitHub.
ref: The reference to be downloaded. Can be any named ref, e.g. a commit, branch, or tag.
sha256: The sha256 of the downloaded tarball.
"""
github_url = "https://github.com/{project}/{repo}/archive/{ref}.zip".format(
project = project,
repo = repo,
ref = ref,
)
http_archive(
name = name,
strip_prefix = "%s-%s" % (repo, ref.replace("/", "-")),
url = github_url,
sha256 = sha256,
canonical_id = github_url,
)
def rules_ios_dependencies():
"""Fetches repositories that are dependencies of the `rules_apple` workspace.
"""
_maybe(
github_repo,
name = "build_bazel_rules_apple",
ref = "ed2bceef7ac5a3071b023e3122a045a133d2245c",
project = "bazelbuild",
repo = "rules_apple",
sha256 = "3bbbc0ffa8aad392bc9a5032bccc366edb96723544dbdf89137d0223cf7350c1",
)
# Note: this ref is a cherry-pick of the rules_swift PR
# https://github.com/bazelbuild/rules_swift/pull/567
_maybe(
github_repo,
name = "build_bazel_rules_swift",
ref = "14d26dcedf0290bd777f6fe83cde3586dc616513",
project = "bazel-ios",
repo = "rules_swift",
sha256 = "8d87afbb43fa4f12ffd02c639bbc5a80eda0141bfaf74e4028d8f570d25d032c",
)
_maybe(
http_archive,
name = "bazel_skylib",
urls = [
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
],
sha256 = "1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c",
)
# Note: it relies on `index-import` to import indexes. Longer term this
# dependency may be added by rules_swift
# This release is a build of this PR https://github.com/lyft/index-import/pull/53
_maybe(
http_archive,
name = "build_bazel_rules_swift_index_import",
build_file_content = """\
load("@bazel_skylib//rules:native_binary.bzl", "native_binary")
native_binary(
name = "index_import",
src = "index-import",
out = "index-import",
visibility = ["//visibility:public"],
)
native_binary(
name = "validate_index",
src = "validate-index",
out = "validate-index",
visibility = ["//visibility:public"],
)
native_binary(
name = "absolute_unit",
src = "absolute-unit",
out = "absolute-unit",
visibility = ["//visibility:public"],
)
""",
canonical_id = "index-import-5.3.2.5",
urls = ["https://github.com/bazel-ios/index-import/releases/download/5.3.2.5/index-import.zip"],
sha256 = "79e9b2cd3e988155b86668c56d95705e1a4a7c7b6d702ff5ded3a18d1291a39a",
)
_maybe(
http_archive,
name = "com_github_yonaskolb_xcodegen",
build_file_content = """\
load("@bazel_skylib//rules:native_binary.bzl", "native_binary")
native_binary(
name = "xcodegen",
src = "bin/xcodegen",
out = "xcodegen",
data = glob(["share/**/*"]),
visibility = ["//visibility:public"],
)
""",
canonical_id = "xcodegen-2.18.0-12-g04d6749",
sha256 = "3742eee89850cea75367b0f67662a58da5765f66c1be9b4189a59529b4e5099e",
strip_prefix = "xcodegen",
urls = ["https://github.com/segiddins/XcodeGen/releases/download/2.18.0-12-g04d6749/xcodegen.zip"],
)
| """Definitions for handling Bazel repositories used by the Apple rules."""
load(
"@bazel_tools//tools/build_defs/repo:http.bzl",
"http_archive",
)
def _maybe(repo_rule, name, **kwargs):
"""Executes the given repository rule if it hasn't been executed already.
Args:
repo_rule: The repository rule to be executed (e.g.,
`http_archive`.)
name: The name of the repository to be defined by the rule.
**kwargs: Additional arguments passed directly to the repository rule.
"""
if not native.existing_rule(name):
repo_rule(name = name, **kwargs)
def github_repo(name, project, repo, ref, sha256 = None):
"""Downloads a repository from GitHub as a tarball.
Args:
name: The name of the repository.
project: The project (user or organization) on GitHub that hosts the repository.
repo: The name of the repository on GitHub.
ref: The reference to be downloaded. Can be any named ref, e.g. a commit, branch, or tag.
sha256: The sha256 of the downloaded tarball.
"""
github_url = "https://github.com/{project}/{repo}/archive/{ref}.zip".format(
project = project,
repo = repo,
ref = ref,
)
http_archive(
name = name,
strip_prefix = "%s-%s" % (repo, ref.replace("/", "-")),
url = github_url,
sha256 = sha256,
canonical_id = github_url,
)
def rules_ios_dependencies():
"""Fetches repositories that are dependencies of the `rules_apple` workspace.
"""
_maybe(
github_repo,
name = "build_bazel_rules_apple",
ref = "ed2bceef7ac5a3071b023e3122a045a133d2245c",
project = "bazelbuild",
repo = "rules_apple",
sha256 = "3bbbc0ffa8aad392bc9a5032bccc366edb96723544dbdf89137d0223cf7350c1",
)
# Note: this ref is a cherry-pick of the rules_swift PR
# https://github.com/bazelbuild/rules_swift/pull/567
_maybe(
github_repo,
name = "build_bazel_rules_swift",
ref = "14d26dcedf0290bd777f6fe83cde3586dc616513",
project = "bazel-ios",
repo = "rules_swift",
sha256 = "8d87afbb43fa4f12ffd02c639bbc5a80eda0141bfaf74e4028d8f570d25d032c",
)
_maybe(
http_archive,
name = "bazel_skylib",
urls = [
"https://github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
"https://mirror.bazel.build/github.com/bazelbuild/bazel-skylib/releases/download/1.0.3/bazel-skylib-1.0.3.tar.gz",
],
sha256 = "1c531376ac7e5a180e0237938a2536de0c54d93f5c278634818e0efc952dd56c",
)
# Note: it relies on `index-import` to import indexes. Longer term this
# dependency may be added by rules_swift
# This release is a build of this PR https://github.com/lyft/index-import/pull/53
_maybe(
http_archive,
name = "build_bazel_rules_swift_index_import",
build_file_content = """\
load("@bazel_skylib//rules:native_binary.bzl", "native_binary")
native_binary(
name = "index_import",
src = "index-import",
out = "index-import",
visibility = ["//visibility:public"],
)
native_binary(
name = "validate_index",
src = "validate-index",
out = "validate-index",
visibility = ["//visibility:public"],
)
native_binary(
name = "absolute_unit",
src = "absolute-unit",
out = "absolute-unit",
visibility = ["//visibility:public"],
)
""",
canonical_id = "index-import-5.3.2.5",
urls = ["https://github.com/bazel-ios/index-import/releases/download/5.3.2.5/index-import.zip"],
sha256 = "79e9b2cd3e988155b86668c56d95705e1a4a7c7b6d702ff5ded3a18d1291a39a",
)
_maybe(
http_archive,
name = "com_github_yonaskolb_xcodegen",
build_file_content = """\
load("@bazel_skylib//rules:native_binary.bzl", "native_binary")
native_binary(
name = "xcodegen",
src = "bin/xcodegen",
out = "xcodegen",
data = glob(["share/**/*"]),
visibility = ["//visibility:public"],
)
""",
canonical_id = "xcodegen-2.18.0-12-g04d6749",
sha256 = "3742eee89850cea75367b0f67662a58da5765f66c1be9b4189a59529b4e5099e",
strip_prefix = "xcodegen",
urls = ["https://github.com/segiddins/XcodeGen/releases/download/2.18.0-12-g04d6749/xcodegen.zip"],
)
| en | 0.760197 | Definitions for handling Bazel repositories used by the Apple rules. Executes the given repository rule if it hasn't been executed already. Args: repo_rule: The repository rule to be executed (e.g., `http_archive`.) name: The name of the repository to be defined by the rule. **kwargs: Additional arguments passed directly to the repository rule. Downloads a repository from GitHub as a tarball. Args: name: The name of the repository. project: The project (user or organization) on GitHub that hosts the repository. repo: The name of the repository on GitHub. ref: The reference to be downloaded. Can be any named ref, e.g. a commit, branch, or tag. sha256: The sha256 of the downloaded tarball. Fetches repositories that are dependencies of the `rules_apple` workspace. # Note: this ref is a cherry-pick of the rules_swift PR # https://github.com/bazelbuild/rules_swift/pull/567 # Note: it relies on `index-import` to import indexes. Longer term this # dependency may be added by rules_swift # This release is a build of this PR https://github.com/lyft/index-import/pull/53 \ load("@bazel_skylib//rules:native_binary.bzl", "native_binary") native_binary( name = "index_import", src = "index-import", out = "index-import", visibility = ["//visibility:public"], ) native_binary( name = "validate_index", src = "validate-index", out = "validate-index", visibility = ["//visibility:public"], ) native_binary( name = "absolute_unit", src = "absolute-unit", out = "absolute-unit", visibility = ["//visibility:public"], ) \ load("@bazel_skylib//rules:native_binary.bzl", "native_binary") native_binary( name = "xcodegen", src = "bin/xcodegen", out = "xcodegen", data = glob(["share/**/*"]), visibility = ["//visibility:public"], ) | 2.264277 | 2 |
mara_db/shell.py | leo-schick/mara-db | 0 | 6631888 | """
Shell command generation for
- running queries in databases via their command line clients
- copying data from, into and between databases
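
Example (a minimal sketch, assuming an alias 'mara' is configured):
    >>> import subprocess
    >>> from mara_db import shell
    >>> _ = subprocess.run(shell.query_command('mara'), shell=True,
    ...                    input=b'SELECT 1;', check=True)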
"""
import shlex
from functools import singledispatch
import sys
from mara_db import dbs, config
from multimethod import multidispatch
@singledispatch
def query_command(db: object, timezone: str = None, echo_queries: bool = None) -> str:
"""
Creates a shell command that receives a sql query from stdin and executes it
Args:
db: The database in which to run the query (either an alias or a `dbs.DB` object
timezone: Sets the timezone of the client, if applicable
echo_queries: Whether the client should print executed queries, if applicable
Returns:
A shell command string
Example:
>>> print(query_command('mara', 'America/New_York'))
PGTZ=America/New_York PGOPTIONS=--client-min-messages=warning psql --username=root --host=localhost \
--echo-all --no-psqlrc --set ON_ERROR_STOP=on mara
>>> print(query_command(dbs.MysqlDB(host='localhost', database='test')))
mysql --default-character-set=utf8mb4 --host=localhost test
"""
raise NotImplementedError(f'Please implement query_command for type "{db.__class__.__name__}"')
@query_command.register(str)
def __(alias: str, timezone: str = None, echo_queries: bool = None):
return query_command(dbs.db(alias), timezone=timezone, echo_queries=echo_queries)
@query_command.register(dbs.PostgreSQLDB)
def __(db: dbs.PostgreSQLDB, timezone: str = None, echo_queries: bool = None):
if echo_queries is None:
echo_queries = config.default_echo_queries()
return (f'PGTZ={timezone or config.default_timezone()} '
+ (f"PGPASSWORD='{db.password}' " if db.password else '')
+ (f'PGSSLMODE={db.sslmode} ' if db.sslmode else '')
+ (f'PGSSLROOTCERT={db.sslrootcert} ' if db.sslrootcert else '')
+ (f'PGSSLCERT={db.sslcert} ' if db.sslcert else '')
+ (f'PGSSLKEY={db.sslkey} ' if db.sslkey else '')
+ 'PGOPTIONS=--client-min-messages=warning psql'
+ (f' --username={db.user}' if db.user else '')
+ (f' --host={db.host}' if db.host else '')
+ (f' --port={db.port}' if db.port else '')
+ (' --echo-all' if echo_queries else ' ')
+ ' --no-psqlrc --set ON_ERROR_STOP=on '
+ (db.database or ''))
@query_command.register(dbs.RedshiftDB)
def __(db: dbs.RedshiftDB, timezone: str = None, echo_queries: bool = None):
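    # Redshift speaks the PostgreSQL wire protocol, so psql is reused here
    # (without the PGOPTIONS / SSL client settings used for PostgreSQL proper)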
if echo_queries is None:
echo_queries = config.default_echo_queries()
return (f'PGTZ={timezone or config.default_timezone()} '
+ (f"PGPASSWORD='{db.password}' " if db.password else '')
+ ' psql'
+ (f' --username={db.user}' if db.user else '')
+ (f' --host={db.host}' if db.host else '')
+ (f' --port={db.port}' if db.port else '')
+ (' --echo-all' if echo_queries else ' ')
+ ' --no-psqlrc --set ON_ERROR_STOP=on '
+ (db.database or ''))
@query_command.register(dbs.BigQueryDB)
def __(db: dbs.BigQueryDB, timezone: str = None, echo_queries: bool = None):
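    # imported here rather than at module level so that the BigQuery client
    # libraries remain an optional dependency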
from .bigquery import bigquery_credentials
service_account_email = bigquery_credentials(db).service_account_email
return (f'CLOUDSDK_CORE_ACCOUNT={service_account_email}'
+ ' bq query'
            + ' --max_rows=50000000'  # return up to 50M result rows
            + ' --headless'  # run without user interaction
            + ' --quiet'  # no job progress updates
            + ' --format=csv'  # write results as CSV
+ (f' --use_legacy_sql=' + ('true' if db.use_legacy_sql else 'false'))
+ (f' --project_id={db.project}' if db.project else '')
+ (f' --location={db.location}' if db.location else '')
+ (f' --dataset_id={db.dataset}' if db.dataset else '')
+ ' ')
@query_command.register(dbs.MysqlDB)
def __(db: dbs.MysqlDB, timezone: str = None, echo_queries: bool = None):
assert all(v is None for v in [timezone, echo_queries]), "unimplemented parameter for MysqlDB"
return ((f"MYSQL_PWD='{<PASSWORD>}' " if db.password else '')
+ 'mysql '
+ (f' --user={db.user}' if db.user else '')
+ (f' --host={db.host}' if db.host else '')
+ (f' --port={db.port}' if db.port else '')
+ (' --ssl' if db.ssl else '')
+ (f' {db.database}' if db.database else ''))
@query_command.register(dbs.SQLServerDB)
def __(db: dbs.SQLServerDB, timezone: str = None, echo_queries: bool = None):
assert all(v is None for v in [timezone]), "unimplemented parameter for SQLServerDB"
if echo_queries is None:
echo_queries = config.default_echo_queries()
# sqsh does not do anything when a statement is not terminated by a ';', add one to be sure
command = "(cat && echo ';') \\\n | "
command += "(cat && echo ';\n\go') \\\n | "
return (command + 'sqsh -a 1 -d 0 -f 10'
+ (f' -U {db.user}' if db.user else '')
+ (f' -P {db.password}' if db.password else '')
+ (f' -S {db.host}' if db.host else '')
+ (f' -D {db.database}' if db.database else '')
+ (f' -e' if echo_queries else ''))
@query_command.register(dbs.OracleDB)
def __(db: dbs.OracleDB, timezone: str = None, echo_queries: bool = None):
assert all(v is None for v in [timezone, echo_queries]), "unimplemented parameter for OracleDB"
# sqlplus does not do anything when a statement is not terminated by a ';', add one to be sure
return ( # Oracle needs a semicolon at the end, with no newlines before
# Remove all trailing whitespace and then add a semicolon if not there yet
shlex.quote(sys.executable)
+ ''' -c "import sys; sql = sys.stdin.read().strip(); sql = sql + ';' if not sql[-1]==';' else sql; print(sql)" '''
+ ' \\\n | sqlplus64 -s '
+ f'{db.user}/{db.password}@{db.host}:{db.port or 1521}/{db.endpoint}')
@query_command.register(dbs.SQLiteDB)
def __(db: dbs.SQLiteDB, timezone: str = None, echo_queries: bool = None):
assert all(v is None for v in [timezone, echo_queries]), "unimplemented parameter for SQLiteDB"
# sqlite does not complain if a file does not exist. Therefore check file existence first
file_name = shlex.quote(str(db.file_name))
return f'(test -f {file_name} && cat || >&2 echo {file_name} not found) \\\n' \
+ ' | sqlite3 -bail ' + shlex.quote(str(db.file_name))
# -------------------------------
@singledispatch
def copy_to_stdout_command(db: object,
header: bool = None,
footer: bool = None,
delimiter_char: str = None,
csv_format: bool = None) -> str:
"""
Creates a shell command that receives a query from stdin, executes it and writes the output to stdout
Args:
db: The database in which to run the query (either an alias or a `dbs.DB` object
header: Whether a csv header with the column name(s) will be included or not.
No header, by default. (not implemented in sqsh for SQLServerDB)
footer: Whether a footer will be included or not. False by default. (Only implemented for PostgreSQLDB)
delimiter_char: str to delimit the fields in one row. Default: tab character
csv_format: Double quote 'difficult' strings (Only implemented for PostgreSQLDB)
Returns:
The composed shell command
Example:
>>> print(copy_to_stdout_command(dbs.PostgreSQLDB(host='localhost', database='test')))
        PGTZ=Europe/Berlin PGOPTIONS=--client-min-messages=warning psql --host=localhost --no-psqlrc --set ON_ERROR_STOP=on test --variable=FETCH_COUNT=10000 --tuples-only --pset="footer=off" --no-align --field-separator='	' \
         | sed '/^$/d'
"""
raise NotImplementedError(f'Please implement function copy_to_stdout_command for type "{db.__class__.__name__}"')
@copy_to_stdout_command.register(str)
def __(alias: str, header: bool = None, footer: bool = None, delimiter_char: str = None, csv_format: bool = None):
return copy_to_stdout_command(dbs.db(alias), header=header, footer=footer,
delimiter_char=delimiter_char, csv_format=csv_format)
@copy_to_stdout_command.register(dbs.PostgreSQLDB)
def __(db: dbs.PostgreSQLDB, header: bool = None, footer: bool = None,
delimiter_char: str = None, csv_format: bool = None):
if header is None:
header = False
if footer is None:
footer = False
if delimiter_char is None:
delimiter_char = '\t'
if csv_format:
assert not (footer or header), 'unsupported when csv_format = True'
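        # print everything up to the line with the first ';' and strip the ';',
        # so that the query can be wrapped into a `COPY (...) TO STDOUT` statement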
return r" sed '/\;/q' | sed 's/\;.*//' " + '\\\n' \
+ f'''| (echo "COPY (" && cat && echo ") TO STDOUT WITH {'CSV ' if csv_format else ''} DELIMITER '{delimiter_char}' ") \\\n''' \
+ ' | ' + query_command(db, echo_queries=False) + ' --variable=FETCH_COUNT=10000 \\\n' \
+ " | sed '/^$/d'" # remove empty lines
else:
header_argument = '--tuples-only' if not header else ''
footer_argument = '--pset="footer=off"' if not footer else ''
return (query_command(db, echo_queries=False) + ' --variable=FETCH_COUNT=10000'
+ " " + header_argument + " " + footer_argument
+ f" --no-align --field-separator='{delimiter_char}' \\\n"
+ " | sed '/^$/d'" # remove empty lines
)
@copy_to_stdout_command.register(dbs.BigQueryDB)
def __(db: dbs.BigQueryDB, header: bool = None, footer: bool = None, delimiter_char: str = None,
csv_format: bool = None):
assert all(v is None for v in [header, footer]), "unimplemented parameter for BigQuery"
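    # `bq --format=csv` prints a header row; strip it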
remove_header = 'sed 1d'
return query_command(db) + f' | {remove_header}'
@copy_to_stdout_command.register(dbs.MysqlDB)
def __(db: dbs.MysqlDB, header: bool = None, footer: bool = None, delimiter_char: str = None, csv_format: bool = None):
if header is None:
header = False
assert all(v is None for v in [footer, delimiter_char, csv_format]), "unimplemented parameter for MysqlDB"
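    # mysql prints column names by default; suppress them unless a header is requested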
header_argument = '--skip-column-names' if header is False else ''
return query_command(db) + ' ' + header_argument
@copy_to_stdout_command.register(dbs.SQLServerDB)
def __(db: dbs.SQLServerDB, header: bool = None, footer: bool = None, delimiter_char: str = None,
csv_format: bool = None):
assert all(
v is None for v in [header, footer, delimiter_char, csv_format]), "unimplemented parameter for SQLServerDB"
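    # sqsh's csv display mode writes the result set as comma separated values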
return query_command(db, echo_queries=False) + " -m csv"
@copy_to_stdout_command.register(dbs.OracleDB)
def __(db: dbs.OracleDB, header: bool = None, footer: bool = None, delimiter_char: str = None, csv_format: bool = None):
assert all(v is None for v in [header, footer, delimiter_char, csv_format]), "unimplemented parameter for OracleDB"
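    # prepend sqlplus settings so that the result comes back as plain CSV,
    # without feedback messages or column headings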
return "(echo 'set markup csv on\nset feedback off\nset heading off' && cat)" \
+ " \\\n | " + query_command(db)
@copy_to_stdout_command.register(dbs.SQLiteDB)
def __(db: dbs.SQLiteDB, header: bool = None, footer: bool = None, delimiter_char: str = None, csv_format: bool = None):
if header is None:
header = False
if delimiter_char is None:
delimiter_char = '\t'
assert all(v is None for v in [footer, csv_format]), "unimplemented parameter for SQLiteDB"
header_argument = '-noheader' if not header else ''
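    # `-quote` renders values as SQL literals (strings quoted, blobs as hex)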
return query_command(db) + " " + header_argument + f" -separator '{delimiter_char}' -quote"
# -------------------------------
@singledispatch
def copy_from_stdin_command(db: object, target_table: str,
csv_format: bool = None, skip_header: bool = None,
delimiter_char: str = None, quote_char: str = None,
null_value_string: str = None, timezone: str = None) -> str:
"""
Creates a shell command that receives data from stdin and writes it to a table.
Options are tailored for the PostgreSQL `COPY FROM STDIN` command, adaptions might be needed for other databases.
https://www.postgresql.org/docs/current/static/sql-copy.html
Args:
        db: The database to use (either an alias or a `dbs.DB` object)
target_table: The table in which the data is written
csv_format: Treat the input as a CSV file (comma separated, double quoted literals)
skip_header: When true, skip the first line
delimiter_char: The character that separates columns
quote_char: The character for quoting strings
null_value_string: The string that denotes NULL values
timezone: Sets the timezone of the client, if applicable
Returns:
The composed shell command
Examples:
>>>> print(copy_from_stdin_command('mara', target_table='foo'))
PGTZ=Europe/Berlin PGOPTIONS=--client-min-messages=warning psql --username=root --host=localhost \
--echo-all --no-psqlrc --set ON_ERROR_STOP=on mara \
--command="COPY foo FROM STDIN WITH CSV"
"""
raise NotImplementedError(f'Please implement copy_from_stdin_command for type "{db.__class__.__name__}"')
@copy_from_stdin_command.register(str)
def __(alias: str, target_table: str, csv_format: bool = None, skip_header: bool = None,
delimiter_char: str = None, quote_char: str = None, null_value_string: str = None, timezone: str = None):
return copy_from_stdin_command(
dbs.db(alias), target_table=target_table, csv_format=csv_format, skip_header=skip_header,
delimiter_char=delimiter_char, quote_char=quote_char,
null_value_string=null_value_string, timezone=timezone)
@copy_from_stdin_command.register(dbs.PostgreSQLDB)
def __(db: dbs.PostgreSQLDB, target_table: str, csv_format: bool = None, skip_header: bool = None,
delimiter_char: str = None, quote_char: str = None, null_value_string: str = None, timezone: str = None):
sql = f'COPY {target_table} FROM STDIN WITH'
if csv_format:
sql += ' CSV'
if skip_header:
sql += ' HEADER'
if delimiter_char is not None:
sql += f" DELIMITER AS '{delimiter_char}'"
if null_value_string is not None:
sql += f" NULL AS '{null_value_string}'"
if quote_char is not None:
sql += f" QUOTE AS '{quote_char}'"
# escape double quotes
sql = sql.replace('"', '\\"')
return f'{query_command(db, timezone)} \\\n --command="{sql}"'
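# Illustrative sketch (added): for a PostgreSQL target with csv_format=True and
# skip_header=True, the SQL embedded in the returned command is
# `COPY my_table FROM STDIN WITH CSV HEADER`. Host and database are hypothetical.
def _example_postgresql_load_command() -> str:  # pragma: no cover - illustration only
    db = dbs.PostgreSQLDB(host='localhost', database='example')  # hypothetical
    return copy_from_stdin_command(db, target_table='my_table',
                                   csv_format=True, skip_header=True)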
@copy_from_stdin_command.register(dbs.RedshiftDB)
def __(db: dbs.RedshiftDB, target_table: str, csv_format: bool = None, skip_header: bool = None,
delimiter_char: str = None, quote_char: str = None, null_value_string: str = None, timezone: str = None):
import uuid
import datetime
tmp_file_name = f'tmp-{datetime.datetime.now().isoformat()}-{uuid.uuid4().hex}.csv'
s3_write_command = f'AWS_ACCESS_KEY_ID={db.aws_access_key_id} AWS_SECRET_ACCESS_KEY={db.aws_secret_access_key} aws s3 cp - s3://{db.aws_s3_bucket_name}/{tmp_file_name}'
s3_delete_tmp_file_command = f'AWS_ACCESS_KEY_ID={db.aws_access_key_id} AWS_SECRET_ACCESS_KEY={db.aws_secret_access_key} aws s3 rm s3://{db.aws_s3_bucket_name}/{tmp_file_name}'
sql = f"COPY {target_table} FROM 's3://{db.aws_s3_bucket_name}/{tmp_file_name}' access_key_id '{db.aws_access_key_id}' secret_access_key '{db.aws_secret_access_key}'"
if csv_format:
sql += ' CSV'
if skip_header:
sql += ' HEADER'
if delimiter_char is not None:
sql += f" DELIMITER AS '{delimiter_char}'"
if null_value_string is not None:
sql += f" NULL AS '{null_value_string}'"
if quote_char is not None:
sql += f" QUOTE AS '{quote_char}'"
return s3_write_command + ' &&\n\n' \
+ f'{query_command(db, timezone)} \\\n --command="{sql}" \\\n || /bin/false \\\n ; RC=$?\n\n' \
+ s3_delete_tmp_file_command+' &&\n $(exit $RC) || /bin/false'
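# Note (added comment): Redshift's COPY cannot read from stdin, so the command above
# stages the data in S3 first. The exit code of the psql COPY step is captured in RC
# so that the temporary S3 object is removed even when the load fails; the final
# `$(exit $RC)` then re-raises that status as the overall pipeline result.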
@copy_from_stdin_command.register(dbs.BigQueryDB)
def __(db: dbs.BigQueryDB, target_table: str, csv_format: bool = None, skip_header: bool = None,
delimiter_char: str = None, quote_char: str = None, null_value_string: str = None, timezone: str = None):
assert db.gcloud_gcs_bucket_name, f"Please provide the 'gcloud_gcs_bucket_name' parameter to database '{db}' "
import uuid
import datetime
tmp_file_name = f'tmp-{datetime.datetime.now().isoformat()}-{uuid.uuid4().hex}.' + (
'csv' if csv_format else 'json')
service_account_email = bigquery_credentials(db).service_account_email
set_env_prefix = f'CLOUDSDK_CORE_ACCOUNT={service_account_email}'
bq_load_command = (set_env_prefix
+ ' bq load'
+ ' --headless'
+ ' --quiet'
+ (f' --location={db.location}' if db.location else '')
+ (f' --project_id={db.project}' if db.project else '')
+ (f' --dataset_id={db.dataset}' if db.dataset else '')
+ (f' --skip_leading_rows=1' if skip_header else '')
)
if csv_format:
bq_load_command += ' --source_format=CSV'
else:
bq_load_command += ' --source_format=NEWLINE_DELIMITED_JSON'
if delimiter_char is not None:
bq_load_command += f" --field_delimiter='{delimiter_char}'"
if null_value_string is not None:
bq_load_command += f" --null_marker='{null_value_string}'"
if quote_char is not None:
bq_load_command += f" --quote='{quote_char}'"
bq_load_command += f" '{target_table}' gs://{db.gcloud_gcs_bucket_name}/{tmp_file_name}"
gcs_write_command = f'{set_env_prefix} gsutil -q cp - gs://{db.gcloud_gcs_bucket_name}/{tmp_file_name}'
gcs_delete_temp_file_command = f'{set_env_prefix} gsutil -q rm gs://{db.gcloud_gcs_bucket_name}/{tmp_file_name}'
return gcs_write_command + '\\\n \\\n && ' \
+ bq_load_command + '\\\n \\\n && ' \
+ gcs_delete_temp_file_command
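# Note (added comment): analogous to the Redshift case, `bq load` reads from Google
# Cloud Storage rather than stdin, so the generated command writes the data to a
# temporary GCS object, loads it, and removes the object again on success.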
# -------------------------------
@multidispatch
def copy_command(source_db: object, target_db: object, target_table: str,
timezone=None, csv_format=None, delimiter_char=None) -> str:
"""
Creates a shell command that
- receives a sql query from stdin
- executes the query in `source_db`
- writes the results of the query to `target_table` in `target_db`
Args:
        source_db: The database in which to run the query (either an alias or a `dbs.DB` object)
target_db: The database where to write the query results (alias or db configuration)
target_table: The table in which to write the query results
timezone: Sets the timezone of the client, if applicable
csv_format: double quote 'difficult' strings
delimiter_char: The character that separates columns, default '\t'
Returns:
A shell command string
Examples:
>>>> print(copy_command(dbs.SQLServerDB(database='source_db'), dbs.PostgreSQLDB(database='target_db'), \
'target_table'))
sqsh -D source_db -m csv \
| PGTZ=Europe/Berlin PGOPTIONS=--client-min-messages=warning psql --echo-all --no-psqlrc \
--set ON_ERROR_STOP=on target_db \
--command="COPY target_table FROM STDIN WITH CSV HEADER"
"""
raise NotImplementedError(
f'Please implement copy_command for types "{source_db.__class__.__name__}" and "{target_db.__class__.__name__}"'
)
@copy_command.register(str, str)
def __(source_db_alias: str, target_db_alias: str, target_table: str, timezone: str = None,
csv_format: bool = None, delimiter_char: str = None):
return copy_command(dbs.db(source_db_alias), dbs.db(target_db_alias),
target_table=target_table, timezone=timezone,
csv_format=csv_format, delimiter_char=delimiter_char)
@copy_command.register(dbs.DB, str)
def __(source_db: dbs.DB, target_db_alias: str, target_table: str, timezone: str = None,
csv_format: bool = None, delimiter_char: str = None):
return copy_command(source_db, dbs.db(target_db_alias),
target_table=target_table, timezone=timezone,
csv_format=csv_format, delimiter_char=delimiter_char)
@copy_command.register(dbs.PostgreSQLDB, dbs.PostgreSQLDB)
def __(source_db: dbs.PostgreSQLDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
return (copy_to_stdout_command(source_db, delimiter_char=delimiter_char, csv_format=csv_format) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
null_value_string='', timezone=timezone, csv_format=csv_format,
delimiter_char=delimiter_char))
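# Illustrative usage sketch (added; the aliases are hypothetical and would have to
# exist in the project's mara_db configuration):
#
#   command = copy_command('dwh_source', 'dwh_target', target_table='dim_customer')
#   # then, in a shell: echo "SELECT * FROM dim_customer" | bash -c "$command"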
@copy_command.register(dbs.PostgreSQLDB, dbs.BigQueryDB)
def __(source_db: dbs.PostgreSQLDB, target_db: dbs.BigQueryDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db, delimiter_char=delimiter_char, csv_format=csv_format) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
timezone=timezone, csv_format=csv_format,
delimiter_char='\t' if not delimiter_char and csv_format else delimiter_char))
@copy_command.register(dbs.BigQueryDB, dbs.PostgreSQLDB)
def __(source_db: dbs.BigQueryDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db, delimiter_char=delimiter_char, csv_format=csv_format) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
timezone=timezone, csv_format=csv_format,
delimiter_char='\t' if not delimiter_char and csv_format else delimiter_char))
@copy_command.register(dbs.MysqlDB, dbs.PostgreSQLDB)
def __(source_db: dbs.MysqlDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
null_value_string='NULL', timezone=timezone,
csv_format=csv_format, delimiter_char=delimiter_char))
@copy_command.register(dbs.MysqlDB, dbs.BigQueryDB)
def __(source_db: dbs.MysqlDB, target_db: dbs.BigQueryDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
null_value_string='NULL', timezone=timezone,
csv_format=csv_format, delimiter_char=delimiter_char))
@copy_command.register(dbs.SQLServerDB, dbs.PostgreSQLDB)
def __(source_db: dbs.SQLServerDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table, csv_format=csv_format,
delimiter_char=delimiter_char,
skip_header=True, timezone=timezone))
@copy_command.register(dbs.SQLServerDB, dbs.BigQueryDB)
def __(source_db: dbs.SQLServerDB, target_db: dbs.BigQueryDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table, csv_format=csv_format,
delimiter_char=delimiter_char,
skip_header=True, timezone=timezone))
@copy_command.register(dbs.OracleDB, dbs.PostgreSQLDB)
def __(source_db: dbs.OracleDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
csv_format=csv_format, skip_header=False, delimiter_char=delimiter_char,
null_value_string='NULL', timezone=timezone))
@copy_command.register(dbs.OracleDB, dbs.BigQueryDB)
def __(source_db: dbs.OracleDB, target_db: dbs.BigQueryDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
csv_format=csv_format, skip_header=False, delimiter_char=delimiter_char,
null_value_string='NULL', timezone=timezone))
@copy_command.register(dbs.SQLiteDB, dbs.PostgreSQLDB)
def __(source_db: dbs.SQLiteDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table, timezone=timezone,
null_value_string='NULL', quote_char="''", csv_format=csv_format,
delimiter_char=delimiter_char))
@copy_command.register(dbs.SQLiteDB, dbs.BigQueryDB)
def __(source_db: dbs.SQLiteDB, target_db: dbs.BigQueryDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table, timezone=timezone,
null_value_string='NULL', quote_char="''", csv_format=csv_format,
delimiter_char=delimiter_char))
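# End-to-end sketch (added for illustration; not part of the original module). The
# aliases and table name are hypothetical; this relies on the module-level
# `import shlex` above.
def _example_copy_pipeline(query: str = 'SELECT 1 AS x') -> str:  # pragma: no cover
    command = copy_command('source_db_alias', 'target_db_alias',
                           target_table='target_table')
    # the composed pipeline pipes the quoted query into the copy command
    return 'echo ' + shlex.quote(query) + ' \\\n | ' + command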
| """
Shell command generation for
- running queries in databases via their command line clients
- copying data from, into and between databases
"""
import shlex
from functools import singledispatch
import sys
from mara_db import dbs, config
from mara_db.bigquery import bigquery_credentials
from multimethod import multidispatch
@singledispatch
def query_command(db: object, timezone: str = None, echo_queries: bool = None) -> str:
"""
Creates a shell command that receives a sql query from stdin and executes it
Args:
db: The database in which to run the query (either an alias or a `dbs.DB` object
timezone: Sets the timezone of the client, if applicable
echo_queries: Whether the client should print executed queries, if applicable
Returns:
A shell command string
Example:
>>> print(query_command('mara', 'America/New_York'))
PGTZ=America/New_York PGOPTIONS=--client-min-messages=warning psql --username=root --host=localhost \
--echo-all --no-psqlrc --set ON_ERROR_STOP=on mara
>>> print(query_command(dbs.MysqlDB(host='localhost', database='test')))
mysql --default-character-set=utf8mb4 --host=localhost test
"""
raise NotImplementedError(f'Please implement query_command for type "{db.__class__.__name__}"')
@query_command.register(str)
def __(alias: str, timezone: str = None, echo_queries: bool = None):
return query_command(dbs.db(alias), timezone=timezone, echo_queries=echo_queries)
@query_command.register(dbs.PostgreSQLDB)
def __(db: dbs.PostgreSQLDB, timezone: str = None, echo_queries: bool = None):
if echo_queries is None:
echo_queries = config.default_echo_queries()
return (f'PGTZ={timezone or config.default_timezone()} '
+ (f"PGPASSWORD='{db.password}' " if db.password else '')
+ (f'PGSSLMODE={db.sslmode} ' if db.sslmode else '')
+ (f'PGSSLROOTCERT={db.sslrootcert} ' if db.sslrootcert else '')
+ (f'PGSSLCERT={db.sslcert} ' if db.sslcert else '')
+ (f'PGSSLKEY={db.sslkey} ' if db.sslkey else '')
+ 'PGOPTIONS=--client-min-messages=warning psql'
+ (f' --username={db.user}' if db.user else '')
+ (f' --host={db.host}' if db.host else '')
+ (f' --port={db.port}' if db.port else '')
+ (' --echo-all' if echo_queries else ' ')
+ ' --no-psqlrc --set ON_ERROR_STOP=on '
+ (db.database or ''))
@query_command.register(dbs.RedshiftDB)
def __(db: dbs.RedshiftDB, timezone: str = None, echo_queries: bool = None):
if echo_queries is None:
echo_queries = config.default_echo_queries()
return (f'PGTZ={timezone or config.default_timezone()} '
+ (f"PGPASSWORD='{db.password}' " if db.password else '')
+ ' psql'
+ (f' --username={db.user}' if db.user else '')
+ (f' --host={db.host}' if db.host else '')
+ (f' --port={db.port}' if db.port else '')
+ (' --echo-all' if echo_queries else ' ')
+ ' --no-psqlrc --set ON_ERROR_STOP=on '
+ (db.database or ''))
@query_command.register(dbs.BigQueryDB)
def __(db: dbs.BigQueryDB, timezone: str = None, echo_queries: bool = None):
from .bigquery import bigquery_credentials
service_account_email = bigquery_credentials(db).service_account_email
return (f'CLOUDSDK_CORE_ACCOUNT={service_account_email}'
+ ' bq query'
+ ' --max_rows=50000000' # run without user interaction
+ ' --headless' # run without user interaction
+ ' --quiet' # no job progress updates
+ ' --format=csv' # no job progress updates
+ (f' --use_legacy_sql=' + ('true' if db.use_legacy_sql else 'false'))
+ (f' --project_id={db.project}' if db.project else '')
+ (f' --location={db.location}' if db.location else '')
+ (f' --dataset_id={db.dataset}' if db.dataset else '')
+ ' ')
@query_command.register(dbs.MysqlDB)
def __(db: dbs.MysqlDB, timezone: str = None, echo_queries: bool = None):
assert all(v is None for v in [timezone, echo_queries]), "unimplemented parameter for MysqlDB"
return ((f"MYSQL_PWD='{<PASSWORD>}' " if db.password else '')
+ 'mysql '
+ (f' --user={db.user}' if db.user else '')
+ (f' --host={db.host}' if db.host else '')
+ (f' --port={db.port}' if db.port else '')
+ (' --ssl' if db.ssl else '')
+ (f' {db.database}' if db.database else ''))
@query_command.register(dbs.SQLServerDB)
def __(db: dbs.SQLServerDB, timezone: str = None, echo_queries: bool = None):
assert all(v is None for v in [timezone]), "unimplemented parameter for SQLServerDB"
if echo_queries is None:
echo_queries = config.default_echo_queries()
# sqsh does not do anything when a statement is not terminated by a ';', add one to be sure
command = "(cat && echo ';') \\\n | "
command += "(cat && echo ';\n\go') \\\n | "
return (command + 'sqsh -a 1 -d 0 -f 10'
+ (f' -U {db.user}' if db.user else '')
+ (f' -P {db.password}' if db.password else '')
+ (f' -S {db.host}' if db.host else '')
+ (f' -D {db.database}' if db.database else '')
+ (f' -e' if echo_queries else ''))
@query_command.register(dbs.OracleDB)
def __(db: dbs.OracleDB, timezone: str = None, echo_queries: bool = None):
assert all(v is None for v in [timezone, echo_queries]), "unimplemented parameter for OracleDB"
# sqlplus does not do anything when a statement is not terminated by a ';', add one to be sure
return ( # Oracle needs a semicolon at the end, with no newlines before
# Remove all trailing whitespace and then add a semicolon if not there yet
shlex.quote(sys.executable)
+ ''' -c "import sys; sql = sys.stdin.read().strip(); sql = sql + ';' if not sql[-1]==';' else sql; print(sql)" '''
+ ' \\\n | sqlplus64 -s '
+ f'{db.user}/{db.password}@{db.host}:{db.port or 1521}/{db.endpoint}')
@query_command.register(dbs.SQLiteDB)
def __(db: dbs.SQLiteDB, timezone: str = None, echo_queries: bool = None):
assert all(v is None for v in [timezone, echo_queries]), "unimplemented parameter for SQLiteDB"
# sqlite does not complain if a file does not exist. Therefore check file existence first
file_name = shlex.quote(str(db.file_name))
return f'(test -f {file_name} && cat || >&2 echo {file_name} not found) \\\n' \
+ ' | sqlite3 -bail ' + shlex.quote(str(db.file_name))
# -------------------------------
@singledispatch
def copy_to_stdout_command(db: object,
header: bool = None,
footer: bool = None,
delimiter_char: str = None,
csv_format: bool = None) -> str:
"""
Creates a shell command that receives a query from stdin, executes it and writes the output to stdout
Args:
db: The database in which to run the query (either an alias or a `dbs.DB` object
header: Whether a csv header with the column name(s) will be included or not.
No header, by default. (not implemented in sqsh for SQLServerDB)
footer: Whether a footer will be included or not. False by default. (Only implemented for PostgreSQLDB)
delimiter_char: str to delimit the fields in one row. Default: tab character
csv_format: Double quote 'difficult' strings (Only implemented for PostgreSQLDB)
Returns:
The composed shell command
Example:
>>> print(copy_to_stdout_command(dbs.PostgreSQLDB(host='localhost', database='test')))
PGTZ=Europe/Berlin PGOPTIONS=--client-min-messages=warning psql --host=localhost --no-psqlrc --set ON_ERROR_STOP=on test --tuples-only --no-align --field-separator=' ' \
| grep -a -v -e '^$'
"""
raise NotImplementedError(f'Please implement function copy_to_stdout_command for type "{db.__class__.__name__}"')
@copy_to_stdout_command.register(str)
def __(alias: str, header: bool = None, footer: bool = None, delimiter_char: str = None, csv_format: bool = None):
return copy_to_stdout_command(dbs.db(alias), header=header, footer=footer,
delimiter_char=delimiter_char, csv_format=csv_format)
@copy_to_stdout_command.register(dbs.PostgreSQLDB)
def __(db: dbs.PostgreSQLDB, header: bool = None, footer: bool = None,
delimiter_char: str = None, csv_format: bool = None):
if header is None:
header = False
if footer is None:
footer = False
if delimiter_char is None:
delimiter_char = '\t'
if csv_format:
assert not (footer or header), 'unsupported when csv_format = True'
return r" sed '/\;/q' | sed 's/\;.*//' " + '\\\n' \
+ f'''| (echo "COPY (" && cat && echo ") TO STDOUT WITH {'CSV ' if csv_format else ''} DELIMITER '{delimiter_char}' ") \\\n''' \
+ ' | ' + query_command(db, echo_queries=False) + ' --variable=FETCH_COUNT=10000 \\\n' \
+ " | sed '/^$/d'" # remove empty lines
else:
header_argument = '--tuples-only' if not header else ''
footer_argument = '--pset="footer=off"' if not footer else ''
return (query_command(db, echo_queries=False) + ' --variable=FETCH_COUNT=10000'
+ " " + header_argument + " " + footer_argument
+ f" --no-align --field-separator='{delimiter_char}' \\\n"
+ " | sed '/^$/d'" # remove empty lines
)
@copy_to_stdout_command.register(dbs.BigQueryDB)
def __(db: dbs.BigQueryDB, header: bool = None, footer: bool = None, delimiter_char: str = None,
csv_format: bool = None):
assert all(v is None for v in [header, footer]), "unimplemented parameter for BigQuery"
remove_header = 'sed 1d'
return query_command(db) + f' | {remove_header}'
@copy_to_stdout_command.register(dbs.MysqlDB)
def __(db: dbs.MysqlDB, header: bool = None, footer: bool = None, delimiter_char: str = None, csv_format: bool = None):
if header is None:
header = False
assert all(v is None for v in [footer, delimiter_char, csv_format]), "unimplemented parameter for MysqlDB"
header_argument = '--skip-column-names' if header is False else ''
return query_command(db) + ' ' + header_argument
@copy_to_stdout_command.register(dbs.SQLServerDB)
def __(db: dbs.SQLServerDB, header: bool = None, footer: bool = None, delimiter_char: str = None,
csv_format: bool = None):
assert all(
v is None for v in [header, footer, delimiter_char, csv_format]), "unimplemented parameter for SQLServerDB"
return query_command(db, echo_queries=False) + " -m csv"
@copy_to_stdout_command.register(dbs.OracleDB)
def __(db: dbs.OracleDB, header: bool = None, footer: bool = None, delimiter_char: str = None, csv_format: bool = None):
assert all(v is None for v in [header, footer, delimiter_char, csv_format]), "unimplemented parameter for OracleDB"
return "(echo 'set markup csv on\nset feedback off\nset heading off' && cat)" \
+ " \\\n | " + query_command(db)
@copy_to_stdout_command.register(dbs.SQLiteDB)
def __(db: dbs.SQLiteDB, header: bool = None, footer: bool = None, delimiter_char: str = None, csv_format: bool = None):
if header is None:
header = False
if delimiter_char is None:
delimiter_char = '\t'
assert all(v is None for v in [footer, csv_format]), "unimplemented parameter for SQLiteDB"
header_argument = '-noheader' if not header else ''
return query_command(db) + " " + header_argument + f" -separator '{delimiter_char}' -quote"
# -------------------------------
@singledispatch
def copy_from_stdin_command(db: object, target_table: str,
csv_format: bool = None, skip_header: bool = None,
delimiter_char: str = None, quote_char: str = None,
null_value_string: str = None, timezone: str = None) -> str:
"""
Creates a shell command that receives data from stdin and writes it to a table.
Options are tailored for the PostgreSQL `COPY FROM STDIN` command, adaptions might be needed for other databases.
https://www.postgresql.org/docs/current/static/sql-copy.html
Args:
db: The database to use (either an alias or a `dbs.DB` object
target_table: The table in which the data is written
csv_format: Treat the input as a CSV file (comma separated, double quoted literals)
skip_header: When true, skip the first line
delimiter_char: The character that separates columns
quote_char: The character for quoting strings
null_value_string: The string that denotes NULL values
timezone: Sets the timezone of the client, if applicable
Returns:
The composed shell command
Examples:
>>>> print(copy_from_stdin_command('mara', target_table='foo'))
PGTZ=Europe/Berlin PGOPTIONS=--client-min-messages=warning psql --username=root --host=localhost \
--echo-all --no-psqlrc --set ON_ERROR_STOP=on mara \
--command="COPY foo FROM STDIN WITH CSV"
"""
raise NotImplementedError(f'Please implement copy_from_stdin_command for type "{db.__class__.__name__}"')
@copy_from_stdin_command.register(str)
def __(alias: str, target_table: str, csv_format: bool = None, skip_header: bool = None,
delimiter_char: str = None, quote_char: str = None, null_value_string: str = None, timezone: str = None):
return copy_from_stdin_command(
dbs.db(alias), target_table=target_table, csv_format=csv_format, skip_header=skip_header,
delimiter_char=delimiter_char, quote_char=quote_char,
null_value_string=null_value_string, timezone=timezone)
@copy_from_stdin_command.register(dbs.PostgreSQLDB)
def __(db: dbs.PostgreSQLDB, target_table: str, csv_format: bool = None, skip_header: bool = None,
delimiter_char: str = None, quote_char: str = None, null_value_string: str = None, timezone: str = None):
sql = f'COPY {target_table} FROM STDIN WITH'
if csv_format:
sql += ' CSV'
if skip_header:
sql += ' HEADER'
if delimiter_char is not None:
sql += f" DELIMITER AS '{delimiter_char}'"
if null_value_string is not None:
sql += f" NULL AS '{null_value_string}'"
if quote_char is not None:
sql += f" QUOTE AS '{quote_char}'"
# escape double quotes
sql = sql.replace('"', '\\"')
return f'{query_command(db, timezone)} \\\n --command="{sql}"'
@copy_from_stdin_command.register(dbs.RedshiftDB)
def __(db: dbs.RedshiftDB, target_table: str, csv_format: bool = None, skip_header: bool = None,
delimiter_char: str = None, quote_char: str = None, null_value_string: str = None, timezone: str = None):
import uuid
import datetime
tmp_file_name = f'tmp-{datetime.datetime.now().isoformat()}-{uuid.uuid4().hex}.csv'
s3_write_command = f'AWS_ACCESS_KEY_ID={db.aws_access_key_id} AWS_SECRET_ACCESS_KEY={db.aws_secret_access_key} aws s3 cp - s3://{db.aws_s3_bucket_name}/{tmp_file_name}'
s3_delete_tmp_file_command = f'AWS_ACCESS_KEY_ID={db.aws_access_key_id} AWS_SECRET_ACCESS_KEY={db.aws_secret_access_key} aws s3 rm s3://{db.aws_s3_bucket_name}/{tmp_file_name}'
sql = f"COPY {target_table} FROM 's3://{db.aws_s3_bucket_name}/{tmp_file_name}' access_key_id '{db.aws_access_key_id}' secret_access_key '{db.aws_secret_access_key}'"
if csv_format:
sql += ' CSV'
if skip_header:
sql += ' HEADER'
if delimiter_char is not None:
sql += f" DELIMITER AS '{delimiter_char}'"
if null_value_string is not None:
sql += f" NULL AS '{null_value_string}'"
if quote_char is not None:
sql += f" QUOTE AS '{quote_char}'"
return s3_write_command + ' &&\n\n' \
+ f'{query_command(db, timezone)} \\\n --command="{sql}" \\\n || /bin/false \\\n ; RC=$?\n\n' \
+ s3_delete_tmp_file_command+' &&\n $(exit $RC) || /bin/false'
@copy_from_stdin_command.register(dbs.BigQueryDB)
def __(db: dbs.BigQueryDB, target_table: str, csv_format: bool = None, skip_header: bool = None,
delimiter_char: str = None, quote_char: str = None, null_value_string: str = None, timezone: str = None):
assert db.gcloud_gcs_bucket_name, f"Please provide the 'gcloud_gcs_bucket_name' parameter to database '{db}' "
import uuid
import datetime
tmp_file_name = f'tmp-{datetime.datetime.now().isoformat()}-{uuid.uuid4().hex}.' + (
'csv' if csv_format else 'json')
service_account_email = bigquery_credentials(db).service_account_email
set_env_prefix = f'CLOUDSDK_CORE_ACCOUNT={service_account_email}'
bq_load_command = (set_env_prefix
+ ' bq load'
+ ' --headless'
+ ' --quiet'
+ (f' --location={db.location}' if db.location else '')
+ (f' --project_id={db.project}' if db.project else '')
+ (f' --dataset_id={db.dataset}' if db.dataset else '')
+ (f' --skip_leading_rows=1' if skip_header else '')
)
if csv_format:
bq_load_command += ' --source_format=CSV'
else:
bq_load_command += ' --source_format=NEWLINE_DELIMITED_JSON'
if delimiter_char is not None:
bq_load_command += f" --field_delimiter='{delimiter_char}'"
if null_value_string is not None:
bq_load_command += f" --null_marker='{null_value_string}'"
if quote_char is not None:
bq_load_command += f" --quote='{quote_char}'"
bq_load_command += f" '{target_table}' gs://{db.gcloud_gcs_bucket_name}/{tmp_file_name}"
gcs_write_command = f'{set_env_prefix} gsutil -q cp - gs://{db.gcloud_gcs_bucket_name}/{tmp_file_name}'
gcs_delete_temp_file_command = f'{set_env_prefix} gsutil -q rm gs://{db.gcloud_gcs_bucket_name}/{tmp_file_name}'
return gcs_write_command + '\\\n \\\n && ' \
+ bq_load_command + '\\\n \\\n && ' \
+ gcs_delete_temp_file_command
# -------------------------------
@multidispatch
def copy_command(source_db: object, target_db: object, target_table: str,
timezone=None, csv_format=None, delimiter_char=None) -> str:
"""
Creates a shell command that
- receives a sql query from stdin
- executes the query in `source_db`
- writes the results of the query to `target_table` in `target_db`
Args:
source_db: The database in which to run the query (either an alias or a `dbs.DB` object
target_db: The database where to write the query results (alias or db configuration)
target_table: The table in which to write the query results
timezone: Sets the timezone of the client, if applicable
csv_format: double quote 'difficult' strings
delimiter_char: The character that separates columns, default '\t'
Returns:
A shell command string
Examples:
>>>> print(copy_command(dbs.SQLServerDB(database='source_db'), dbs.PostgreSQLDB(database='target_db'), \
'target_table'))
sqsh -D source_db -m csv \
| PGTZ=Europe/Berlin PGOPTIONS=--client-min-messages=warning psql --echo-all --no-psqlrc \
--set ON_ERROR_STOP=on target_db \
--command="COPY target_table FROM STDIN WITH CSV HEADER"
"""
raise NotImplementedError(
f'Please implement copy_command for types "{source_db.__class__.__name__}" and "{target_db.__class__.__name__}"'
)
@copy_command.register(str, str)
def __(source_db_alias: str, target_db_alias: str, target_table: str, timezone: str = None,
csv_format: bool = None, delimiter_char: str = None):
return copy_command(dbs.db(source_db_alias), dbs.db(target_db_alias),
target_table=target_table, timezone=timezone,
csv_format=csv_format, delimiter_char=delimiter_char)
@copy_command.register(dbs.DB, str)
def __(source_db: dbs.DB, target_db_alias: str, target_table: str, timezone: str = None,
csv_format: bool = None, delimiter_char: str = None):
return copy_command(source_db, dbs.db(target_db_alias),
target_table=target_table, timezone=timezone,
csv_format=csv_format, delimiter_char=delimiter_char)
@copy_command.register(dbs.PostgreSQLDB, dbs.PostgreSQLDB)
def __(source_db: dbs.PostgreSQLDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
return (copy_to_stdout_command(source_db, delimiter_char=delimiter_char, csv_format=csv_format) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
null_value_string='', timezone=timezone, csv_format=csv_format,
delimiter_char=delimiter_char))
@copy_command.register(dbs.PostgreSQLDB, dbs.BigQueryDB)
def __(source_db: dbs.PostgreSQLDB, target_db: dbs.BigQueryDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db, delimiter_char=delimiter_char, csv_format=csv_format) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
timezone=timezone, csv_format=csv_format,
delimiter_char='\t' if not delimiter_char and csv_format else delimiter_char))
@copy_command.register(dbs.BigQueryDB, dbs.PostgreSQLDB)
def __(source_db: dbs.BigQueryDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db, delimiter_char=delimiter_char, csv_format=csv_format) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
timezone=timezone, csv_format=csv_format,
delimiter_char='\t' if not delimiter_char and csv_format else delimiter_char))
@copy_command.register(dbs.MysqlDB, dbs.PostgreSQLDB)
def __(source_db: dbs.MysqlDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
null_value_string='NULL', timezone=timezone,
csv_format=csv_format, delimiter_char=delimiter_char))
@copy_command.register(dbs.MysqlDB, dbs.BigQueryDB)
def __(source_db: dbs.MysqlDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
null_value_string='NULL', timezone=timezone,
csv_format=csv_format, delimiter_char=delimiter_char))
@copy_command.register(dbs.SQLServerDB, dbs.PostgreSQLDB)
def __(source_db: dbs.SQLServerDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table, csv_format=csv_format,
delimiter_char=delimiter_char,
skip_header=True, timezone=timezone))
@copy_command.register(dbs.SQLServerDB, dbs.BigQueryDB)
def __(source_db: dbs.SQLServerDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table, csv_format=csv_format,
delimiter_char=delimiter_char,
skip_header=True, timezone=timezone))
@copy_command.register(dbs.OracleDB, dbs.PostgreSQLDB)
def __(source_db: dbs.OracleDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
csv_format=csv_format, skip_header=False, delimiter_char=delimiter_char,
null_value_string='NULL', timezone=timezone))
@copy_command.register(dbs.OracleDB, dbs.BigQueryDB)
def __(source_db: dbs.OracleDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table,
csv_format=csv_format, skip_header=False, delimiter_char=delimiter_char,
null_value_string='NULL', timezone=timezone))
@copy_command.register(dbs.SQLiteDB, dbs.PostgreSQLDB)
def __(source_db: dbs.SQLiteDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table, timezone=timezone,
null_value_string='NULL', quote_char="''", csv_format=csv_format,
delimiter_char=delimiter_char))
@copy_command.register(dbs.SQLiteDB, dbs.BigQueryDB)
def __(source_db: dbs.SQLiteDB, target_db: dbs.PostgreSQLDB, target_table: str,
timezone: str = None, csv_format: bool = None, delimiter_char: str = None):
if csv_format is None:
csv_format = True
return (copy_to_stdout_command(source_db) + ' \\\n'
+ ' | ' + copy_from_stdin_command(target_db, target_table=target_table, timezone=timezone,
null_value_string='NULL', quote_char="''", csv_format=csv_format,
delimiter_char=delimiter_char))
| en | 0.585494 | Shell command generation for - running queries in databases via their command line clients - copying data from, into and between databases Creates a shell command that receives a sql query from stdin and executes it Args: db: The database in which to run the query (either an alias or a `dbs.DB` object timezone: Sets the timezone of the client, if applicable echo_queries: Whether the client should print executed queries, if applicable Returns: A shell command string Example: >>> print(query_command('mara', 'America/New_York')) PGTZ=America/New_York PGOPTIONS=--client-min-messages=warning psql --username=root --host=localhost \ --echo-all --no-psqlrc --set ON_ERROR_STOP=on mara >>> print(query_command(dbs.MysqlDB(host='localhost', database='test'))) mysql --default-character-set=utf8mb4 --host=localhost test # run without user interaction # run without user interaction # no job progress updates # no job progress updates # sqsh does not do anything when a statement is not terminated by a ';', add one to be sure # sqlplus does not do anything when a statement is not terminated by a ';', add one to be sure # Oracle needs a semicolon at the end, with no newlines before # Remove all trailing whitespace and then add a semicolon if not there yet -c "import sys; sql = sys.stdin.read().strip(); sql = sql + ';' if not sql[-1]==';' else sql; print(sql)" # sqlite does not complain if a file does not exist. Therefore check file existence first # ------------------------------- Creates a shell command that receives a query from stdin, executes it and writes the output to stdout Args: db: The database in which to run the query (either an alias or a `dbs.DB` object header: Whether a csv header with the column name(s) will be included or not. No header, by default. (not implemented in sqsh for SQLServerDB) footer: Whether a footer will be included or not. False by default. (Only implemented for PostgreSQLDB) delimiter_char: str to delimit the fields in one row. Default: tab character csv_format: Double quote 'difficult' strings (Only implemented for PostgreSQLDB) Returns: The composed shell command Example: >>> print(copy_to_stdout_command(dbs.PostgreSQLDB(host='localhost', database='test'))) PGTZ=Europe/Berlin PGOPTIONS=--client-min-messages=warning psql --host=localhost --no-psqlrc --set ON_ERROR_STOP=on test --tuples-only --no-align --field-separator=' ' \ | grep -a -v -e '^$' | (echo "COPY (" && cat && echo ") TO STDOUT WITH {'CSV ' if csv_format else ''} DELIMITER '{delimiter_char}' ") \\\n # remove empty lines # remove empty lines # ------------------------------- Creates a shell command that receives data from stdin and writes it to a table. Options are tailored for the PostgreSQL `COPY FROM STDIN` command, adaptions might be needed for other databases. 
https://www.postgresql.org/docs/current/static/sql-copy.html Args: db: The database to use (either an alias or a `dbs.DB` object target_table: The table in which the data is written csv_format: Treat the input as a CSV file (comma separated, double quoted literals) skip_header: When true, skip the first line delimiter_char: The character that separates columns quote_char: The character for quoting strings null_value_string: The string that denotes NULL values timezone: Sets the timezone of the client, if applicable Returns: The composed shell command Examples: >>>> print(copy_from_stdin_command('mara', target_table='foo')) PGTZ=Europe/Berlin PGOPTIONS=--client-min-messages=warning psql --username=root --host=localhost \ --echo-all --no-psqlrc --set ON_ERROR_STOP=on mara \ --command="COPY foo FROM STDIN WITH CSV" # escape double quotes # ------------------------------- Creates a shell command that - receives a sql query from stdin - executes the query in `source_db` - writes the results of the query to `target_table` in `target_db` Args: source_db: The database in which to run the query (either an alias or a `dbs.DB` object target_db: The database where to write the query results (alias or db configuration) target_table: The table in which to write the query results timezone: Sets the timezone of the client, if applicable csv_format: double quote 'difficult' strings delimiter_char: The character that separates columns, default '\t' Returns: A shell command string Examples: >>>> print(copy_command(dbs.SQLServerDB(database='source_db'), dbs.PostgreSQLDB(database='target_db'), \ 'target_table')) sqsh -D source_db -m csv \ | PGTZ=Europe/Berlin PGOPTIONS=--client-min-messages=warning psql --echo-all --no-psqlrc \ --set ON_ERROR_STOP=on target_db \ --command="COPY target_table FROM STDIN WITH CSV HEADER" | 3.20785 | 3 |
src/stackoverflow/49350821/Foo/bar/posts.py | mrdulin/python-codelab | 0 | 6631889 |
from collections import namedtuple
Post = namedtuple('Post', 'id text')
async def get_post(id: str):
return Post(id=int(id), text='Text for the post body.')
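# Usage note (added comment): `get_post` is a coroutine; a hypothetical caller could
# drive it with the standard event loop, e.g.
#   import asyncio
#   post = asyncio.run(get_post('1'))   # -> Post(id=1, text='Text for the post body.')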
PhysEngines/ode/ode-0.13/bindings/python/setup.py | netpipe/IrrPAL | 58 | 6631890 |
#! /usr/bin/env python
from distutils.core import setup
from distutils.extension import Extension
from subprocess import Popen, PIPE, CalledProcessError
try:
from Cython.Distutils import build_ext
except ImportError:
raise SystemExit("Requires Cython (http://cython.org/)")
try:
ode_cflags = Popen(
["pkg-config", "--cflags", "ode"],
stdout=PIPE).stdout.read().split()
ode_libs = Popen(
["pkg-config", "--libs", "ode"],
stdout=PIPE).stdout.read().split()
except (OSError, CalledProcessError):
raise SystemExit("Failed to find ODE with 'pkg-config'. Please make sure "
"that it is installed and available on your system path.")
ode_ext = Extension("ode", ["ode.pyx"],
extra_compile_args=ode_cflags,
extra_link_args=ode_libs)
if __name__ == "__main__":
setup(
name="Open Dynamics Engine",
version="0.12",
author="<NAME>",
# author_email="",
# maintainer="",
# maintainer_email="",
url="http://www.ode.org",
description="Bindings for the Open Dynamics Engine",
long_description=(
"A free, industrial quality library for simulating articulated "
"rigid body dynamics - for example ground vehicles, legged "
"creatures, and moving objects in VR environments. It's fast, "
"flexible & robust. Built-in collision detection."),
# download_url="https://opende.svn.sourceforge.net/svnroot/opende",
# classifiers=[],
# platforms=[],
license="BSD License, GNU Lesser General Public License (LGPL)",
cmdclass={"build_ext": build_ext},
ext_modules=[ode_ext]
)
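# Build note (added comment, illustrative): a typical in-place build would be
#   python setup.py build_ext --inplace
# Caveat: under Python 3, Popen(...).stdout.read() returns bytes, so the pkg-config
# flags would need decoding (e.g. universal_newlines=True on Popen) before being
# passed to Extension; as written, the script reflects the Python 2 era distutils
# workflow.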
ege_theme/apps.py | suap-ead/lib_theme | 0 | 6631891 | from django.apps import AppConfig
class TemaConfig(AppConfig):
name = 'ege_theme'
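# Usage note (added comment): the app config takes effect once the app is listed in
# the project settings, e.g. INSTALLED_APPS = [..., 'ege_theme', ...].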
apps/combine-service/src/handlers/combine/create.py | biosimulations/Biosimulations | 7 | 6631892 |
from ...exceptions import BadRequestException
from ...utils import get_temp_dir
from biosimulators_utils.combine.data_model import (
CombineArchive,
CombineArchiveContent,
)
from biosimulators_utils.combine.io import (
CombineArchiveWriter,
)
from biosimulators_utils.sedml.data_model import (
SedDocument,
Model,
ModelAttributeChange,
OneStepSimulation,
SteadyStateSimulation,
UniformTimeCourseSimulation,
Algorithm,
AlgorithmParameterChange,
Task,
DataGenerator,
Variable,
Report,
Plot2D,
Plot3D,
DataSet,
Curve,
Surface,
AxisScale,
)
from biosimulators_utils.sedml.io import (
SedmlSimulationWriter,
)
import connexion
import flask
import os
import requests
import requests.exceptions
import src.utils
import werkzeug.datastructures # noqa: F401
import werkzeug.wrappers.response # noqa: F401
def handler(body, files=None):
''' Create a COMBINE/OMEX archive.
Args:
body (:obj:`dict`): dictionary with schema ``CombineArchiveSpecsAndFiles`` with the
specifications of the COMBINE/OMEX archive to create
files (:obj:`list` of :obj:`werkzeug.datastructures.FileStorage`, optional): files (e.g., SBML
file)
Returns:
:obj:`werkzeug.wrappers.response.Response` or :obj:`str`: response with COMBINE/OMEX
archive or a URL to a COMBINE/OMEX archive
'''
download = body.get('download', False)
archive_specs = body['specs']
files = connexion.request.files.getlist('files')
# create temporary working directory
temp_dirname = get_temp_dir()
# create temporary files for archive
archive_dirname = os.path.join(temp_dirname, 'archive')
archive_filename = os.path.join(temp_dirname, 'project.omex')
# initialize archive
archive = CombineArchive()
# build map from model filenames to file objects
filename_map = {
file.filename: file
for file in files
}
# add files to archive
for content in archive_specs['contents']:
content_type = content['location']['value']['_type']
if content_type == 'SedDocument':
sed_doc = export_sed_doc(content['location']['value'])
# save SED document to file
try:
SedmlSimulationWriter().run(
sed_doc,
os.path.join(archive_dirname, content['location']['path']),
validate_models_with_languages=False)
except ValueError as exception:
raise BadRequestException(
title='`{}` does not contain a configuration for a valid SED-ML document.'.format(
content['location']['value']),
instance=exception,
)
elif content_type == 'CombineArchiveContentFile':
file = filename_map.get(
content['location']['value']['filename'], None)
if not file:
raise BadRequestException(
title='File with name `{}` was not uploaded'.format(
content['location']['value']['filename']),
instance=ValueError(),
)
filename = os.path.join(archive_dirname,
content['location']['path'])
if not os.path.isdir(os.path.dirname(filename)):
os.makedirs(os.path.dirname(filename))
file.save(filename)
elif content_type == 'CombineArchiveContentUrl':
filename = os.path.join(archive_dirname,
content['location']['path'])
if not os.path.isdir(os.path.dirname(filename)):
os.makedirs(os.path.dirname(filename))
content_url = content['location']['value']['url']
try:
response = requests.get(content_url)
response.raise_for_status()
except requests.exceptions.RequestException as exception:
title = 'COMBINE/OMEX archive content could not be loaded from `{}`'.format(
content_url)
raise BadRequestException(
title=title,
instance=exception,
)
with open(filename, 'wb') as file:
file.write(response.content)
else:
raise BadRequestException(
title='Content of type `{}` is not supported'.format(
content_type),
instance=NotImplementedError('Invalid content')
) # pragma: no cover: unreachable due to schema validation
content = CombineArchiveContent(
location=content['location']['path'],
format=content['format'],
master=content['master'],
)
archive.contents.append(content)
# package COMBINE/OMEX archive
CombineArchiveWriter().run(archive, archive_dirname, archive_filename)
if download:
return flask.send_file(archive_filename,
mimetype='application/zip',
as_attachment=True,
attachment_filename='project.omex')
else:
# save COMBINE/OMEX archive to S3 bucket
archive_url = src.utils.save_file_to_s3_bucket(archive_filename, public=True)
# return URL for archive in S3 bucket
return archive_url
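# Illustrative request body for handler() (added comment; field values are
# hypothetical and the authoritative schema is the service's OpenAPI definition):
#
# {
#   "download": True,
#   "specs": {
#     "contents": [
#       {
#         "location": {
#           "path": "model.xml",
#           "value": {"_type": "CombineArchiveContentFile", "filename": "model.xml"}
#         },
#         "format": "http://identifiers.org/combine.specifications/sbml",
#         "master": False
#       }
#     ]
#   }
# }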
def export_sed_doc(sed_doc_specs):
""" Export the specifications of SED document to SED-ML
Args:
sed_doc_specs (``SedDocument``)
Returns:
:obj:`SedDocument`
"""
sed_doc = SedDocument(
level=sed_doc_specs['level'],
version=sed_doc_specs['version'],
)
# add models to SED document
model_id_map = {}
for model_spec in sed_doc_specs['models']:
model = Model(
id=model_spec.get('id'),
name=model_spec.get('name', None),
language=model_spec.get('language'),
source=model_spec.get('source'),
)
sed_doc.models.append(model)
model_id_map[model.id] = model
for change_spec in model_spec['changes']:
change = ModelAttributeChange(
target=change_spec.get('target').get('value'),
new_value=change_spec.get('newValue'),
)
model.changes.append(change)
for ns in change_spec.get('target').get('namespaces', []):
change.target_namespaces[ns.get('prefix', None)] = ns['uri']
# add simulations to SED document
simulation_id_map = {}
for sim_spec in sed_doc_specs['simulations']:
if sim_spec['_type'] == 'SedOneStepSimulation':
sim = OneStepSimulation(
id=sim_spec.get('id'),
name=sim_spec.get('name', None),
step=sim_spec.get('step'),
)
elif sim_spec['_type'] == 'SedSteadyStateSimulation':
sim = SteadyStateSimulation(
id=sim_spec.get('id'),
name=sim_spec.get('name', None),
)
elif sim_spec['_type'] == 'SedUniformTimeCourseSimulation':
sim = UniformTimeCourseSimulation(
id=sim_spec.get('id'),
name=sim_spec.get('name', None),
initial_time=sim_spec.get('initialTime'),
output_start_time=sim_spec.get('outputStartTime'),
output_end_time=sim_spec.get('outputEndTime'),
number_of_steps=sim_spec.get('numberOfSteps'),
)
else:
raise BadRequestException(
title='Simulations of type `{}` are not supported'.format(
sim_spec['_type']),
instance=NotImplementedError('Invalid simulation')
) # pragma: no cover: unreachable due to schema validation
alg_spec = sim_spec.get('algorithm')
sim.algorithm = Algorithm(kisao_id=alg_spec.get('kisaoId'))
for change_spec in alg_spec.get('changes'):
sim.algorithm.changes.append(
AlgorithmParameterChange(
kisao_id=change_spec.get('kisaoId'),
new_value=change_spec.get('newValue'),
)
)
sed_doc.simulations.append(sim)
simulation_id_map[sim.id] = sim
# add tasks to SED document
task_id_map = {}
for task_spec in sed_doc_specs['tasks']:
if task_spec['_type'] == 'SedTask':
model_id = task_spec.get('model').get('id')
sim_id = task_spec.get('simulation').get('id')
model = model_id_map.get(model_id, None)
sim = simulation_id_map.get(sim_id, None)
if not model:
raise BadRequestException(
title='Model `{}` for task `{}` does not exist'.format(
model_id, task_spec.get('id')),
instance=ValueError('Model does not exist'),
)
if not sim:
raise BadRequestException(
title='Simulation `{}` for task `{}` does not exist'.format(
sim_id, task_spec.get('id')),
instance=ValueError('Simulation does not exist'),
)
task = Task(
id=task_spec.get('id'),
name=task_spec.get('name', None),
model=model,
simulation=sim,
)
else:
raise BadRequestException(
title='Tasks of type `{}` are not supported'.format(
task_spec['_type']),
instance=NotImplementedError('Invalid task')
) # pragma: no cover: unreachable due to schema validation
sed_doc.tasks.append(task)
task_id_map[task.id] = task
# add data generators to SED document
data_gen_id_map = {}
for data_gen_spec in sed_doc_specs['dataGenerators']:
data_gen = DataGenerator(
id=data_gen_spec.get('id'),
name=data_gen_spec.get('name', None),
math=data_gen_spec.get('math'),
)
for var_spec in data_gen_spec['variables']:
task_id = var_spec.get('task').get('id')
task = task_id_map.get(task_id, None)
if not task:
raise BadRequestException(
title='Task `{}` for variable `{}` does not exist'.format(
task_id, var_spec.get('id')),
instance=ValueError('Task does not exist'),
)
var = Variable(
id=var_spec.get('id'),
name=var_spec.get('name', None),
task=task,
symbol=var_spec.get('symbol', None),
)
target_spec = var_spec.get('target', None)
if target_spec:
var.target = target_spec['value']
for ns in target_spec.get('namespaces', []):
var.target_namespaces[ns.get('prefix', None)] = ns['uri']
data_gen.variables.append(var)
sed_doc.data_generators.append(data_gen)
data_gen_id_map[data_gen.id] = data_gen
# add outputs to SED document
for output_spec in sed_doc_specs['outputs']:
if output_spec['_type'] == 'SedReport':
output = Report(
id=output_spec.get('id'),
name=output_spec.get('name', None),
)
for data_set_spec in output_spec['dataSets']:
data_gen_id = data_set_spec['dataGenerator']['id']
data_gen = data_gen_id_map.get(
data_gen_id, None)
if not data_gen:
raise BadRequestException(
title='Data generator `{}` for output `{}` does not exist'.format(
data_gen_id, output_spec.get('id')),
instance=ValueError('Data generator does not exist'),
)
data_set = DataSet(
id=data_set_spec.get('id'),
name=data_set_spec.get('name', None),
label=data_set_spec.get('label', None),
data_generator=data_gen,
)
output.data_sets.append(data_set)
elif output_spec['_type'] == 'SedPlot2D':
output = Plot2D(
id=output_spec.get('id'),
name=output_spec.get('name', None),
)
for curve_spec in output_spec['curves']:
x_data_gen_id = curve_spec['xDataGenerator']['id']
y_data_gen_id = curve_spec['yDataGenerator']['id']
x_data_gen = data_gen_id_map.get(x_data_gen_id, None)
y_data_gen = data_gen_id_map.get(y_data_gen_id, None)
if not x_data_gen:
raise BadRequestException(
title='X data generator `{}` for curve `{}` does not exist'.format(
x_data_gen_id, output_spec.get('id')),
instance=ValueError('Data generator does not exist'),
)
if not y_data_gen:
raise BadRequestException(
title='Y data generator `{}` for curve `{}` does not exist'.format(
y_data_gen_id, output_spec.get('id')),
instance=ValueError('Data generator does not exist'),
)
curve = Curve(
id=curve_spec.get('id'),
name=curve_spec.get('name', None),
x_data_generator=x_data_gen,
y_data_generator=y_data_gen,
x_scale=AxisScale[output_spec['xScale']],
y_scale=AxisScale[output_spec['yScale']],
)
output.curves.append(curve)
elif output_spec['_type'] == 'SedPlot3D':
output = Plot3D(
id=output_spec.get('id'),
name=output_spec.get('name', None),
)
for surface_spec in output_spec['surfaces']:
x_data_gen_id = surface_spec['xDataGenerator']['id']
y_data_gen_id = surface_spec['yDataGenerator']['id']
z_data_gen_id = surface_spec['zDataGenerator']['id']
x_data_gen = data_gen_id_map.get(x_data_gen_id, None)
y_data_gen = data_gen_id_map.get(y_data_gen_id, None)
z_data_gen = data_gen_id_map.get(z_data_gen_id, None)
if not x_data_gen:
raise BadRequestException(
title='X data generator `{}` for surface `{}` does not exist'.format(
x_data_gen_id, output_spec.get('id')),
instance=ValueError('Data generator does not exist'),
)
if not y_data_gen:
raise BadRequestException(
title='Y data generator `{}` for surface `{}` does not exist'.format(
y_data_gen_id, output_spec.get('id')),
instance=ValueError('Data generator does not exist'),
)
if not z_data_gen:
raise BadRequestException(
                        title='Z data generator `{}` for surface `{}` does not exist'.format(
z_data_gen_id, output_spec.get('id')),
instance=ValueError('Data generator does not exist'),
)
surface = Surface(
id=surface_spec.get('id'),
name=surface_spec.get('name', None),
x_data_generator=x_data_gen,
y_data_generator=y_data_gen,
z_data_generator=z_data_gen,
x_scale=AxisScale[output_spec['xScale']],
y_scale=AxisScale[output_spec['yScale']],
z_scale=AxisScale[output_spec['zScale']],
)
output.surfaces.append(surface)
else:
raise BadRequestException(
title='Outputs of type `{}` are not supported'.format(
output_spec['_type']),
instance=NotImplementedError('Invalid output')
) # pragma: no cover: unreachable due to schema validation
sed_doc.outputs.append(output)
return sed_doc
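# Illustrative input sketch for export_sed_doc (added comment; values hypothetical,
# showing only the keys that the function reads above):
#
# sed_doc_specs = {
#     'level': 1, 'version': 3,
#     'models': [{'id': 'model', 'language': 'urn:sedml:language:sbml',
#                 'source': 'model.xml', 'changes': []}],
#     'simulations': [{'_type': 'SedUniformTimeCourseSimulation', 'id': 'sim',
#                      'initialTime': 0, 'outputStartTime': 0, 'outputEndTime': 10,
#                      'numberOfSteps': 100,
#                      'algorithm': {'kisaoId': 'KISAO_0000019', 'changes': []}}],
#     'tasks': [{'_type': 'SedTask', 'id': 'task',
#                'model': {'id': 'model'}, 'simulation': {'id': 'sim'}}],
#     'dataGenerators': [],
#     'outputs': [],
# }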
configuration.py | cronos23/streamlink_helper | 0 | 6631893 |
import yaml
import requests
import os
from util import HEADERS
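# Persists user settings (Twitch user id, preferred stream qualities) in a
# per-platform config.yml; prompts interactively on first run.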
class Configuration:
def __init__(self):
if os.name == "posix":
directory = os.path.join(os.environ['HOME'], ".config", "streamlink_helper")
else:
directory = os.path.join(os.environ['APPDATA'], "streamlink_helper")
if not os.path.exists(directory):
os.makedirs(directory)
config_path = os.path.join(directory, "config.yml")
try:
f = open(config_path, "r")
f.close()
except FileNotFoundError:
configfile = open(config_path, "w+")
yaml.dump({"user_id": "", "ask_on_startup": True}, configfile)
configfile.close()
self.__read_configuration(config_path)
@property
def user_id(self):
return self._user_id
@property
def quality_order_of_preference(self):
return self._quality_order_of_preference
def __read_configuration(self, config_path):
with open(config_path, 'r') as configfile:
            cfgdata = yaml.safe_load(configfile)  # safe_load: plain config data needs no arbitrary object construction
if cfgdata["ask_on_startup"]:
self.__user_configure()
if not self.user_id:
return
else:
self._user_id = cfgdata["user_id"]
self._quality_order_of_preference = cfgdata["quality_order_of_preference"]
with open(config_path, "w") as configfile:
yaml.dump({"user_id": self._user_id,
"quality_order_of_preference": self._quality_order_of_preference,
"ask_on_startup": False}, configfile)
def __user_configure(self):
print("Your settings have not been configured yet. Set them here in the console,"
" or edit the config.yml by hand.")
self._user_id = self.__get_user_id()
if not self._user_id:
return
self._quality_order_of_preference = self.__get_quality()
@staticmethod
def __get_user_id():
username = input("Please enter the name of your twitch.tv account: ")
params = {
'login': username
}
user_id_api_url = "https://api.twitch.tv/helix/users"
user_data_response = requests.get(user_id_api_url, params=params, headers=HEADERS)
if not user_data_response.json()["data"]:
print("Couldn't get id for entered username")
return False
else:
user_id = user_data_response.json()["data"][0]["id"]
return user_id
@staticmethod
def __get_quality():
preferred_quality = False
quality_list = ["160p", "360p", "480p", "720p", "1080p", "720p60", "1080p60"]
while preferred_quality not in quality_list:
preferred_quality = input("Please enter your preferred stream quality. Leave blank for 1080p60 or enter s "
"to (s)how options: ")
if preferred_quality == "s":
for i in quality_list:
print(i)
if not preferred_quality:
preferred_quality = "1080p60"
return quality_list[:(quality_list.index(preferred_quality)+1)]
|
gnome/gnome2/gedit/plugins.symlink/FindInProject/__init__.py | icebreaker/dotfiles | 4 | 6631894 | <reponame>icebreaker/dotfiles
import gedit
from FindInProject import FindInProjectPluginInstance
class FindInProjectPlugin(gedit.Plugin):
def __init__(self):
gedit.Plugin.__init__(self)
self._instances = {}
def activate(self, window):
self._instances[window] = FindInProjectPluginInstance(window)
def deactivate(self, window):
self._instances[window].deactivate()
del self._instances[window]
def update_ui(self, window):
pass
| import gedit
from FindInProject import FindInProjectPluginInstance
class FindInProjectPlugin(gedit.Plugin):
def __init__(self):
gedit.Plugin.__init__(self)
self._instances = {}
def activate(self, window):
self._instances[window] = FindInProjectPluginInstance(window)
def deactivate(self, window):
self._instances[window].deactivate()
del self._instances[window]
def update_ui(self, window):
pass | none | 1 | 1.889433 | 2 |
|
vcenter_operator/cmd.py | fwiesel/vcenter-operator | 5 | 6631895 | import argparse
import logging
import os
import re
import sys
from time import sleep
from kubernetes import config as k8s_config
# Import discovery before configurator as there is some monkeypatching going on
from .discovery import DnsDiscovery
from .configurator import Configurator
LOG = logging.getLogger(__name__)
def _build_arg_parser():
args = argparse.ArgumentParser()
args.add_argument('--dry-run', action='store_true', default=False)
return args
def main():
args = _build_arg_parser().parse_args(sys.argv[1:])
global_options = {'dry_run': str(args.dry_run)}
log_level = logging.INFO
if 'LOG_LEVEL' in os.environ:
try:
log_level = getattr(logging, os.environ.get('LOG_LEVEL'))
except AttributeError:
msg = 'The configured log-level "{}" is not available.'
raise RuntimeError(msg.format(os.environ.get('LOG_LEVEL')))
logging.basicConfig(
level=log_level,
format='%(asctime)-15s %(process)d %(levelname)s %(name)s %(message)s')
logging.getLogger('kubernetes').setLevel(logging.WARNING)
logging.getLogger('keystoneauth').setLevel(logging.WARNING)
try:
k8s_config.load_kube_config()
_, context = k8s_config.list_kube_config_contexts()
region = context['context']['cluster']
domain = 'cc.{}.cloud.sap'.format(region)
global_options['own_namespace'] = 'kube-system'
global_options['incluster'] = False
except (IOError, k8s_config.config_exception.ConfigException):
if 'KUBERNETES_SERVICE_HOST' not in os.environ:
os.environ['KUBERNETES_SERVICE_HOST'] = 'kubernetes.default'
k8s_config.load_incluster_config()
global_options['incluster'] = True
with open('/var/run/secrets/kubernetes.io/serviceaccount/namespace',
'r') as f:
global_options['own_namespace'] = f.read()
with open('/etc/resolv.conf', 'r') as f:
for line in f:
if re.match(r'^search\s+', line):
_, domain = line.rsplit(' ', 1)
if 'SERVICE_DOMAIN' in os.environ:
domain = os.environ['SERVICE_DOMAIN']
configurator = Configurator(domain, global_options)
configurator.poll_config()
discovery = DnsDiscovery(domain, configurator.global_options)
discovery.register(re.compile(br'\Avc-[a-z]+-\d+\Z'), configurator)
while True:
discovery.discover()
configurator.poll()
sleep(10)
| import argparse
import logging
import os
import re
import sys
from time import sleep
from kubernetes import config as k8s_config
# Import discovery before configurator as there is some monkeypatching going on
from .discovery import DnsDiscovery
from .configurator import Configurator
LOG = logging.getLogger(__name__)
def _build_arg_parser():
args = argparse.ArgumentParser()
args.add_argument('--dry-run', action='store_true', default=False)
return args
def main():
args = _build_arg_parser().parse_args(sys.argv[1:])
global_options = {'dry_run': str(args.dry_run)}
log_level = logging.INFO
if 'LOG_LEVEL' in os.environ:
try:
log_level = getattr(logging, os.environ.get('LOG_LEVEL'))
except AttributeError:
msg = 'The configured log-level "{}" is not available.'
raise RuntimeError(msg.format(os.environ.get('LOG_LEVEL')))
logging.basicConfig(
level=log_level,
format='%(asctime)-15s %(process)d %(levelname)s %(name)s %(message)s')
logging.getLogger('kubernetes').setLevel(logging.WARNING)
logging.getLogger('keystoneauth').setLevel(logging.WARNING)
try:
k8s_config.load_kube_config()
_, context = k8s_config.list_kube_config_contexts()
region = context['context']['cluster']
domain = 'cc.{}.cloud.sap'.format(region)
global_options['own_namespace'] = 'kube-system'
global_options['incluster'] = False
except (IOError, k8s_config.config_exception.ConfigException):
if 'KUBERNETES_SERVICE_HOST' not in os.environ:
os.environ['KUBERNETES_SERVICE_HOST'] = 'kubernetes.default'
k8s_config.load_incluster_config()
global_options['incluster'] = True
with open('/var/run/secrets/kubernetes.io/serviceaccount/namespace',
'r') as f:
global_options['own_namespace'] = f.read()
with open('/etc/resolv.conf', 'r') as f:
for line in f:
if re.match(r'^search\s+', line):
_, domain = line.rsplit(' ', 1)
if 'SERVICE_DOMAIN' in os.environ:
domain = os.environ['SERVICE_DOMAIN']
configurator = Configurator(domain, global_options)
configurator.poll_config()
discovery = DnsDiscovery(domain, configurator.global_options)
discovery.register(re.compile(br'\Avc-[a-z]+-\d+\Z'), configurator)
while True:
discovery.discover()
configurator.poll()
sleep(10)
| en | 0.952721 | # Import discovery before configurator as there is some monkeypatching going on | 2.099982 | 2 |
configdict.py | s-light/OLA_simple_fader | 0 | 6631896 | <filename>configdict.py
#!/usr/bin/env python
# coding=utf-8
"""
simple package to read and write configs in dict format to file.
supports two formats:
json (preferred)
ini (not implemented jet)
history:
see git commits
todo:
~ all fine :-)
"""
import sys
import os
import time
import json
try:
# python3
from configparser import ConfigParser
# print("loaded python3 ConfigParser")
except:
# python2
from ConfigParser import ConfigParser
# print("loaded python2 ConfigParser")
version = """09.03.2016 18:00 stefan"""
##########################################
# globals
##########################################
# functions
##########################################
# classes
class ConfigDict():
"""abstract the reading / writing of configuration parameters."""
def __init__(self, default_config={}, filename=None):
"""initialize config to defaults."""
self.default_config = default_config
self.filename = filename
self.config = {}
if self.filename is not None:
if os.path.isfile(filename):
self.read_from_file()
else:
self.config = self.default_config.copy()
self.write_to_file()
else:
self.config = self.default_config.copy()
def merge_deep(self, obj_1, obj_2):
"""
merge dicts deeply.
obj_2 overwrittes keys with same values in obj_1.
(if they are dicts its recusive merged.)
"""
# work on obj_1
result = obj_1
# make copy
# result = obj_1.copy()
if (isinstance(result, dict) and isinstance(obj_2, dict)):
for key in obj_2:
if key in result:
result[key] = self.merge_deep(result[key], obj_2[key])
else:
result[key] = obj_2[key]
else:
result = obj_2
return result
def set_filename(self, filename):
"""set new filename."""
self.filename = filename
def _read_from_json_file(self, filename):
config_temp = {}
with open(self.filename, 'r') as f:
config_temp = json.load(f)
f.closed
return config_temp
# def _list_to_string(self, value_list):
# """try to convert string to a meaningfull datatype."""
# value_str = ""
# value_str = ", ".join(value_list)
# value_str = "[{}]".format(value_str)
# return value_str
#
# def _string_to_list(self, value_str):
# """try to convert string to a meaningfull datatype."""
# list = []
# if value_str.startswith("[") and value_str.endswith("]"):
# value_str = value_str.strip()
# list = value_str.split(",")
# else:
# raise TypeError("input string is not a valid list format.")
# return list
#
# def _try_to_convert_string(self, value_str):
# """try to convert string to a meaningfull datatype."""
# value = None
# try:
# value = self._string_to_list(value_str)
# except Exception as e:
# print("value not a list. ({})".format(e))
# else:
# try:
# value = self._string_to_dict(value_str)
# except Exception as e:
# print("value not a list. ({})".format(e))
# return value
def _convert_string_to_None(self, value_str):
"""test if string is None."""
value = None
value_str = value_str.strip()
if value_str in ["None", "none", "NONE", "Null", "NULL", "null"]:
value = None
else:
value = value_str
raise TypeError("input string is not valid None")
return value
def _try_to_interpret_string(self, value_str):
"""try to interprete string as something meaningfull."""
value = None
try:
value = json.loads(value_str)
except Exception as e:
# print("value not valid json. ({})".format(e))
try:
value = self._convert_string_to_None(value_str)
except Exception as e:
# print("value not None. ({})".format(e))
value = value_str
return value
def _configparser_get_converted(self, cp, section, option):
"""get option and try to convert it to a meaningfull datatype."""
# with this we try to convert the value to a meaningfull value..
value = None
try:
# try to read as int
value = cp.getint(section, option)
except Exception as e:
# print("value not a int. ({})".format(e))
try:
# try to read as float
value = cp.getfloat(section, option)
except Exception as e:
# print("value not a float. ({})".format(e))
# try to read as int
try:
value = cp.getboolean(section, option)
except Exception as e:
# print("value not a boolean. ({})".format(e))
# read as string
value = cp.get(section, option)
# try to convert it to something meaningfull
value = self._try_to_interpret_string(value)
# return value
return value
def _read_from_ini_file(self, filename):
config_temp = {}
cp = ConfigParser()
with open(self.filename, 'r') as f:
cp.readfp(f)
f.closed
# now converte ConfigParser to dict.
for section in cp.sections():
# print("section: {}".format(section))
config_temp[section] = {}
for option in cp.options(section):
# get option and add it to the dict
# print("option: {}".format(option))
value = self._configparser_get_converted(
cp,
section,
option
)
# print("value: {}".format(value))
config_temp[section][option] = value
return config_temp
def read_from_file(self, filename=None):
"""read configuration from file."""
if filename is not None:
self.filename = filename
config_temp = {}
if self.filename is not None:
# read file
filename_ext = os.path.splitext(self.filename)[1]
if filename_ext is not "" and filename_ext in '.json .js':
config_temp = self._read_from_json_file(self.filename)
else:
config_temp = self._read_from_ini_file(self.filename)
# do a merge with the defaults.
self.config = self.default_config.copy()
self.merge_deep(self.config, config_temp)
def _write_to_json_file(self, filename, config):
with open(filename, 'w') as f:
json.dump(
config,
f,
sort_keys=True,
indent=4,
separators=(',', ': ')
)
f.closed
def _value_to_string(self, value):
value_str = ""
if (
isinstance(value, object) or
isinstance(value, dict) or
isinstance(value, list)
):
value_str = json.dumps(value)
else:
value_str = "{}".format(value)
return value_str
def _write_to_ini_file(self, filename, config):
cp = ConfigParser()
for section in config:
# add section.
# print("section: {}".format(section))
cp.add_section(section)
for option in config[section]:
# print("option: {}".format(option))
value = None
if isinstance(config[section], list):
# option_index = config[section].index(option)
# value = config[section][option_index]
value = None
else:
value = config[section][option]
# print("value: {}".format(value))
# add option
cp.set(section, option, self._value_to_string(value))
with open(filename, 'w') as f:
cp.write(f)
f.closed
def write_to_file(self, filename=None):
"""write configuration to file."""
if filename is not None:
self.filename = filename
if self.filename is not None:
# print("\nwrite file: {}".format(self.filename))
filename_ext = os.path.splitext(self.filename)[1]
if filename_ext is not "" and filename_ext in '.json .js':
self._write_to_json_file(
self.filename,
self.config
)
else:
self._write_to_ini_file(
self.filename,
self.config
)
##########################################
if __name__ == '__main__':
print(42*'*')
print('Python Version: ' + sys.version)
print(42*'*')
print(__doc__)
print(42*'*')
# parse arguments
filename = "test.json"
# only use args after script name
arg = sys.argv[1:]
if not arg:
print("using standard values.")
print(" allowed parameters:")
print(" filename for config file (default='test.json')")
print("")
else:
filename = arg[0]
# if len(arg) > 1:
# pixel_count = int(arg[1])
# print parsed argument values
print('''values:
filename: {}
'''.format(filename))
default_config = {
'hello': {
'world': 1,
'space': 42,
},
'world': {
'books': [0, 1, 2, 3, 4, 4, 3, 2, 1, 0, ],
'fun': True,
'python': True,
'trees': {
'fir': 1,
'birch': 9,
'poplar': 33,
'maple': 11,
'cherry': 5,
'walnut': 2,
},
},
'blubber': ['water', 'air'],
}
my_config = ConfigDict(default_config, filename)
print("my_config.config: {}".format(my_config.config))
# wait for user to hit key.
try:
input(
"\n\n" +
42*'*' +
"\nhit a key to stop the mapper\n" +
42*'*' +
"\n\n"
)
except KeyboardInterrupt:
print("\nstop.")
except:
print("\nstop.")
# as last thing we save the current configuration.
print("\nwrite config.")
my_config.write_to_file()
# ###########################################
| <filename>configdict.py
#!/usr/bin/env python
# coding=utf-8
"""
simple package to read and write configs in dict format to file.
supports two formats:
json (preferred)
ini (not implemented jet)
history:
see git commits
todo:
~ all fine :-)
"""
import sys
import os
import time
import json
try:
# python3
from configparser import ConfigParser
# print("loaded python3 ConfigParser")
except:
# python2
from ConfigParser import ConfigParser
# print("loaded python2 ConfigParser")
version = """09.03.2016 18:00 stefan"""
##########################################
# globals
##########################################
# functions
##########################################
# classes
class ConfigDict():
"""abstract the reading / writing of configuration parameters."""
def __init__(self, default_config={}, filename=None):
"""initialize config to defaults."""
self.default_config = default_config
self.filename = filename
self.config = {}
if self.filename is not None:
if os.path.isfile(filename):
self.read_from_file()
else:
self.config = self.default_config.copy()
self.write_to_file()
else:
self.config = self.default_config.copy()
def merge_deep(self, obj_1, obj_2):
"""
merge dicts deeply.
obj_2 overwrittes keys with same values in obj_1.
(if they are dicts its recusive merged.)
"""
# work on obj_1
result = obj_1
# make copy
# result = obj_1.copy()
if (isinstance(result, dict) and isinstance(obj_2, dict)):
for key in obj_2:
if key in result:
result[key] = self.merge_deep(result[key], obj_2[key])
else:
result[key] = obj_2[key]
else:
result = obj_2
return result
def set_filename(self, filename):
"""set new filename."""
self.filename = filename
def _read_from_json_file(self, filename):
config_temp = {}
with open(self.filename, 'r') as f:
config_temp = json.load(f)
f.closed
return config_temp
# def _list_to_string(self, value_list):
# """try to convert string to a meaningfull datatype."""
# value_str = ""
# value_str = ", ".join(value_list)
# value_str = "[{}]".format(value_str)
# return value_str
#
# def _string_to_list(self, value_str):
# """try to convert string to a meaningfull datatype."""
# list = []
# if value_str.startswith("[") and value_str.endswith("]"):
# value_str = value_str.strip()
# list = value_str.split(",")
# else:
# raise TypeError("input string is not a valid list format.")
# return list
#
# def _try_to_convert_string(self, value_str):
# """try to convert string to a meaningfull datatype."""
# value = None
# try:
# value = self._string_to_list(value_str)
# except Exception as e:
# print("value not a list. ({})".format(e))
# else:
# try:
# value = self._string_to_dict(value_str)
# except Exception as e:
# print("value not a list. ({})".format(e))
# return value
def _convert_string_to_None(self, value_str):
"""test if string is None."""
value = None
value_str = value_str.strip()
if value_str in ["None", "none", "NONE", "Null", "NULL", "null"]:
value = None
else:
value = value_str
raise TypeError("input string is not valid None")
return value
def _try_to_interpret_string(self, value_str):
"""try to interprete string as something meaningfull."""
value = None
try:
value = json.loads(value_str)
except Exception as e:
# print("value not valid json. ({})".format(e))
try:
value = self._convert_string_to_None(value_str)
except Exception as e:
# print("value not None. ({})".format(e))
value = value_str
return value
def _configparser_get_converted(self, cp, section, option):
"""get option and try to convert it to a meaningfull datatype."""
# with this we try to convert the value to a meaningfull value..
value = None
try:
# try to read as int
value = cp.getint(section, option)
except Exception as e:
# print("value not a int. ({})".format(e))
try:
# try to read as float
value = cp.getfloat(section, option)
except Exception as e:
# print("value not a float. ({})".format(e))
# try to read as int
try:
value = cp.getboolean(section, option)
except Exception as e:
# print("value not a boolean. ({})".format(e))
# read as string
value = cp.get(section, option)
# try to convert it to something meaningfull
value = self._try_to_interpret_string(value)
# return value
return value
def _read_from_ini_file(self, filename):
config_temp = {}
cp = ConfigParser()
with open(self.filename, 'r') as f:
cp.readfp(f)
f.closed
# now converte ConfigParser to dict.
for section in cp.sections():
# print("section: {}".format(section))
config_temp[section] = {}
for option in cp.options(section):
# get option and add it to the dict
# print("option: {}".format(option))
value = self._configparser_get_converted(
cp,
section,
option
)
# print("value: {}".format(value))
config_temp[section][option] = value
return config_temp
def read_from_file(self, filename=None):
"""read configuration from file."""
if filename is not None:
self.filename = filename
config_temp = {}
if self.filename is not None:
# read file
filename_ext = os.path.splitext(self.filename)[1]
if filename_ext is not "" and filename_ext in '.json .js':
config_temp = self._read_from_json_file(self.filename)
else:
config_temp = self._read_from_ini_file(self.filename)
# do a merge with the defaults.
self.config = self.default_config.copy()
self.merge_deep(self.config, config_temp)
def _write_to_json_file(self, filename, config):
with open(filename, 'w') as f:
json.dump(
config,
f,
sort_keys=True,
indent=4,
separators=(',', ': ')
)
f.closed
def _value_to_string(self, value):
value_str = ""
if (
isinstance(value, object) or
isinstance(value, dict) or
isinstance(value, list)
):
value_str = json.dumps(value)
else:
value_str = "{}".format(value)
return value_str
def _write_to_ini_file(self, filename, config):
cp = ConfigParser()
for section in config:
# add section.
# print("section: {}".format(section))
cp.add_section(section)
for option in config[section]:
# print("option: {}".format(option))
value = None
if isinstance(config[section], list):
# option_index = config[section].index(option)
# value = config[section][option_index]
value = None
else:
value = config[section][option]
# print("value: {}".format(value))
# add option
cp.set(section, option, self._value_to_string(value))
with open(filename, 'w') as f:
cp.write(f)
f.closed
def write_to_file(self, filename=None):
"""write configuration to file."""
if filename is not None:
self.filename = filename
if self.filename is not None:
# print("\nwrite file: {}".format(self.filename))
filename_ext = os.path.splitext(self.filename)[1]
if filename_ext is not "" and filename_ext in '.json .js':
self._write_to_json_file(
self.filename,
self.config
)
else:
self._write_to_ini_file(
self.filename,
self.config
)
##########################################
if __name__ == '__main__':
print(42*'*')
print('Python Version: ' + sys.version)
print(42*'*')
print(__doc__)
print(42*'*')
# parse arguments
filename = "test.json"
# only use args after script name
arg = sys.argv[1:]
if not arg:
print("using standard values.")
print(" allowed parameters:")
print(" filename for config file (default='test.json')")
print("")
else:
filename = arg[0]
# if len(arg) > 1:
# pixel_count = int(arg[1])
# print parsed argument values
print('''values:
filename: {}
'''.format(filename))
default_config = {
'hello': {
'world': 1,
'space': 42,
},
'world': {
'books': [0, 1, 2, 3, 4, 4, 3, 2, 1, 0, ],
'fun': True,
'python': True,
'trees': {
'fir': 1,
'birch': 9,
'poplar': 33,
'maple': 11,
'cherry': 5,
'walnut': 2,
},
},
'blubber': ['water', 'air'],
}
my_config = ConfigDict(default_config, filename)
print("my_config.config: {}".format(my_config.config))
# wait for user to hit key.
try:
input(
"\n\n" +
42*'*' +
"\nhit a key to stop the mapper\n" +
42*'*' +
"\n\n"
)
except KeyboardInterrupt:
print("\nstop.")
except:
print("\nstop.")
# as last thing we save the current configuration.
print("\nwrite config.")
my_config.write_to_file()
# ###########################################
| en | 0.429926 | #!/usr/bin/env python # coding=utf-8 simple package to read and write configs in dict format to file. supports two formats: json (preferred) ini (not implemented jet) history: see git commits todo: ~ all fine :-) # python3 # print("loaded python3 ConfigParser") # python2 # print("loaded python2 ConfigParser") 09.03.2016 18:00 stefan ########################################## # globals ########################################## # functions ########################################## # classes abstract the reading / writing of configuration parameters. initialize config to defaults. merge dicts deeply. obj_2 overwrittes keys with same values in obj_1. (if they are dicts its recusive merged.) # work on obj_1 # make copy # result = obj_1.copy() set new filename. # def _list_to_string(self, value_list): # """try to convert string to a meaningfull datatype.""" # value_str = "" # value_str = ", ".join(value_list) # value_str = "[{}]".format(value_str) # return value_str # # def _string_to_list(self, value_str): # """try to convert string to a meaningfull datatype.""" # list = [] # if value_str.startswith("[") and value_str.endswith("]"): # value_str = value_str.strip() # list = value_str.split(",") # else: # raise TypeError("input string is not a valid list format.") # return list # # def _try_to_convert_string(self, value_str): # """try to convert string to a meaningfull datatype.""" # value = None # try: # value = self._string_to_list(value_str) # except Exception as e: # print("value not a list. ({})".format(e)) # else: # try: # value = self._string_to_dict(value_str) # except Exception as e: # print("value not a list. ({})".format(e)) # return value test if string is None. try to interprete string as something meaningfull. # print("value not valid json. ({})".format(e)) # print("value not None. ({})".format(e)) get option and try to convert it to a meaningfull datatype. # with this we try to convert the value to a meaningfull value.. # try to read as int # print("value not a int. ({})".format(e)) # try to read as float # print("value not a float. ({})".format(e)) # try to read as int # print("value not a boolean. ({})".format(e)) # read as string # try to convert it to something meaningfull # return value # now converte ConfigParser to dict. # print("section: {}".format(section)) # get option and add it to the dict # print("option: {}".format(option)) # print("value: {}".format(value)) read configuration from file. # read file # do a merge with the defaults. # add section. # print("section: {}".format(section)) # print("option: {}".format(option)) # option_index = config[section].index(option) # value = config[section][option_index] # print("value: {}".format(value)) # add option write configuration to file. # print("\nwrite file: {}".format(self.filename)) ########################################## # parse arguments # only use args after script name # if len(arg) > 1: # pixel_count = int(arg[1]) # print parsed argument values values: filename: {} # wait for user to hit key. # as last thing we save the current configuration. # ########################################### | 3.370448 | 3 |
brutejudge/_http/ejudge/__init__.py | sleirsgoevy/brutejudge | 1 | 6631897 | <reponame>sleirsgoevy/brutejudge<filename>brutejudge/_http/ejudge/__init__.py
import ssl, socket, html, collections, urllib.parse, time, math
import brutejudge._http.ejudge.ej371, brutejudge._http.ejudge.ej373, brutejudge._http.html2md as html2md, brutejudge._http.types as bjtypes
from brutejudge._http.base import Backend
from brutejudge.error import BruteError
def _http_header_capitalize(h):
return '-'.join(i[:1].upper()+i[1:].lower() for i in h.split('-'))
def do_http(url, method, headers={}, data=b''):
if '://' not in url:
raise BruteError("Invalid URL")
proto, path = url.split('://', 1)
if proto not in ('http', 'https'):
raise BruteError("Not an HTTP url: " + url)
if '/' not in path: path += '/'
s_host, path = path.split('/', 1)
path = '/' + path
host = s_host
if ':' in host:
host, port = host.rsplit(':', 1)
port = int(port)
else: port = 80 if proto == 'http' else 443
if host.startswith('[') and host.endswith(']'): host = host[1:-1]
sock = socket.create_connection((host, port))
if proto == 'https':
sock = ssl.create_default_context().wrap_socket(sock, server_hostname=host)
headers['Host'] = s_host
if data:
headers['Content-Length'] = len(data)
request = ['%s %s HTTP/1.1' % (method, path)]
for k, v in headers.items():
request.append(str(k) + ': ' + str(v))
request.append('')
request.append('')
sock.sendall('\r\n'.join(request).encode('utf-8'))
if data:
sock.sendall(data)
def readline():
ans = b''
while not ans.endswith(b'\n'): ans += sock.recv(1)
return ans
v, c, *exp = readline().decode('utf-8').split()
resp_headers = []
while True:
l = readline().decode('utf-8').strip()
if l == '': break
k, v = l.split(': ', 1)
resp_headers.append((_http_header_capitalize(k), v))
rhd = dict(resp_headers)
if 'Content-Length' in rhd:
data = b''
while len(data) < int(rhd['Content-Length']):
data += sock.recv(int(rhd['Content-Length']) - len(data))
elif rhd.get('Transfer-Encoding', None) == 'chunked':
data = b''
while True:
l = int(readline().decode('ascii'), 16)
data2 = b''
while len(data2) < l:
data2 += sock.recv(l - len(data2))
data += data2
readline()
if l == 0: break
else:
data = b''
nfails = 0
while nfails < 100:
try: chunk = sock.recv(1 << 20)
except socket.error: break
data += chunk
if len(chunk) == 0: break
sock.close()
return (int(c), rhd, data)
#try: import requests
#except ImportError: pass
#else:
# def do_http(url, method, headers, data=b''):
# if method == 'GET': data = None
# it = requests.request(method, url, data=data, headers=headers, allow_redirects=False)
# return (it.status_code, it.headers, it.content)
def get(url, headers={}):
return do_http(url, 'GET', headers)
def post(url, data, headers={}):
if isinstance(data, dict):
l = []
for k, v in data.items():
k += '='
for c in str(v):
if c.lower() in 'abcdefghijklmnopqrstuvwxyz0123456789':
k += c
else:
k += ''.join(map('%%%02x'.__mod__, c.encode('utf-8')))
l.append(k)
data = '&'.join(l)
if isinstance(data, str):
data = data.encode('utf-8')
return do_http(url, 'POST', headers, data)
def contest_name(url):
code, headers, data = get(url)
if code != 200:
raise BruteError("Page retrieval failed.")
try: return html.unescape(data.decode('utf-8', 'replace').split('<title>', 1)[1].split(' [', 1)[1].split(']</title>', 1)[-2])
except IndexError: return None
class Ejudge(Backend):
@staticmethod
def detect(url):
return url.startswith('http://') or url.startswith('https://')
def __init__(self, url, login, password):
Backend.__init__(self)
url0 = url
url = url.replace('/new-register?', '/new-client?')
contest_id = url.split("contest_id=")[1].split("&")[0]
self.contest_id = int(contest_id)
base_url = url.split("?")[0]
code, headers, data = post(url0.split("?")[0], {'contest_id': contest_id, 'locale_id': 0, 'login': login, 'password': password, 'action_213': ''})
if code != 302:
raise BruteError("Login failed.")
rhd = dict(headers)
base_url = rhd['Location'].split('&')[0]
base_url = base_url.replace('/new-register?', '/new-client?')
if 'new-client?SID=' in base_url:
urls = ej371.get_urls(base_url)
elif any(i in base_url for i in ('/user/', '/client/', '/register/', '/register?SID=')):
urls = ej373.get_urls(base_url)
else:
raise BruteError("Unknown ejudge version.")
self.urls = urls
self.cookie = rhd["Set-Cookie"].split(";")[0]
self._get_cache = {}
def _cache_get(self, url, cookie=True):
with self.cache_lock:
if url in self._get_cache:
return self._get_cache[url]
ans = get(url, {'Cookie': self.cookie} if cookie else {})
with self.cache_lock:
if self.caching: self._get_cache[url] = ans
return ans
# @staticmethod
# def _task_list(data):
# column_count = data.count('<th ')
# if column_count == 0: return []
# splitted = data.split('<td class="b1">')[1:]
# if not splitted: splitted = [i.split('>', 1)[-1].split('</div>', 1)[0] for i in data.split('<div class="prob', 1)]
# print(data)
# data = []
# for x in splitted[::column_count]:
# x = x.split("</td>")[0]
# if x.startswith('<a href="') and x.endswith('</a>'):
# x = x.split('"', 2)[2].split('>', 1)[1][:-4]
# data.append(html.unescape(x))
# return data
@staticmethod
def _task_ids(data):
try:
data = data.split('<tr id="probNavTopList"><td width="100%" class="nTopNavList"><ul class="nTopNavList"><li class="first-rad">', 1)[1].split('\n', 1)[0]
except IndexError:
try:
data = data.split('<td class="b0" id="probNavRightList" valign="top">', 1)[1].split('\n', 1)[0]
except IndexError: return []
splitted = data.split('<a class="tab" href="')[1:]
data = [x.split('"', 1) for x in splitted]
ans = []
for i in data:
ans.append((int(i[0].split('prob_id=', 1)[1]), html.unescape(i[1].split('>', 1)[-1].split('</a>', 1)[0])))
return ans
def tasks(self):
code, headers, data = self._cache_get(self.urls['summary'])
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
if code != 200:
raise BruteError("Failed to fetch task list.")
data = data.decode('utf-8')
#tl = self._task_list(data)
ti = self._task_ids(data)
#if len(tl) < len(ti): tl.extend([None]*(len(ti)-len(tl)))
#else: ti.extend([None]*(len(tl)-len(ti)))
return [bjtypes.task_t(i, j, None) for i, j in ti]
def submissions(self):
code, headers, data = self._cache_get(self.urls['submissions'])
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
if code != 200:
raise BruteError("Failed to fetch submission list.")
ths = [i.split('</th>', 1)[0] for i in data.decode('utf-8').split('<th class="b1">')[1:]]
w = len(ths)
if w == 0: return []
splitted = data.decode('utf-8').split('<td class="b1">')[1:]
data = [x.split("</td>")[0] for x in splitted]
run_ids = list(map(lambda x:(int(x[:-1]) if x[-1:] == '#' else int(x)), data[ths.index('Run ID')::w]))
task_ids = data[ths.index('Problem')::w]
if 'Result' in ths:
statuses = data[ths.index('Result')::w]
else:
statuses = [None]*len(run_ids)
if 'Score' in ths:
scores = []
for i in data[ths.index('Score')::w]:
i = i.strip()
if i.startswith('<b>'): i = i[3:].split('</b>', 1)[0] # TODO: report score_ex in submission_stats
i = i.strip()
if i in ('', 'N/A', ' '): scores.append(None)
else: scores.append(int(i))
else:
scores = [None]*len(run_ids)
if 'Tests passed' in ths:
oktests = []
for i in data[ths.index('Tests passed')::w]:
i = i.strip()
if i.startswith('<b>'): i = i[3:]
if i.endswith('</b>'): i = i[:-4]
i = i.strip()
if i in ('', 'N/A', ' '): oktests.append(None)
else: oktests.append(int(i))
else:
oktests = [None]*len(run_ids)
assert len(run_ids) == len(task_ids) == len(statuses) == len(scores) == len(oktests)
return [bjtypes.submission_t(i, j, k, l, m) for i, j, k, l, m in zip(run_ids, task_ids, statuses, scores, oktests)]
def submission_protocol(self, id):
code, headers, data = self._cache_get(self.urls['protocol'].format(run_id=id))
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
if code != 200:
raise BruteError("Failed to fetch testing protocol.")
w = data.count(b'<th ')
if w == 0: return []
splitted = data.decode('utf-8').split('<td class="b1">')[1:]
data = [x.split("</td>")[0] for x in splitted]
statuses = [i[:-7].split('>')[-1] for i in data[1::w]]
tls = []
for i in map(html.unescape, data[2::w]):
if i.startswith('>'): i = i[1:]
tls.append(float(i))
assert len(statuses) == len(tls)
return [bjtypes.test_t(i, {'time_usage': j}) for i, j in zip(statuses, tls)]
def submit_solution(self, task, lang, text):
if isinstance(text, str): text = text.encode('utf-8')
sid = self.urls['sid']
url = self.urls['submit']
data = []
data.append(b'"SID"\r\n\r\n'+sid.encode('ascii'))
data.append(b'"prob_id"\r\n\r\n'+str(task).encode('ascii'))
data.append(b'"lang_id"\r\n\r\n'+str(lang).encode('ascii'))
data.append(b'"file"; filename="brute.txt"\r\nContent-Type'
b': text/plain\r\n\r\n'+text)
data.append(b'"action_40"\r\n\r\nSend!')
import random
while True:
x = b'----------'+str(random.randrange(1, 1000000000)).encode('ascii')
for i in data:
if x in i: break
else: break
data = b'\r\n'.join(b'--'+x+b'\r\nContent-Disposition: form-data; name='+i for i in data)+b'\r\n--'+x+b'--\r\n'
ans = post(url, data, {'Content-Type': 'multipart/form-data; boundary='+x.decode('ascii'), 'Cookie': self.cookie})
with self.cache_lock: self.stop_caching()
def status(self):
code, headers, data = self._cache_get(self.urls['summary'])
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
if code != 200:
raise BruteError("Failed to fetch task list")
ths = [i.split('</th>', 1)[0] for i in data.decode('utf-8').split('<th class="b1">')[1:]]
w = len(ths)
if w == 0: return {}
splitted = data.decode('utf-8').split('<td class="b1">')[1:]
data = [x.split("</td>")[0] for x in splitted]
idx = ths.index('Status')
return collections.OrderedDict((a, b if b != ' ' else None) for a, b in zip(data[ths.index('Short name')::w], data[idx::w]))
def scores(self, *, total=None):
code, headers, data = self._cache_get(self.urls['summary'])
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
if code != 200:
raise BruteError("Failed to fetch task list")
data0 = data.decode('utf-8')
ths = [i.split('</th>', 1)[0] for i in data0.split('<th class="b1">')[1:]]
w = len(ths)
splitted = data.decode('utf-8').split('<td class="b1">')[1:]
data = [x.split("</td>")[0] for x in splitted]
if 'Score' not in ths: ans = {}
else: ans = collections.OrderedDict(zip(data[ths.index('Short name')::w], [None if x == ' ' else int(x) for x in data[ths.index('Score')::w]]))
if total != None and '<p><big>Total score: ' in data0:
try: ans[total] = int(data0.split('<p><big>Total score: ', 1)[1].split('</big></p>', 1)[0])
except (ValueError, IndexError): pass
return ans
def compile_error(self, id):
code, headers, data = self._cache_get(self.urls['protocol'].format(run_id=id))
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
if code != 200:
raise BruteError("Failed to fetch testing protocol.")
splitted = data.decode('utf-8').split('<pre>')[1:]
ans = []
for i in splitted:
i = i.split('</pre>')[0]
i = i.split('<')
i = i[0] + ''.join(j.split('>', 1)[1] for j in i[1:])
import html
ans.append(html.unescape(i))
return '\n'.join(ans)
def submission_source(self, id):
code, headers, data = self._cache_get(self.urls['source'].format(run_id=id))
rhd = dict(headers)
if 'html' in rhd['Content-Type'] and b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
if code != 200 or 'html' in rhd['Content-Type']:
return None
return data
def do_action(self, name, *args):
code, headers, data = get(self.urls[name], {'Cookie': self.cookie})
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
return code == 302
def compiler_list(self, prob_id):
code, headers, data = self._cache_get(self.urls['submission'].format(prob_id=prob_id))
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
data = data.decode('utf-8')
if '<input type="hidden" name="lang_id" value="' in data:
data = data.split('<input type="hidden" name="lang_id" value="', 1)[1]
num_id = int(data.split('"', 1)[0])
lit_id = html.unescape(data.split('</td><td class="b0">', 1)[1].split('</td>', 1)[0])
short, long = lit_id.strip().split(' - ')
return [bjtypes.compiler_t(num_id, short, long)]
try: data = data.split('<select name="lang_id">', 1)[1].split('</select>', 1)[0]
except IndexError: raise BruteError("Failed to fetch language list")
data = data.split('<option ')[1:]
ans = []
for i in data:
a, b = (' '+i).split(' value="', 1)[1].split('"', 1)
b = b.split('>', 1)[1].split('</option>', 1)[0]
if not a.isnumeric(): continue
b, c = html.unescape(b).split(' - ')
ans.append(bjtypes.compiler_t(int(a), b.strip(), c.strip()))
return ans
def submission_stats(self, id):
code, headers, data = self._cache_get(self.urls['protocol'].format(run_id=id))
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
data = data.decode('utf-8')
if '<big>' in data:
data = '\n\n'.join(i.split('</big>', 1)[0] for i in data.split('<big>')[1:]).split('<')
data = data[0]+''.join(i.split('>', 1)[1] for i in data[1:])
ans = {}
for l in data.split('\n'):
l = l.split(' ')
if l[1:4] == ['total', 'tests', 'runs,'] and l[5] == 'passed,' and l[7:] == ['failed.']:
ans['tests'] = {}
ans['tests']['total'] = int(l[0])
ans['tests']['success'] = int(l[4])
ans['tests']['fail'] = int(l[6])
elif l[:2] == ['Score', 'gained:']:
ans['score'] = int(l[2])
return (ans, data)
else:
return ({}, None)
def contest_info(self):
code, headers, data = self._cache_get(self.urls['contest_info'])
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
data = data.decode('utf-8')
try: pbs = '\n'.join(html.unescape(i.split('</b></p>', 1)[0]) for i in data.split('<p><b>')[1:])
except IndexError: pbs = ''
datas = {}
for i in data.split('<tr><td class="b0">')[1:]:
i = i.split('</td></tr>', 1)[0]
try: key, value = i.split('<td class="b0">')
except IndexError: pass
else: datas[html.unescape(key.split('</td>', 1)[0])] = html.unescape(value)
data1 = {}
for k1, k2 in (('server_time', 'Server time:'), ('contest_start', 'Contest start time'), ('contest_duration', 'Duration:')):
if k2 not in datas: continue
if datas[k2] == 'Unlimited':
data1[k1] = math.inf
continue
if ' ' in datas[k2]:
date, s_time = datas[k2].split(' ')
year, month, day = map(int, date.split('/'))
hour, minute, second = map(int, s_time.split(':'))
data1[k1] = time.mktime((year, month, day, hour, minute, second, -1, -1, -1))
else:
data1[k1] = 0
for i in map(int, datas[k2].split(':')):
data1[k1] = data1[k1] * 60 + i
if 'contest_start' in data1 and 'contest_duration' in data1:
data1['contest_end'] = data1['contest_start'] + data1['contest_duration']
if 'contest_start' in data1 and 'server_time' in data1:
data1['contest_time'] = data1['server_time'] - data1['contest_start']
return (pbs, datas, data1)
def problem_info(self, id):
code, headers, data = self._cache_get(self.urls['submission'].format(prob_id=id))
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
data = data.decode('utf-8')
if '<table class="line-table-wb">' not in data: return ({}, None)
data = data.split('<table class="line-table-wb">', 1)[1]
stats = {}
data, data2 = data.split('</table>', 1)
while '<tr><td><b>' in data:
k, data = data.split('<tr><td><b>', 1)[1].split('</b></td><td>', 1)
v, data = data.split('</td></tr>', 1)
v = v.split('<')
v = v[0]+''.join(i.split('>', 1)[1] for i in v[1:])
stats[k.rsplit(':', 1)[0].strip()] = html.unescape(v.strip())
data = data2.split('<form method="post" enctype="multipart/form-data" action="', 1)[0].rsplit('<h3>', 1)[0]
if data.endswith('</html>\n'):
data = ''
return (stats, html2md.html2md(data, self.urls['download_file'].format(prob_id=id, filename=''), self.urls['submission'].format(prob_id=id)))
def download_file(self, prob_id, filename):
code, headers, data = self._cache_get(self.urls['download_file'].format(prob_id=prob_id, filename=filename))
if code == 404:
raise BruteError("File not found.")
elif code != 200:
raise BruteError("Error downloading.")
return data
def clar_list(self):
code, headers, data = self._cache_get(self.urls['clars'])
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
ths = [i.split('</th>', 1)[0] for i in data.decode('utf-8').split('<th class="b1">')[1:]]
w = len(ths)
splitted = data.decode('utf-8').split('<td class="b1">')[1:]
data = [x.split("</td>")[0] for x in splitted]
return [bjtypes.clar_t(i, j) for i, j in zip(map(int, data[ths.index('Clar ID')::w]), map(html.unescape, data[ths.index('Subject')::w]))]
def submit_clar(self, task, subject, text):
if post(self.urls['submit'], {'SID': self.urls['sid'], 'prob_id': task, 'subject': subject, 'text': text, 'action_41': 'Send!'}, {'Cookie': self.cookie})[0] != 302:
raise BruteError("Failed to submit clar")
with self.cache_lock: self.stop_caching()
def read_clar(self, id):
code, headers, data = self._cache_get(self.urls['read_clar'].format(clar_id=id))
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
data = html.unescape(data.decode('utf-8').split('<pre class="message">', 1)[1].split('</pre>', 1)[0])
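        # The first two lines of the unescaped message appear to carry
        # metadata (e.g. sender/date), so drop them and return only the body.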
return data.split('\n', 2)[2]
def _get_samples(self, err):
        if err is None: err = ''
err = err.strip()
if "====== Test #" not in err:
raise BruteError("No test cases available")
err = err[err.find("====== Test #"):]
lines = iter(err.split('\n'))
tests = {}
curr = None
for line in lines:
if line.startswith("====== Test #"):
num = int(line[13:-7])
curr = tests[num] = {}
elif line.startswith('--- '):
line = line[4:-4]
if ': size ' not in line: continue
what, size = line.split(': size ')
size = int(size) + 1
data = ''
while len(data) < size:
try: data += '\n' + next(lines)
except StopIteration: break
data = data[1:]
curr[what] = data
return tests
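    # Illustrative sketch of the compile-log layout _get_samples() parses
    # (assumed format; real ejudge output may differ in details):
    #
    #   ====== Test #1 ======
    #   --- Input: size 3 ---
    #   1 2
    #   --- Correct output: size 1 ---
    #   3
    #
    # which would yield roughly {1: {'Input': '1 2', 'Correct output': '3'}}.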
def get_samples(self, subm_id):
return self._get_samples(self.compile_error(subm_id))
def scoreboard(self):
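        # Scrapes the HTML standings; the second branch below handles a
        # plainer table markup that lacks the st_team/st_prob cell classes.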
code, headers, data = self._cache_get(self.urls['standings'])
if b'<input type="submit" name="action_35" value="Change!" />' in data:
raise BruteError("Password change is required.")
if code != 200:
raise BruteError("Failed to fetch scoreboard.")
teams = data.decode('utf-8').split('<td class="st_team">')[1:]
probs = data.decode('utf-8').split('<td class="st_prob')[1:]
naux = 0
if not teams and b'<table border=1 cellspacing=1 celpadding=3>\n <tr >\n <th align=right>Place</th>' in data:
probs = sum((i.split('<td class="st_prob') for i in data.decode('utf-8').split('<td align=center')), [])[1:]
teams = data.decode('utf-8').split('<td align=left>')[1:]
naux = 1
probs = [x.split("</td>", 1)[0] for x in probs]
teams = [html.unescape(x.split("</td>", 1)[0]) for x in teams]
try: ntasks = len(probs) // len(teams) - naux
except ZeroDivisionError: return []
del teams[-3:]
del probs[-3*ntasks:]
probs = iter(probs)
ans = []
for i in teams:
ans.append(({'name': i}, []))
for j in range(ntasks):
j = next(probs).split('>', 1)[1]
if j == ' ': ans[-1][1].append(None)
elif j[:1] in ('+', '-'):
attempts = int(j[0]+'0'+j[1:])
ans[-1][1].append({'attempts': attempts})
elif j.startswith('<b>') and j.endswith('</b>') and j[3:-4].isnumeric():
score = int(j[3:-4])
attempts = float('inf')
ans[-1][1].append({'score': score, 'attempts': attempts})
elif j.isnumeric():
score = int(j)
attempts = float('-inf')
ans[-1][1].append({'score': score, 'attempts': attempts})
else:
assert False, repr(j)
for j in range(naux): next(probs)
return ans
def stop_caching(self):
self._get_cache.clear()
def contest_list(self):
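        # Also callable unbound with a plain URL string in place of self,
        # which the isinstance() check below supports.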
if isinstance(self, str): url = self
else: url = self.urls['contest_list']
code, headers, data = get(url)
if code != 200:
return []
ans = []
for i in data.decode('utf-8').split('<td><a href="')[1:]:
url = html.unescape(i.split('"', 1)[0])
name = html.unescape(i.split('>', 1)[1].split('<', 1)[0])
ans.append((name, url, {}))
return ans
def change_password(self, oldpwd, newpwd):
if post(self.urls['submit'], {'SID': self.urls['sid'], 'oldpasswd': oldpwd, 'newpasswd1': newpwd, 'newpasswd2': newpwd, 'action_35': 'Change!'}, {'Cookie': self.cookie})[0] != 302:
raise BruteError("Failed to change password.")
cinder/tests/test_rbd.py | cloudbau/cinder | 0 | 6631898 | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 <NAME>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mox
import os
import tempfile
from cinder import db
from cinder import exception
from cinder.image import image_utils
from cinder.openstack.common import log as logging
from cinder.openstack.common import timeutils
from cinder import test
from cinder.tests.backup.fake_rados import mock_rados
from cinder.tests.backup.fake_rados import mock_rbd
from cinder.tests.image import fake as fake_image
from cinder.tests.test_volume import DriverTestCase
from cinder import units
from cinder.volume import configuration as conf
import cinder.volume.drivers.rbd as driver
LOG = logging.getLogger(__name__)
CEPH_MON_DUMP = """dumped monmap epoch 1
{ "epoch": 1,
"fsid": "33630410-6d93-4d66-8e42-3b953cf194aa",
"modified": "2013-05-22 17:44:56.343618",
"created": "2013-05-22 17:44:56.343618",
"mons": [
{ "rank": 0,
"name": "a",
"addr": "[::1]:6789\/0"},
{ "rank": 1,
"name": "b",
"addr": "[::1]:6790\/0"},
{ "rank": 2,
"name": "c",
"addr": "[::1]:6791\/0"},
{ "rank": 3,
"name": "d",
"addr": "127.0.0.1:6792\/0"},
{ "rank": 4,
"name": "e",
"addr": "example.com:6791\/0"}],
"quorum": [
0,
1,
2]}
"""
class FakeImageService:
def download(self, context, image_id, path):
pass
class TestUtil(test.TestCase):
def test_ascii_str(self):
self.assertIsNone(driver.ascii_str(None))
self.assertEqual('foo', driver.ascii_str('foo'))
self.assertEqual('foo', driver.ascii_str(u'foo'))
self.assertRaises(UnicodeEncodeError,
driver.ascii_str, 'foo' + unichr(300))
class RBDTestCase(test.TestCase):
def setUp(self):
super(RBDTestCase, self).setUp()
def fake_execute(*args, **kwargs):
return '', ''
self.configuration = mox.MockObject(conf.Configuration)
self.configuration.volume_tmp_dir = None
self.configuration.rbd_pool = 'rbd'
self.configuration.rbd_ceph_conf = None
self.configuration.rbd_secret_uuid = None
self.configuration.rbd_user = None
self.configuration.append_config_values(mox.IgnoreArg())
self.rados = self.mox.CreateMockAnything()
self.rbd = self.mox.CreateMockAnything()
self.driver = driver.RBDDriver(execute=fake_execute,
configuration=self.configuration,
rados=self.rados,
rbd=self.rbd)
self.driver.set_initialized()
def test_create_volume(self):
name = u'volume-00000001'
size = 1
volume = dict(name=name, size=size)
mock_client = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RADOSClient')
driver.RADOSClient(self.driver).AndReturn(mock_client)
mock_client.__enter__().AndReturn(mock_client)
self.rbd.RBD_FEATURE_LAYERING = 1
_mock_rbd = self.mox.CreateMockAnything()
self.rbd.RBD().AndReturn(_mock_rbd)
_mock_rbd.create(mox.IgnoreArg(), str(name), size * 1024 ** 3,
old_format=False,
features=self.rbd.RBD_FEATURE_LAYERING)
mock_client.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
self.driver.create_volume(volume)
def test_delete_volume(self):
name = u'volume-00000001'
volume = dict(name=name)
# Setup librbd stubs
self.stubs.Set(self.driver, 'rados', mock_rados)
self.stubs.Set(self.driver, 'rbd', mock_rbd)
class mock_client(object):
def __init__(self, *args, **kwargs):
self.ioctx = None
def __enter__(self, *args, **kwargs):
return self
def __exit__(self, type_, value, traceback):
pass
self.stubs.Set(driver, 'RADOSClient', mock_client)
self.stubs.Set(self.driver, '_get_backup_snaps',
lambda *args: None)
self.stubs.Set(self.driver.rbd.Image, 'list_snaps',
lambda *args: [])
self.stubs.Set(self.driver.rbd.Image, 'parent_info',
lambda *args: (None, None, None))
self.stubs.Set(self.driver.rbd.Image, 'unprotect_snap',
lambda *args: None)
self.driver.delete_volume(volume)
def test_create_snapshot(self):
vol_name = u'volume-00000001'
snap_name = u'snapshot-name'
snapshot = dict(volume_name=vol_name, name=snap_name)
mock_proxy = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RBDVolumeProxy')
driver.RBDVolumeProxy(self.driver, vol_name) \
.AndReturn(mock_proxy)
mock_proxy.__enter__().AndReturn(mock_proxy)
mock_proxy.create_snap(str(snap_name))
self.rbd.RBD_FEATURE_LAYERING = 1
mock_proxy.protect_snap(str(snap_name))
mock_proxy.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
self.driver.create_snapshot(snapshot)
def test_delete_snapshot(self):
vol_name = u'volume-00000001'
snap_name = u'snapshot-name'
snapshot = dict(volume_name=vol_name, name=snap_name)
mock_proxy = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RBDVolumeProxy')
driver.RBDVolumeProxy(self.driver, vol_name) \
.AndReturn(mock_proxy)
mock_proxy.__enter__().AndReturn(mock_proxy)
self.rbd.RBD_FEATURE_LAYERING = 1
mock_proxy.unprotect_snap(str(snap_name))
mock_proxy.remove_snap(str(snap_name))
mock_proxy.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
self.driver.delete_snapshot(snapshot)
def test_create_cloned_volume(self):
src_name = u'volume-00000001'
dst_name = u'volume-00000002'
# Setup librbd stubs
self.stubs.Set(self.driver, 'rados', mock_rados)
self.stubs.Set(self.driver, 'rbd', mock_rbd)
self.driver.rbd.RBD_FEATURE_LAYERING = 1
class mock_client(object):
def __init__(self, *args, **kwargs):
self.ioctx = None
def __enter__(self, *args, **kwargs):
return self
def __exit__(self, type_, value, traceback):
pass
self.stubs.Set(driver, 'RADOSClient', mock_client)
def mock_clone(*args, **kwargs):
pass
self.stubs.Set(self.driver.rbd.RBD, 'clone', mock_clone)
self.stubs.Set(self.driver.rbd.Image, 'list_snaps',
lambda *args: [{'name': 'snap1'}, {'name': 'snap2'}])
self.stubs.Set(self.driver.rbd.Image, 'parent_info',
lambda *args: (None, None, None))
self.stubs.Set(self.driver.rbd.Image, 'protect_snap',
lambda *args: None)
self.driver.create_cloned_volume(dict(name=dst_name),
dict(name=src_name))
def test_good_locations(self):
locations = ['rbd://fsid/pool/image/snap',
'rbd://%2F/%2F/%2F/%2F', ]
map(self.driver._parse_location, locations)
def test_bad_locations(self):
locations = ['rbd://image',
'http://path/to/somewhere/else',
'rbd://image/extra',
'rbd://image/',
'rbd://fsid/pool/image/',
'rbd://fsid/pool/image/snap/',
'rbd://///', ]
for loc in locations:
self.assertRaises(exception.ImageUnacceptable,
self.driver._parse_location,
loc)
self.assertFalse(self.driver._is_cloneable(loc))
def test_cloneable(self):
self.stubs.Set(self.driver, '_get_fsid', lambda: 'abc')
location = 'rbd://abc/pool/image/snap'
mock_proxy = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RBDVolumeProxy')
driver.RBDVolumeProxy(self.driver, 'image',
pool='pool',
snapshot='snap',
read_only=True).AndReturn(mock_proxy)
mock_proxy.__enter__().AndReturn(mock_proxy)
mock_proxy.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
self.assertTrue(self.driver._is_cloneable(location))
def test_uncloneable_different_fsid(self):
self.stubs.Set(self.driver, '_get_fsid', lambda: 'abc')
location = 'rbd://def/pool/image/snap'
self.assertFalse(self.driver._is_cloneable(location))
def test_uncloneable_unreadable(self):
self.stubs.Set(self.driver, '_get_fsid', lambda: 'abc')
location = 'rbd://abc/pool/image/snap'
self.stubs.Set(self.rbd, 'Error', test.TestingException)
self.mox.StubOutWithMock(driver, 'RBDVolumeProxy')
driver.RBDVolumeProxy(self.driver, 'image',
pool='pool',
snapshot='snap',
read_only=True).AndRaise(test.TestingException)
self.mox.ReplayAll()
self.assertFalse(self.driver._is_cloneable(location))
def _copy_image(self):
@contextlib.contextmanager
def fake_temp_file(dir):
class FakeTmp:
def __init__(self, name):
self.name = name
yield FakeTmp('test')
self.stubs.Set(tempfile, 'NamedTemporaryFile', fake_temp_file)
self.stubs.Set(os.path, 'exists', lambda x: True)
self.stubs.Set(image_utils, 'fetch_to_raw', lambda w, x, y, z: None)
self.stubs.Set(self.driver, 'delete_volume', lambda x: None)
self.stubs.Set(self.driver, '_resize', lambda x: None)
self.driver.copy_image_to_volume(None, {'name': 'test',
'size': 1},
FakeImageService(), None)
def test_copy_image_no_volume_tmp(self):
self.configuration.volume_tmp_dir = None
self._copy_image()
def test_copy_image_volume_tmp(self):
self.configuration.volume_tmp_dir = '/var/run/cinder/tmp'
self._copy_image()
def test_update_volume_stats(self):
self.stubs.Set(self.driver.configuration, 'safe_get', lambda x: 'RBD')
mock_client = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RADOSClient')
driver.RADOSClient(self.driver).AndReturn(mock_client)
mock_client.__enter__().AndReturn(mock_client)
self.mox.StubOutWithMock(mock_client, 'cluster')
mock_client.cluster.get_cluster_stats().AndReturn(dict(
kb=1234567890,
kb_used=4567890,
kb_avail=1000000000,
num_objects=4683))
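        # Expected capacities below are these kB figures converted to GiB:
        # 1234567890 // 2**20 == 1177 and 1000000000 // 2**20 == 953.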
mock_client.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
expected = dict(
volume_backend_name='RBD',
vendor_name='Open Source',
driver_version=self.driver.VERSION,
storage_protocol='ceph',
total_capacity_gb=1177,
free_capacity_gb=953,
reserved_percentage=0)
actual = self.driver.get_volume_stats(True)
self.assertDictMatch(expected, actual)
def test_update_volume_stats_error(self):
self.stubs.Set(self.driver.configuration, 'safe_get', lambda x: 'RBD')
mock_client = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RADOSClient')
driver.RADOSClient(self.driver).AndReturn(mock_client)
mock_client.__enter__().AndReturn(mock_client)
self.mox.StubOutWithMock(mock_client, 'cluster')
self.stubs.Set(self.rados, 'Error', test.TestingException)
mock_client.cluster.get_cluster_stats().AndRaise(test.TestingException)
mock_client.__exit__(test.TestingException,
mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(None)
self.mox.ReplayAll()
expected = dict(
volume_backend_name='RBD',
vendor_name='Open Source',
driver_version=self.driver.VERSION,
storage_protocol='ceph',
total_capacity_gb='unknown',
free_capacity_gb='unknown',
reserved_percentage=0)
actual = self.driver.get_volume_stats(True)
self.assertDictMatch(expected, actual)
def test_get_mon_addrs(self):
self.stubs.Set(self.driver, '_execute',
lambda *a: (CEPH_MON_DUMP, ''))
hosts = ['::1', '::1', '::1', '127.0.0.1', 'example.com']
ports = ['6789', '6790', '6791', '6792', '6791']
self.assertEqual((hosts, ports), self.driver._get_mon_addrs())
def test_initialize_connection(self):
name = 'volume-00000001'
hosts = ['::1', '::1', '::1', '127.0.0.1', 'example.com']
ports = ['6789', '6790', '6791', '6792', '6791']
self.stubs.Set(self.driver, '_get_mon_addrs', lambda: (hosts, ports))
expected = {
'driver_volume_type': 'rbd',
'data': {
'name': '%s/%s' % (self.configuration.rbd_pool,
name),
'hosts': hosts,
'ports': ports,
'auth_enabled': False,
'auth_username': None,
'secret_type': 'ceph',
'secret_uuid': None, }
}
actual = self.driver.initialize_connection(dict(name=name), None)
self.assertDictMatch(expected, actual)
def test_clone(self):
name = u'volume-00000001'
volume = dict(name=name)
src_pool = u'images'
src_image = u'image-name'
src_snap = u'snapshot-name'
mock_src_client = self.mox.CreateMockAnything()
mock_dst_client = self.mox.CreateMockAnything()
mock_rbd = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RADOSClient')
driver.RADOSClient(self.driver, src_pool).AndReturn(mock_src_client)
mock_src_client.__enter__().AndReturn(mock_src_client)
driver.RADOSClient(self.driver).AndReturn(mock_dst_client)
mock_dst_client.__enter__().AndReturn(mock_dst_client)
self.rbd.RBD_FEATURE_LAYERING = 1
self.rbd.RBD().AndReturn(mock_rbd)
mock_rbd.clone(mox.IgnoreArg(),
str(src_image),
str(src_snap),
mox.IgnoreArg(),
str(name),
features=self.rbd.RBD_FEATURE_LAYERING)
mock_dst_client.__exit__(None, None, None).AndReturn(None)
mock_src_client.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
self.driver._clone(volume, src_pool, src_image, src_snap)
def test_extend_volume(self):
fake_name = u'volume-00000001'
fake_size = '20'
fake_vol = {'project_id': 'testprjid', 'name': fake_name,
'size': fake_size,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66'}
self.mox.StubOutWithMock(self.driver, '_resize')
size = int(fake_size) * units.GiB
self.driver._resize(fake_vol, size=size)
self.mox.ReplayAll()
self.driver.extend_volume(fake_vol, fake_size)
self.mox.VerifyAll()
def test_rbd_volume_proxy_init(self):
name = u'volume-00000001'
snap = u'snapshot-name'
self.stubs.Set(self.driver, '_connect_to_rados',
lambda x: (None, None))
self.mox.StubOutWithMock(self.driver, '_disconnect_from_rados')
# no snapshot
self.rbd.Image(None, str(name), snapshot=None, read_only=False) \
.AndReturn(None)
# snapshot
self.rbd.Image(None, str(name), snapshot=str(snap), read_only=True) \
.AndReturn(None)
# error causes disconnect
self.stubs.Set(self.rbd, 'Error', test.TestingException)
self.rbd.Image(None, str(name), snapshot=None, read_only=False) \
.AndRaise(test.TestingException)
self.driver._disconnect_from_rados(None, None)
self.mox.ReplayAll()
driver.RBDVolumeProxy(self.driver, name)
driver.RBDVolumeProxy(self.driver, name, snapshot=snap, read_only=True)
self.assertRaises(test.TestingException,
driver.RBDVolumeProxy, self.driver, name)
def test_connect_to_rados(self):
mock_client = self.mox.CreateMockAnything()
mock_ioctx = self.mox.CreateMockAnything()
self.stubs.Set(self.rados, 'Error', test.TestingException)
# default configured pool
self.rados.Rados(rados_id=None, conffile=None).AndReturn(mock_client)
mock_client.connect()
mock_client.open_ioctx('rbd').AndReturn(mock_ioctx)
# different pool
self.rados.Rados(rados_id=None, conffile=None).AndReturn(mock_client)
mock_client.connect()
mock_client.open_ioctx('images').AndReturn(mock_ioctx)
# error
self.rados.Rados(rados_id=None, conffile=None).AndReturn(mock_client)
mock_client.connect()
mock_client.open_ioctx('rbd').AndRaise(test.TestingException)
mock_client.shutdown()
self.mox.ReplayAll()
self.assertEqual((mock_client, mock_ioctx),
self.driver._connect_to_rados())
self.assertEqual((mock_client, mock_ioctx),
self.driver._connect_to_rados('images'))
self.assertRaises(test.TestingException, self.driver._connect_to_rados)
class ManagedRBDTestCase(DriverTestCase):
driver_name = "cinder.volume.drivers.rbd.RBDDriver"
def setUp(self):
super(ManagedRBDTestCase, self).setUp()
fake_image.stub_out_image_service(self.stubs)
self.volume.driver.set_initialized()
def _clone_volume_from_image(self, expected_status,
clone_works=True):
"""Try to clone a volume from an image, and check the status
afterwards.
"""
def fake_clone_image(volume, image_location, image_id):
return {'provider_location': None}, True
def fake_clone_error(volume, image_location, image_id):
raise exception.CinderException()
self.stubs.Set(self.volume.driver, '_is_cloneable', lambda x: True)
if clone_works:
self.stubs.Set(self.volume.driver, 'clone_image', fake_clone_image)
else:
self.stubs.Set(self.volume.driver, 'clone_image', fake_clone_error)
image_id = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
volume_id = 1
# creating volume testdata
db.volume_create(self.context,
{'id': volume_id,
'updated_at': timeutils.utcnow(),
'display_description': 'Test Desc',
'size': 20,
'status': 'creating',
'instance_uuid': None,
'host': 'dummy'})
try:
if clone_works:
self.volume.create_volume(self.context,
volume_id,
image_id=image_id)
else:
self.assertRaises(exception.CinderException,
self.volume.create_volume,
self.context,
volume_id,
image_id=image_id)
volume = db.volume_get(self.context, volume_id)
self.assertEqual(volume['status'], expected_status)
finally:
# cleanup
db.volume_destroy(self.context, volume_id)
def test_create_vol_from_image_status_available(self):
"""Verify that before cloning, an image is in the available state."""
self._clone_volume_from_image('available', True)
def test_create_vol_from_image_status_error(self):
"""Verify that before cloning, an image is in the available state."""
self._clone_volume_from_image('error', False)
def test_clone_image(self):
# Test Failure Case(s)
expected = ({}, False)
self.stubs.Set(self.volume.driver, '_is_cloneable', lambda x: False)
image_loc = (object(), object())
actual = self.volume.driver.clone_image(object(), image_loc, object())
self.assertEqual(expected, actual)
self.stubs.Set(self.volume.driver, '_is_cloneable', lambda x: True)
self.assertEqual(expected,
self.volume.driver.clone_image(object(), None, None))
# Test Success Case(s)
expected = ({'provider_location': None}, True)
self.stubs.Set(self.volume.driver, '_parse_location',
lambda x: ('a', 'b', 'c', 'd'))
self.stubs.Set(self.volume.driver, '_clone', lambda *args: None)
self.stubs.Set(self.volume.driver, '_resize', lambda *args: None)
actual = self.volume.driver.clone_image(object(), image_loc, object())
self.assertEqual(expected, actual)
def test_clone_success(self):
self.stubs.Set(self.volume.driver, '_is_cloneable', lambda x: True)
self.stubs.Set(self.volume.driver, 'clone_image', lambda a, b, c: True)
image_id = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
self.assertTrue(self.volume.driver.clone_image({}, image_id, image_id))
| # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 <NAME>
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import contextlib
import mox
import os
import tempfile
from cinder import db
from cinder import exception
from cinder.image import image_utils
from cinder.openstack.common import log as logging
from cinder.openstack.common import timeutils
from cinder import test
from cinder.tests.backup.fake_rados import mock_rados
from cinder.tests.backup.fake_rados import mock_rbd
from cinder.tests.image import fake as fake_image
from cinder.tests.test_volume import DriverTestCase
from cinder import units
from cinder.volume import configuration as conf
import cinder.volume.drivers.rbd as driver
LOG = logging.getLogger(__name__)
CEPH_MON_DUMP = """dumped monmap epoch 1
{ "epoch": 1,
"fsid": "33630410-6d93-4d66-8e42-3b953cf194aa",
"modified": "2013-05-22 17:44:56.343618",
"created": "2013-05-22 17:44:56.343618",
"mons": [
{ "rank": 0,
"name": "a",
"addr": "[::1]:6789\/0"},
{ "rank": 1,
"name": "b",
"addr": "[::1]:6790\/0"},
{ "rank": 2,
"name": "c",
"addr": "[::1]:6791\/0"},
{ "rank": 3,
"name": "d",
"addr": "127.0.0.1:6792\/0"},
{ "rank": 4,
"name": "e",
"addr": "example.com:6791\/0"}],
"quorum": [
0,
1,
2]}
"""
class FakeImageService:
def download(self, context, image_id, path):
pass
class TestUtil(test.TestCase):
def test_ascii_str(self):
self.assertIsNone(driver.ascii_str(None))
self.assertEqual('foo', driver.ascii_str('foo'))
self.assertEqual('foo', driver.ascii_str(u'foo'))
self.assertRaises(UnicodeEncodeError,
driver.ascii_str, 'foo' + unichr(300))
class RBDTestCase(test.TestCase):
def setUp(self):
super(RBDTestCase, self).setUp()
def fake_execute(*args, **kwargs):
return '', ''
self.configuration = mox.MockObject(conf.Configuration)
self.configuration.volume_tmp_dir = None
self.configuration.rbd_pool = 'rbd'
self.configuration.rbd_ceph_conf = None
self.configuration.rbd_secret_uuid = None
self.configuration.rbd_user = None
self.configuration.append_config_values(mox.IgnoreArg())
self.rados = self.mox.CreateMockAnything()
self.rbd = self.mox.CreateMockAnything()
self.driver = driver.RBDDriver(execute=fake_execute,
configuration=self.configuration,
rados=self.rados,
rbd=self.rbd)
self.driver.set_initialized()
def test_create_volume(self):
name = u'volume-00000001'
size = 1
volume = dict(name=name, size=size)
mock_client = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RADOSClient')
driver.RADOSClient(self.driver).AndReturn(mock_client)
mock_client.__enter__().AndReturn(mock_client)
self.rbd.RBD_FEATURE_LAYERING = 1
_mock_rbd = self.mox.CreateMockAnything()
self.rbd.RBD().AndReturn(_mock_rbd)
_mock_rbd.create(mox.IgnoreArg(), str(name), size * 1024 ** 3,
old_format=False,
features=self.rbd.RBD_FEATURE_LAYERING)
mock_client.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
self.driver.create_volume(volume)
def test_delete_volume(self):
name = u'volume-00000001'
volume = dict(name=name)
# Setup librbd stubs
self.stubs.Set(self.driver, 'rados', mock_rados)
self.stubs.Set(self.driver, 'rbd', mock_rbd)
class mock_client(object):
def __init__(self, *args, **kwargs):
self.ioctx = None
def __enter__(self, *args, **kwargs):
return self
def __exit__(self, type_, value, traceback):
pass
self.stubs.Set(driver, 'RADOSClient', mock_client)
self.stubs.Set(self.driver, '_get_backup_snaps',
lambda *args: None)
self.stubs.Set(self.driver.rbd.Image, 'list_snaps',
lambda *args: [])
self.stubs.Set(self.driver.rbd.Image, 'parent_info',
lambda *args: (None, None, None))
self.stubs.Set(self.driver.rbd.Image, 'unprotect_snap',
lambda *args: None)
self.driver.delete_volume(volume)
def test_create_snapshot(self):
vol_name = u'volume-00000001'
snap_name = u'snapshot-name'
snapshot = dict(volume_name=vol_name, name=snap_name)
mock_proxy = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RBDVolumeProxy')
driver.RBDVolumeProxy(self.driver, vol_name) \
.AndReturn(mock_proxy)
mock_proxy.__enter__().AndReturn(mock_proxy)
mock_proxy.create_snap(str(snap_name))
self.rbd.RBD_FEATURE_LAYERING = 1
mock_proxy.protect_snap(str(snap_name))
mock_proxy.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
self.driver.create_snapshot(snapshot)
def test_delete_snapshot(self):
vol_name = u'volume-00000001'
snap_name = u'snapshot-name'
snapshot = dict(volume_name=vol_name, name=snap_name)
mock_proxy = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RBDVolumeProxy')
driver.RBDVolumeProxy(self.driver, vol_name) \
.AndReturn(mock_proxy)
mock_proxy.__enter__().AndReturn(mock_proxy)
self.rbd.RBD_FEATURE_LAYERING = 1
mock_proxy.unprotect_snap(str(snap_name))
mock_proxy.remove_snap(str(snap_name))
mock_proxy.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
self.driver.delete_snapshot(snapshot)
def test_create_cloned_volume(self):
src_name = u'volume-00000001'
dst_name = u'volume-00000002'
# Setup librbd stubs
self.stubs.Set(self.driver, 'rados', mock_rados)
self.stubs.Set(self.driver, 'rbd', mock_rbd)
self.driver.rbd.RBD_FEATURE_LAYERING = 1
class mock_client(object):
def __init__(self, *args, **kwargs):
self.ioctx = None
def __enter__(self, *args, **kwargs):
return self
def __exit__(self, type_, value, traceback):
pass
self.stubs.Set(driver, 'RADOSClient', mock_client)
def mock_clone(*args, **kwargs):
pass
self.stubs.Set(self.driver.rbd.RBD, 'clone', mock_clone)
self.stubs.Set(self.driver.rbd.Image, 'list_snaps',
lambda *args: [{'name': 'snap1'}, {'name': 'snap2'}])
self.stubs.Set(self.driver.rbd.Image, 'parent_info',
lambda *args: (None, None, None))
self.stubs.Set(self.driver.rbd.Image, 'protect_snap',
lambda *args: None)
self.driver.create_cloned_volume(dict(name=dst_name),
dict(name=src_name))
def test_good_locations(self):
locations = ['rbd://fsid/pool/image/snap',
'rbd://%2F/%2F/%2F/%2F', ]
map(self.driver._parse_location, locations)
def test_bad_locations(self):
locations = ['rbd://image',
'http://path/to/somewhere/else',
'rbd://image/extra',
'rbd://image/',
'rbd://fsid/pool/image/',
'rbd://fsid/pool/image/snap/',
'rbd://///', ]
for loc in locations:
self.assertRaises(exception.ImageUnacceptable,
self.driver._parse_location,
loc)
self.assertFalse(self.driver._is_cloneable(loc))
def test_cloneable(self):
self.stubs.Set(self.driver, '_get_fsid', lambda: 'abc')
location = 'rbd://abc/pool/image/snap'
mock_proxy = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RBDVolumeProxy')
driver.RBDVolumeProxy(self.driver, 'image',
pool='pool',
snapshot='snap',
read_only=True).AndReturn(mock_proxy)
mock_proxy.__enter__().AndReturn(mock_proxy)
mock_proxy.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
self.assertTrue(self.driver._is_cloneable(location))
def test_uncloneable_different_fsid(self):
self.stubs.Set(self.driver, '_get_fsid', lambda: 'abc')
location = 'rbd://def/pool/image/snap'
self.assertFalse(self.driver._is_cloneable(location))
def test_uncloneable_unreadable(self):
self.stubs.Set(self.driver, '_get_fsid', lambda: 'abc')
location = 'rbd://abc/pool/image/snap'
self.stubs.Set(self.rbd, 'Error', test.TestingException)
self.mox.StubOutWithMock(driver, 'RBDVolumeProxy')
driver.RBDVolumeProxy(self.driver, 'image',
pool='pool',
snapshot='snap',
read_only=True).AndRaise(test.TestingException)
self.mox.ReplayAll()
self.assertFalse(self.driver._is_cloneable(location))
def _copy_image(self):
@contextlib.contextmanager
def fake_temp_file(dir):
class FakeTmp:
def __init__(self, name):
self.name = name
yield FakeTmp('test')
self.stubs.Set(tempfile, 'NamedTemporaryFile', fake_temp_file)
self.stubs.Set(os.path, 'exists', lambda x: True)
self.stubs.Set(image_utils, 'fetch_to_raw', lambda w, x, y, z: None)
self.stubs.Set(self.driver, 'delete_volume', lambda x: None)
self.stubs.Set(self.driver, '_resize', lambda x: None)
self.driver.copy_image_to_volume(None, {'name': 'test',
'size': 1},
FakeImageService(), None)
def test_copy_image_no_volume_tmp(self):
self.configuration.volume_tmp_dir = None
self._copy_image()
def test_copy_image_volume_tmp(self):
self.configuration.volume_tmp_dir = '/var/run/cinder/tmp'
self._copy_image()
def test_update_volume_stats(self):
self.stubs.Set(self.driver.configuration, 'safe_get', lambda x: 'RBD')
mock_client = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RADOSClient')
driver.RADOSClient(self.driver).AndReturn(mock_client)
mock_client.__enter__().AndReturn(mock_client)
self.mox.StubOutWithMock(mock_client, 'cluster')
mock_client.cluster.get_cluster_stats().AndReturn(dict(
kb=1234567890,
kb_used=4567890,
kb_avail=1000000000,
num_objects=4683))
mock_client.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
expected = dict(
volume_backend_name='RBD',
vendor_name='Open Source',
driver_version=self.driver.VERSION,
storage_protocol='ceph',
total_capacity_gb=1177,
free_capacity_gb=953,
reserved_percentage=0)
actual = self.driver.get_volume_stats(True)
self.assertDictMatch(expected, actual)
def test_update_volume_stats_error(self):
self.stubs.Set(self.driver.configuration, 'safe_get', lambda x: 'RBD')
mock_client = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RADOSClient')
driver.RADOSClient(self.driver).AndReturn(mock_client)
mock_client.__enter__().AndReturn(mock_client)
self.mox.StubOutWithMock(mock_client, 'cluster')
self.stubs.Set(self.rados, 'Error', test.TestingException)
mock_client.cluster.get_cluster_stats().AndRaise(test.TestingException)
mock_client.__exit__(test.TestingException,
mox.IgnoreArg(), mox.IgnoreArg()).AndReturn(None)
self.mox.ReplayAll()
expected = dict(
volume_backend_name='RBD',
vendor_name='Open Source',
driver_version=self.driver.VERSION,
storage_protocol='ceph',
total_capacity_gb='unknown',
free_capacity_gb='unknown',
reserved_percentage=0)
actual = self.driver.get_volume_stats(True)
self.assertDictMatch(expected, actual)
def test_get_mon_addrs(self):
self.stubs.Set(self.driver, '_execute',
lambda *a: (CEPH_MON_DUMP, ''))
hosts = ['::1', '::1', '::1', '127.0.0.1', 'example.com']
ports = ['6789', '6790', '6791', '6792', '6791']
self.assertEqual((hosts, ports), self.driver._get_mon_addrs())
def test_initialize_connection(self):
name = 'volume-00000001'
hosts = ['::1', '::1', '::1', '127.0.0.1', 'example.com']
ports = ['6789', '6790', '6791', '6792', '6791']
self.stubs.Set(self.driver, '_get_mon_addrs', lambda: (hosts, ports))
expected = {
'driver_volume_type': 'rbd',
'data': {
'name': '%s/%s' % (self.configuration.rbd_pool,
name),
'hosts': hosts,
'ports': ports,
'auth_enabled': False,
'auth_username': None,
'secret_type': 'ceph',
'secret_uuid': None, }
}
actual = self.driver.initialize_connection(dict(name=name), None)
self.assertDictMatch(expected, actual)
def test_clone(self):
name = u'volume-00000001'
volume = dict(name=name)
src_pool = u'images'
src_image = u'image-name'
src_snap = u'snapshot-name'
mock_src_client = self.mox.CreateMockAnything()
mock_dst_client = self.mox.CreateMockAnything()
mock_rbd = self.mox.CreateMockAnything()
self.mox.StubOutWithMock(driver, 'RADOSClient')
driver.RADOSClient(self.driver, src_pool).AndReturn(mock_src_client)
mock_src_client.__enter__().AndReturn(mock_src_client)
driver.RADOSClient(self.driver).AndReturn(mock_dst_client)
mock_dst_client.__enter__().AndReturn(mock_dst_client)
self.rbd.RBD_FEATURE_LAYERING = 1
self.rbd.RBD().AndReturn(mock_rbd)
mock_rbd.clone(mox.IgnoreArg(),
str(src_image),
str(src_snap),
mox.IgnoreArg(),
str(name),
features=self.rbd.RBD_FEATURE_LAYERING)
mock_dst_client.__exit__(None, None, None).AndReturn(None)
mock_src_client.__exit__(None, None, None).AndReturn(None)
self.mox.ReplayAll()
self.driver._clone(volume, src_pool, src_image, src_snap)
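    # (RBD_FEATURE_LAYERING enables copy-on-write clones: the new volume is
    # cloned from a snapshot of the source image rather than fully copied,
    # which is exactly the call sequence recorded above.)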
def test_extend_volume(self):
fake_name = u'volume-00000001'
fake_size = '20'
fake_vol = {'project_id': 'testprjid', 'name': fake_name,
'size': fake_size,
'id': 'a720b3c0-d1f0-11e1-9b23-0800200c9a66'}
self.mox.StubOutWithMock(self.driver, '_resize')
size = int(fake_size) * units.GiB
self.driver._resize(fake_vol, size=size)
self.mox.ReplayAll()
self.driver.extend_volume(fake_vol, fake_size)
self.mox.VerifyAll()
def test_rbd_volume_proxy_init(self):
name = u'volume-00000001'
snap = u'snapshot-name'
self.stubs.Set(self.driver, '_connect_to_rados',
lambda x: (None, None))
self.mox.StubOutWithMock(self.driver, '_disconnect_from_rados')
# no snapshot
self.rbd.Image(None, str(name), snapshot=None, read_only=False) \
.AndReturn(None)
# snapshot
self.rbd.Image(None, str(name), snapshot=str(snap), read_only=True) \
.AndReturn(None)
# error causes disconnect
self.stubs.Set(self.rbd, 'Error', test.TestingException)
self.rbd.Image(None, str(name), snapshot=None, read_only=False) \
.AndRaise(test.TestingException)
self.driver._disconnect_from_rados(None, None)
self.mox.ReplayAll()
driver.RBDVolumeProxy(self.driver, name)
driver.RBDVolumeProxy(self.driver, name, snapshot=snap, read_only=True)
self.assertRaises(test.TestingException,
driver.RBDVolumeProxy, self.driver, name)
def test_connect_to_rados(self):
mock_client = self.mox.CreateMockAnything()
mock_ioctx = self.mox.CreateMockAnything()
self.stubs.Set(self.rados, 'Error', test.TestingException)
# default configured pool
self.rados.Rados(rados_id=None, conffile=None).AndReturn(mock_client)
mock_client.connect()
mock_client.open_ioctx('rbd').AndReturn(mock_ioctx)
# different pool
self.rados.Rados(rados_id=None, conffile=None).AndReturn(mock_client)
mock_client.connect()
mock_client.open_ioctx('images').AndReturn(mock_ioctx)
# error
self.rados.Rados(rados_id=None, conffile=None).AndReturn(mock_client)
mock_client.connect()
mock_client.open_ioctx('rbd').AndRaise(test.TestingException)
mock_client.shutdown()
self.mox.ReplayAll()
self.assertEqual((mock_client, mock_ioctx),
self.driver._connect_to_rados())
self.assertEqual((mock_client, mock_ioctx),
self.driver._connect_to_rados('images'))
self.assertRaises(test.TestingException, self.driver._connect_to_rados)
class ManagedRBDTestCase(DriverTestCase):
driver_name = "cinder.volume.drivers.rbd.RBDDriver"
def setUp(self):
super(ManagedRBDTestCase, self).setUp()
fake_image.stub_out_image_service(self.stubs)
self.volume.driver.set_initialized()
def _clone_volume_from_image(self, expected_status,
clone_works=True):
"""Try to clone a volume from an image, and check the status
afterwards.
"""
def fake_clone_image(volume, image_location, image_id):
return {'provider_location': None}, True
def fake_clone_error(volume, image_location, image_id):
raise exception.CinderException()
self.stubs.Set(self.volume.driver, '_is_cloneable', lambda x: True)
if clone_works:
self.stubs.Set(self.volume.driver, 'clone_image', fake_clone_image)
else:
self.stubs.Set(self.volume.driver, 'clone_image', fake_clone_error)
image_id = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
volume_id = 1
# creating volume testdata
db.volume_create(self.context,
{'id': volume_id,
'updated_at': timeutils.utcnow(),
'display_description': 'Test Desc',
'size': 20,
'status': 'creating',
'instance_uuid': None,
'host': 'dummy'})
try:
if clone_works:
self.volume.create_volume(self.context,
volume_id,
image_id=image_id)
else:
self.assertRaises(exception.CinderException,
self.volume.create_volume,
self.context,
volume_id,
image_id=image_id)
volume = db.volume_get(self.context, volume_id)
self.assertEqual(volume['status'], expected_status)
finally:
# cleanup
db.volume_destroy(self.context, volume_id)
def test_create_vol_from_image_status_available(self):
"""Verify that before cloning, an image is in the available state."""
self._clone_volume_from_image('available', True)
def test_create_vol_from_image_status_error(self):
"""Verify that before cloning, an image is in the available state."""
self._clone_volume_from_image('error', False)
def test_clone_image(self):
# Test Failure Case(s)
expected = ({}, False)
self.stubs.Set(self.volume.driver, '_is_cloneable', lambda x: False)
image_loc = (object(), object())
actual = self.volume.driver.clone_image(object(), image_loc, object())
self.assertEqual(expected, actual)
self.stubs.Set(self.volume.driver, '_is_cloneable', lambda x: True)
self.assertEqual(expected,
self.volume.driver.clone_image(object(), None, None))
# Test Success Case(s)
expected = ({'provider_location': None}, True)
self.stubs.Set(self.volume.driver, '_parse_location',
lambda x: ('a', 'b', 'c', 'd'))
self.stubs.Set(self.volume.driver, '_clone', lambda *args: None)
self.stubs.Set(self.volume.driver, '_resize', lambda *args: None)
actual = self.volume.driver.clone_image(object(), image_loc, object())
self.assertEqual(expected, actual)
def test_clone_success(self):
self.stubs.Set(self.volume.driver, '_is_cloneable', lambda x: True)
self.stubs.Set(self.volume.driver, 'clone_image', lambda a, b, c: True)
image_id = 'c905cedb-7281-47e4-8a62-f26bc5fc4c77'
        self.assertTrue(self.volume.driver.clone_image({}, image_id, image_id))
1143. Longest Common Subsequence/1143. Longest Common Subsequence.py | JawadAsifBD/leetcode | 0 | 6631899 | class Solution:
def longestCommonSubsequence(self, text1, text2):
text1 = "!" + text1
text2 = "!" + text2
m, n = len(text1), len(text2)
dp = [[0] * n for _ in range(m)]
dp[0][0] = 1
for i, j in product(range(m), range(n)):
if text1[i] == text2[j]:
dp[i][j] = dp[i-1][j-1] + 1
else:
dp[i][j] = max(dp[i-1][j], dp[i][j-1])
return dp[-1][-1] - 1
| class Solution:
def longestCommonSubsequence(self, text1, text2):
text1 = "!" + text1
text2 = "!" + text2
m, n = len(text1), len(text2)
dp = [[0] * n for _ in range(m)]
dp[0][0] = 1
for i, j in product(range(m), range(n)):
if text1[i] == text2[j]:
dp[i][j] = dp[i-1][j-1] + 1
else:
dp[i][j] = max(dp[i-1][j], dp[i][j-1])
return dp[-1][-1] - 1
| none | 1 | 3.302524 | 3 |
|
src/numerical/pure_python/rootfinding.py | wolfram74/magnetic_symmetry_project | 0 | 6631900 | <reponame>wolfram74/magnetic_symmetry_project<filename>src/numerical/pure_python/rootfinding.py
from scipy import optimize
import scipy
import symbolic_manipulations
import generate_equation
import numpy as np
import random
def find_solutions(positions, filename='Collage.png', iters=100):
# generate important functions
torque_function, potential_function, hessian_function = symbolic_manipulations.derive_functions(generate_equation.get_equations(positions), len(positions))
'''
Determine nature of a solution using the 2nd derivative (Hessian Matrix)
'''
def determine_nature(x):
hessian = hessian_function(x)
eigen_values = np.linalg.eig(hessian)[0]
positive = np.sum([eigen_values > 0])
negative = np.sum([eigen_values < 0])
print(eigen_values)
print(positive, negative)
if (positive) > 0 and (negative) == 0:
return 'local minimum'
if (positive) ==0 and (negative) > 0:
return 'local maximum'
if (positive) > 0 and (negative) > 0:
return 'saddle point'
return 'indeterminate' # highly unlikely to happen
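    # (Worked examples of the classification above: eigenvalues (2.0, 0.5)
    # are all positive -> 'local minimum'; (-1.0, -0.5) -> 'local maximum';
    # (-1.0, 3.0) mixes signs -> 'saddle point'.)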
# test near equality of two floats
def has_seen(seen_u, u):
for x in seen_u:
if abs(x-u) < 0.00001:
return True
return False
# seen_u: array of the potential energies of the solutions that have been found
# seen: array of the corresponding rotations for each solution
seen_u = []
seen = []
for i in range(1, iters):
if not i % 1000:
print(' '+str(i))
output = scipy.optimize.minimize(torque_function, [random.uniform(0, 2*np.pi) for i in range(len(positions))])
sol = output['x']
#print(sol)
u = potential_function(sol)
print(u)
if not has_seen(seen_u, u): # don't double count identical or degenerate solutions
tau = torque_function(sol)
seen_u.append(u)
seen.append(sol)
print('candidate solution no.'+str(len(seen))+' found on iter.'+str(int(i)))
print(' u='+str(float(u)) + '; tau^2=' + str(float(tau)))
print(' ' + str(sol))
sorted_u = sorted(seen_u)
    indices = [seen_u.index(x) for x in sorted_u]
    sorted_sols = [seen[i] for i in indices]
'''
now we draw a collage of the solutions we've found:
'''
import graphics
torque_cutoff = 0.0000000001
    # ^^ we are quite confident that genuine solutions will have converged their sum of torques-squared to within 10^-10 of zero
candidate_sols = sorted_sols
n=1
cb = graphics.collage_builder()
for sol in candidate_sols:
tau = torque_function(sol)
if tau < torque_cutoff:
print('solution no.:'+str(n)+':'+str(sol))
n = n + 1
rotations = [np.array([np.cos(theta), np.sin(theta)]) for theta in sol]
cb.add_solution(positions,rotations, potential_function(sol), tau, determine_nature(sol))
    cb.create_collage(filename)
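# Hypothetical invocation (assumes this project's symbolic_manipulations,
# generate_equation and graphics modules are importable; the coordinates
# below are illustrative magnet positions, not values from the project):
# import numpy as np
# find_solutions([np.array([0., 0.]), np.array([1., 0.]),
#                 np.array([0., 1.]), np.array([1., 1.])],
#                filename='square_collage.png', iters=5000)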
tests/test_mixins.py | ejulio/web-poet | 0 | 6631901 | <reponame>ejulio/web-poet
import pytest
from web_poet.mixins import ResponseShortcutsMixin
from web_poet.page_inputs import ResponseData
class MyClass(ResponseShortcutsMixin):
def __init__(self, response: ResponseData):
self.response = response
@pytest.fixture
def my_instance(book_list_html_response):
return MyClass(book_list_html_response)
def test_url(my_instance):
assert my_instance.url == 'http://book.toscrape.com/'
def test_html(my_instance, book_list_html):
assert my_instance.html == book_list_html
def test_xpath(my_instance):
title = my_instance.xpath('.//title/text()').get().strip()
assert title == 'All products | Books to Scrape - Sandbox'
def test_css(my_instance):
title = my_instance.css('title::text').get().strip()
    assert title == 'All products | Books to Scrape - Sandbox'
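# (These tests pin the mixin's contract: url, html, xpath and css are
# shortcuts resolved from self.response, so any page object holding a
# ResponseData gets them for free.)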
tests/test_model/test_layer/test_squeeze_and_excitation_block.py | ZJCV/PyCls | 110 | 6631902 | <reponame>ZJCV/PyCls
# -*- coding: utf-8 -*-
"""
@date: 2020/12/14 7:14 PM
@file: test_squeeze_and_excitation_block.py
@author: zj
@description:
"""
import torch
from zcls.model.layers.squeeze_and_excitation_block import SqueezeAndExcitationBlock1D, \
SqueezeAndExcitationBlock2D, SqueezeAndExcitationBlock3D
def test_squeeze_and_excitation_block_1d():
N = 10
C = 128
reduction = 16
data = torch.randn(N, C, 7)
model = SqueezeAndExcitationBlock1D(in_channels=C, reduction=reduction)
print(model)
outputs = model(data)
print(outputs.shape)
assert outputs.shape == (N, C, 7)
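# (Squeeze-and-excitation only reweights channels, so the output shape must
# equal the input shape; the 2D and 3D tests below assert the same invariant.)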
def test_squeeze_and_excitation_block_2d():
N = 10
C = 128
reduction = 16
data = torch.randn(N, C, 7, 7)
model = SqueezeAndExcitationBlock2D(in_channels=C, reduction=reduction)
print(model)
outputs = model(data)
print(outputs.shape)
assert outputs.shape == (N, C, 7, 7)
def test_squeeze_and_excitation_block_3d():
N = 10
C = 128
reduction = 16
data = torch.randn(N, C, 4, 7, 7)
model = SqueezeAndExcitationBlock3D(in_channels=C, reduction=reduction)
print(model)
outputs = model(data)
print(outputs.shape)
assert outputs.shape == (N, C, 4, 7, 7)
if __name__ == '__main__':
test_squeeze_and_excitation_block_1d()
test_squeeze_and_excitation_block_2d()
    test_squeeze_and_excitation_block_3d()
susemanager/system.py | DreadlordGG/python-susemanager | 0 | 6631903 | <gh_stars>0
import xmlrpclib
class list:
def __init__(self, client, session):
self.session = session
self.client = client
    def ActivationKeys(self, sid):
        return self.client.system.listActivationKeys(self.session, sid)
@property
def ActiveSystems(self):
return self.client.system.listActiveSystems(self.session)
def ActiveSystemsDetails(self, *args):
return self.client.system.listActiveSystemsDetails(self.session, args)
@property
def Systems(self):
return self.client.system.listSystems(self.session)
def SystemsWithPackage(self, pid):
        return self.client.system.listSystemsWithPackage(self.session, pid)
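# Hypothetical usage against a SUSE Manager server (auth.login is the
# standard XML-RPC entry point; host and credentials are illustrative):
# client = xmlrpclib.Server("https://manager.example.com/rpc/api")
# session = client.auth.login("admin", "secret")
# print(list(client, session).ActiveSystems)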
test/id3v1.py | Taiko2k/stagger | 21 | 6631904 | #!/usr/bin/env python3
#
# id3v1.py
# From the stagger project: http://code.google.com/p/stagger/
#
# Copyright (c) 2009-2011 <NAME> <<EMAIL>>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
######################################################################
# This test automatically downloads the ID3v1 test suite by <NAME>,
# and runs stagger's id3v1 decoder on all 274 test cases, comparing
# decoded field values to expected values listed in the test suite's
# generation.log file.
#
# Nilsson's tests are rather strict -- stagger intentionally accepts broken
# id3v1 tags, so it only complains on test case 3 (bad tag header).
#
# Test cases 7 and 8 (junk after string terminator) include NUL characters
# in field values in the log file, which is likely a mistake. Their
# description prescribes that the NULs and the data after them should
# not show up for the user, so I override the test case's field values to check that.
#
# Test case 12 has leading spaces in the year field which are intentionally
# stripped by stagger.
#
# In two test cases, Nilsson uses genre names that differ from most other
# sources/implementations:
#
# Test case Genre # Genre in test Genre elsewhere
# 151 136 Christian <NAME>
# 155 140 Contemporary Contemporary Christian
#
# Stagger follows the de facto ID3v1 standard and resolves 136 and 140 to
# the insane genres on the right.
import unittest
import os
import os.path
import re
import string
import urllib.request
import tarfile
import random
import io
import warnings
from stagger.errors import *
import stagger.id3v1
testsuite_url = r"http://id3.org/Developer%20Information?action=AttachFile&do=get&target=id3v1_test_suite.tar.gz"
testsuite_file = os.path.join(os.path.dirname(__file__), "id3v1_test_suite.tar.gz")
testsuite_log = "id3v1/generation.log"
def download_testsuite():
try:
with open(testsuite_file, "rb") as file:
pass
except IOError:
urllib.request.urlretrieve(testsuite_url, testsuite_file)
class ID3v1TestCase(unittest.TestCase):
def parse_log(self):
log = self.tar.extractfile(testsuite_log)
try:
tests = []
tag = {}
for bline in log:
line = bline.decode('iso-8859-1')
m = re.match(r'^Test case ([0-9]+)$', line)
if m is not None:
tag["id"] = int(m.group(1))
continue
m = re.match(r'^Generated test file "([a-zA-Z0-9_.]+)"$', line)
if m is not None:
tag["filename"] = m.group(1)
continue
m = re.match(r'^([a-z]+) *: "([^"]*)"$', line)
if m is not None:
tag[m.group(1)] = m.group(2)
continue
m = re.match(r'^version: (1\.[01])$', line)
if m is not None:
tag["version"] = m.group(1)
continue
m = re.match(r'^genre : ([0-9]+ \(.*\))$', line)
if m is not None:
tag["genre"] = m.group(1)
continue
m = re.match(r'^$', line)
if m is not None and tag:
tests.append(tag)
tag = {}
return tests
finally:
log.close()
def setUp(self):
download_testsuite()
self.tar = tarfile.open(testsuite_file)
def tearDown(self):
self.tar.close()
def testID3v1Conformance(self):
for test in self.parse_log():
# Fix expected values in test cases 7-8 (junk after string terminator).
if test["id"] in [7, 8]:
for field in ["title", "artist", "album", "comment"]:
test[field] = "12345"
# Fix expected value in test case 12 (strip year field).
if test["id"] == 12:
test["year"] = test["year"].strip(string.whitespace)
# Fix expected genre names in test cases 151 and 155 to de-facto standard values.
if test["id"] == 151:
test["genre"] = '136 (Christian Gangsta Rap)'
if test["id"] == 155:
test["genre"] = '140 (Contemporary Christian)'
filename = 'id3v1/' + test["filename"]
file = self.tar.extractfile(filename)
try:
# Test case 3 contains no valid ID3v1 tag.
if test["id"] == 3:
self.assertRaises(NoTagError, stagger.id3v1.Tag1.read, file)
continue
tag = stagger.id3v1.Tag1.read(file)
for field in ["title", "artist", "album",
"year", "comment", "track", "genre"]:
if field in test:
self.assertEqual(test[field], getattr(tag, field),
"Value mismatch in field " + field
+ " of testcase " + str(test["id"])
+ ": '" + test[field] + "' vs '"
+ getattr(tag, field) + "'")
# Try encoding the tag and comparing binary data
if test["id"] not in [7, 8, 12]:
data = tag.encode()
file.seek(-128, 2)
data2 = file.read(128)
self.assertEqual(data, data2, "Data mismatch in testcase " + str(test["id"]))
finally:
file.close()
suite = unittest.TestLoader().loadTestsFromTestCase(ID3v1TestCase)
if __name__ == "__main__":
warnings.simplefilter("always", stagger.Warning)
unittest.main(defaultTest="suite")
| #!/usr/bin/env python3
#
# id3v1.py
# From the stagger project: http://code.google.com/p/stagger/
#
# Copyright (c) 2009-2011 <NAME> <<EMAIL>>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# - Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# - Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
# FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
# COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
# BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
# LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
# ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
######################################################################
# This test automatically downloads the ID3v1 test suite by <NAME>,
# and runs stagger's id3v1 decoder on all 274 test cases, comparing
# decoded field values to expected values listed in the test suite's
# generation.log file.
#
# Nilsson's tests are rather strict -- stagger intentionally accepts broken
# id3v1 tags, so it only complains on test case 3 (bad tag header).
#
# Test cases 7 and 8 (junk after string terminator) include NUL characters
# in field values in the log file, which is likely a mistake. Their
# description prescribes that the NULs and the data after them should
# not show up for the user, so I override the test case's field values to check that.
#
# Test case 12 has leading spaces in the year field which are intentionally
# stripped by stagger.
#
# In two test cases, Nilsson uses genre names that differ from most other
# sources/implementations:
#
# Test case Genre # Genre in test Genre elsewhere
# 151 136 Christian <NAME>
# 155 140 Contemporary Contemporary Christian
#
# Stagger follows the de facto ID3v1 standard and resolves 136 and 140 to
# the insane genres on the right.
import unittest
import os
import os.path
import re
import string
import urllib.request
import tarfile
import random
import io
import warnings
from stagger.errors import *
import stagger.id3v1
testsuite_url = r"http://id3.org/Developer%20Information?action=AttachFile&do=get&target=id3v1_test_suite.tar.gz"
testsuite_file = os.path.join(os.path.dirname(__file__), "id3v1_test_suite.tar.gz")
testsuite_log = "id3v1/generation.log"
def download_testsuite():
try:
with open(testsuite_file, "rb") as file:
pass
except IOError:
urllib.request.urlretrieve(testsuite_url, testsuite_file)
class ID3v1TestCase(unittest.TestCase):
def parse_log(self):
log = self.tar.extractfile(testsuite_log)
try:
tests = []
tag = {}
for bline in log:
line = bline.decode('iso-8859-1')
m = re.match(r'^Test case ([0-9]+)$', line)
if m is not None:
tag["id"] = int(m.group(1))
continue
m = re.match(r'^Generated test file "([a-zA-Z0-9_.]+)"$', line)
if m is not None:
tag["filename"] = m.group(1)
continue
m = re.match(r'^([a-z]+) *: "([^"]*)"$', line)
if m is not None:
tag[m.group(1)] = m.group(2)
continue
m = re.match(r'^version: (1\.[01])$', line)
if m is not None:
tag["version"] = m.group(1)
continue
m = re.match(r'^genre : ([0-9]+ \(.*\))$', line)
if m is not None:
tag["genre"] = m.group(1)
continue
m = re.match(r'^$', line)
if m is not None and tag:
tests.append(tag)
tag = {}
return tests
finally:
log.close()
def setUp(self):
download_testsuite()
self.tar = tarfile.open(testsuite_file)
def tearDown(self):
self.tar.close()
def testID3v1Conformance(self):
for test in self.parse_log():
# Fix expected values in test cases 7-8 (junk after string terminator).
if test["id"] in [7, 8]:
for field in ["title", "artist", "album", "comment"]:
test[field] = "12345"
# Fix expected value in test case 12 (strip year field).
if test["id"] == 12:
test["year"] = test["year"].strip(string.whitespace)
# Fix expected genre names in test cases 151 and 155 to de-facto standard values.
if test["id"] == 151:
test["genre"] = '136 (Christian Gangsta Rap)'
if test["id"] == 155:
test["genre"] = '140 (Contemporary Christian)'
filename = 'id3v1/' + test["filename"]
file = self.tar.extractfile(filename)
try:
# Test case 3 contains no valid ID3v1 tag.
if test["id"] == 3:
self.assertRaises(NoTagError, stagger.id3v1.Tag1.read, file)
continue
tag = stagger.id3v1.Tag1.read(file)
for field in ["title", "artist", "album",
"year", "comment", "track", "genre"]:
if field in test:
self.assertEqual(test[field], getattr(tag, field),
"Value mismatch in field " + field
+ " of testcase " + str(test["id"])
+ ": '" + test[field] + "' vs '"
+ getattr(tag, field) + "'")
# Try encoding the tag and comparing binary data
if test["id"] not in [7, 8, 12]:
data = tag.encode()
file.seek(-128, 2)
data2 = file.read(128)
self.assertEqual(data, data2, "Data mismatch in testcase " + str(test["id"]))
finally:
file.close()
suite = unittest.TestLoader().loadTestsFromTestCase(ID3v1TestCase)
if __name__ == "__main__":
warnings.simplefilter("always", stagger.Warning)
unittest.main(defaultTest="suite")
| en | 0.711061 | #!/usr/bin/env python3 # # id3v1.py # From the stagger project: http://code.google.com/p/stagger/ # # Copyright (c) 2009-2011 <NAME> <<EMAIL>> # All rights reserved. # # Redistribution and use in source and binary forms, with or without # modification, are permitted provided that the following conditions # are met: # # - Redistributions of source code must retain the above copyright # notice, this list of conditions and the following disclaimer. # # - Redistributions in binary form must reproduce the above copyright # notice, this list of conditions and the following disclaimer in # the documentation and/or other materials provided with the # distribution. # # THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS # "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT # LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS # FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE # COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, # INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, # BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; # LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER # CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT # LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN # ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE # POSSIBILITY OF SUCH DAMAGE. ###################################################################### # This test automatically downloads the ID3v1 test suite by <NAME>, # and runs stagger's id3v1 decoder on all 274 test cases, comparing # decoded field values to expected values listed in the test suite's # generation.log file. # # Nilsson's tests are rather strict -- stagger intentionally accepts broken # id3v1 tags, so it only complains on test case 3 (bad tag header). # # Test cases 7 and 8 (junk after string terminator) include NUL characters # in field values in the log file, which is likely a mistake. Their # description prescribes that the NULs and the data after them should # not show up for the user, so I override the test case's field values to check that. # # Test case 12 has leading spaces in the year field which are intentionally # stripped by stagger. # # In two test cases, Nilsson uses genre names that differ from most other # sources/implementations: # # Test case Genre # Genre in test Genre elsewhere # 151 136 Christian <NAME> # 155 140 Contemporary Contemporary Christian # # Stagger follows the de facto ID3v1 standard and resolves 136 and 140 to # the insane genres on the right. # Fix expected values in test cases 7-8 (junk after string terminator). # Fix expected value in test case 12 (strip year field). # Fix expected genre names in test cases 151 and 155 to de-facto standard values. # Test case 3 contains no valid ID3v1 tag. # Try encoding the tag and comparing binary data | 1.417844 | 1 |
advanced_logger/json_encoder/django_json_encoder_copy.py | EveryMundo/python-advanced-logger | 0 | 6631905 | <reponame>EveryMundo/python-advanced-logger
"""
Copyright (c) Django Software Foundation and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Django nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
https://github.com/django/django/blob/master/LICENSE
https://github.com/django/django/blob/1dae4ac1778f04805c0ed62c8debb13b281ba02b/LICENSE
=========
This file is not an endorsement of this package by the Django Foundation or any of its contributors.
All code in the following file was originally written by the Django Software Foundation and individual contributors
as part of the django framework and has been copied into this repository for use
in the case django is not installed in the user's environment
It may potentially become out of date, and it is recommend that `django` be installed as a requirement for this library
Last updated: 2020-09-22
"""
import decimal
import json
import uuid
import datetime
def is_aware(value):
"""
Determine if a given datetime.datetime is aware.
The concept is defined in Python's docs:
https://docs.python.org/library/datetime.html#datetime.tzinfo
Assuming value.tzinfo is either None or a proper datetime.tzinfo,
value.utcoffset() implements the appropriate logic.
"""
return value.utcoffset() is not None
def _get_duration_components(duration):
days = duration.days
seconds = duration.seconds
microseconds = duration.microseconds
minutes = seconds // 60
seconds = seconds % 60
hours = minutes // 60
minutes = minutes % 60
return days, hours, minutes, seconds, microseconds
def duration_iso_string(duration):
if duration < datetime.timedelta(0):
sign = '-'
duration *= -1
else:
sign = ''
days, hours, minutes, seconds, microseconds = _get_duration_components(duration)
ms = '.{:06d}'.format(microseconds) if microseconds else ""
return '{}P{}DT{:02d}H{:02d}M{:02d}{}S'.format(sign, days, hours, minutes, seconds, ms)
class DjangoJSONEncoder(json.JSONEncoder):
"""
JSONEncoder subclass that knows how to encode date/time, decimal types, and
UUIDs.
"""
def default(self, o):
# See "Date Time String Format" in the ECMA-262 specification.
if isinstance(o, datetime.datetime):
r = o.isoformat()
if o.microsecond:
r = r[:23] + r[26:]
if r.endswith('+00:00'):
r = r[:-6] + 'Z'
return r
elif isinstance(o, datetime.date):
return o.isoformat()
elif isinstance(o, datetime.time):
if is_aware(o):
raise ValueError("JSON can't represent timezone-aware times.")
r = o.isoformat()
if o.microsecond:
r = r[:12]
return r
elif isinstance(o, datetime.timedelta):
return duration_iso_string(o)
elif isinstance(o, (decimal.Decimal, uuid.UUID)):
return str(o)
else:
return super().default(o) | """
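# Typical usage (a minimal sketch): hand the class to json.dumps, e.g.
# json.dumps({"when": datetime.datetime.now(), "id": uuid.uuid4()},
#            cls=DjangoJSONEncoder)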
Copyright (c) Django Software Foundation and individual contributors.
All rights reserved.
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of Django nor the names of its contributors may be used
to endorse or promote products derived from this software without
specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON
ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
https://github.com/django/django/blob/master/LICENSE
https://github.com/django/django/blob/1dae4ac1778f04805c0ed62c8debb13b281ba02b/LICENSE
=========
This file is not an endorsement of this package by the Django Foundation or any of its contributors.
All code in the following file was originally written by the Django Software Foundation and individual contributors
as part of the django framework and has been copied into this repository for use
in the case django is not installed in the user's environment
It may potentially become out of date, and it is recommend that `django` be installed as a requirement for this library
Last updated: 2020-09-22
"""
import decimal
import json
import uuid
import datetime
def is_aware(value):
"""
Determine if a given datetime.datetime is aware.
The concept is defined in Python's docs:
https://docs.python.org/library/datetime.html#datetime.tzinfo
Assuming value.tzinfo is either None or a proper datetime.tzinfo,
value.utcoffset() implements the appropriate logic.
"""
return value.utcoffset() is not None
def _get_duration_components(duration):
days = duration.days
seconds = duration.seconds
microseconds = duration.microseconds
minutes = seconds // 60
seconds = seconds % 60
hours = minutes // 60
minutes = minutes % 60
return days, hours, minutes, seconds, microseconds
def duration_iso_string(duration):
if duration < datetime.timedelta(0):
sign = '-'
duration *= -1
else:
sign = ''
days, hours, minutes, seconds, microseconds = _get_duration_components(duration)
ms = '.{:06d}'.format(microseconds) if microseconds else ""
return '{}P{}DT{:02d}H{:02d}M{:02d}{}S'.format(sign, days, hours, minutes, seconds, ms)
class DjangoJSONEncoder(json.JSONEncoder):
"""
JSONEncoder subclass that knows how to encode date/time, decimal types, and
UUIDs.
"""
def default(self, o):
# See "Date Time String Format" in the ECMA-262 specification.
if isinstance(o, datetime.datetime):
r = o.isoformat()
if o.microsecond:
r = r[:23] + r[26:]
if r.endswith('+00:00'):
r = r[:-6] + 'Z'
return r
elif isinstance(o, datetime.date):
return o.isoformat()
elif isinstance(o, datetime.time):
if is_aware(o):
raise ValueError("JSON can't represent timezone-aware times.")
r = o.isoformat()
if o.microsecond:
r = r[:12]
return r
elif isinstance(o, datetime.timedelta):
return duration_iso_string(o)
elif isinstance(o, (decimal.Decimal, uuid.UUID)):
return str(o)
else:
return super().default(o) | en | 0.7851 | Copyright (c) Django Software Foundation and individual contributors. All rights reserved. Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: 1. Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. 2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. 3. Neither the name of Django nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. https://github.com/django/django/blob/master/LICENSE https://github.com/django/django/blob/1dae4ac1778f04805c0ed62c8debb13b281ba02b/LICENSE ========= This file is not an endorsement of this package by the Django Foundation or any of its contributors. All code in the following file was originally written by the Django Software Foundation and individual contributors as part of the django framework and has been copied into this repository for use in the case django is not installed in the user's environment It may potentially become out of date, and it is recommend that `django` be installed as a requirement for this library Last updated: 2020-09-22 Determine if a given datetime.datetime is aware. The concept is defined in Python's docs: https://docs.python.org/library/datetime.html#datetime.tzinfo Assuming value.tzinfo is either None or a proper datetime.tzinfo, value.utcoffset() implements the appropriate logic. JSONEncoder subclass that knows how to encode date/time, decimal types, and UUIDs. # See "Date Time String Format" in the ECMA-262 specification. | 1.161053 | 1 |
tests/test_dannet.py | eaksnes/dasem | 18 | 6631906 | """Test dasem.dannet."""
import pytest
from dasem.dannet import Dannet
@pytest.fixture
def dannet():
return Dannet()
def test_download(dannet):
dannet.download()
def test_glossary(dannet):
assert len(dannet.glossary('virksomhed')) == 3
| """Test dasem.dannet."""
import pytest
from dasem.dannet import Dannet
@pytest.fixture
def dannet():
return Dannet()
def test_download(dannet):
dannet.download()
def test_glossary(dannet):
assert len(dannet.glossary('virksomhed')) == 3
| de | 0.463148 | Test dasem.dannet. | 2.167121 | 2 |
tests/utils/synthetic_utils.py | hanlint/composer | 0 | 6631907 | from typing import Any, Dict, Optional, Type
import pytest
from composer.datasets import GLUEHparams, LMDatasetHparams
from composer.datasets.hparams import DatasetHparams, SyntheticHparamsMixin
from composer.datasets.synthetic_lm import generate_synthetic_tokenizer
from composer.models import (BERTForClassificationHparams, BERTHparams, DeepLabV3Hparams, GPT2Hparams, ModelHparams,
TransformerHparams)
def configure_dataset_for_synthetic(dataset_hparams: DatasetHparams,
model_hparams: Optional[ModelHparams] = None) -> None:
if not isinstance(dataset_hparams, SyntheticHparamsMixin):
pytest.xfail(f"{dataset_hparams.__class__.__name__} does not support synthetic data or num_total_batches")
assert isinstance(dataset_hparams, SyntheticHparamsMixin)
dataset_hparams.use_synthetic = True
if isinstance(model_hparams, TransformerHparams):
if type(model_hparams) not in _model_hparams_to_tokenizer_family:
raise ValueError(f"Model {type(model_hparams)} is currently not supported for synthetic testing!")
tokenizer_family = _model_hparams_to_tokenizer_family[type(model_hparams)]
assert isinstance(dataset_hparams, (GLUEHparams, LMDatasetHparams))
dataset_hparams.tokenizer_name = tokenizer_family
dataset_hparams.max_seq_length = 128
_model_hparams_to_tokenizer_family: Dict[Type[TransformerHparams], str] = {
GPT2Hparams: "gpt2",
BERTForClassificationHparams: "bert",
BERTHparams: "bert"
}
def configure_model_for_synthetic(model_hparams: ModelHparams) -> None:
# configure Transformer-based models for synthetic testing
if isinstance(model_hparams, TransformerHparams):
if type(model_hparams) not in _model_hparams_to_tokenizer_family:
raise ValueError(f"Model {type(model_hparams)} is currently not supported for synthetic testing!")
tokenizer_family = _model_hparams_to_tokenizer_family[type(model_hparams)]
# force a non-pretrained model
model_hparams.use_pretrained = False
model_hparams.pretrained_model_name = None
# generate tokenizers and synthetic models
tokenizer = generate_synthetic_tokenizer(tokenizer_family=tokenizer_family)
model_hparams.tokenizer_name = None
model_hparams.model_config = generate_dummy_model_config(type(model_hparams), tokenizer)
# configure DeepLabV3 models for synthetic testing
if isinstance(model_hparams, DeepLabV3Hparams):
model_hparams.is_backbone_pretrained = False # prevent downloading pretrained weights during test
model_hparams.sync_bn = False # sync_bn throws an error when run on CPU
def generate_dummy_model_config(class_name, tokenizer) -> Dict[str, Any]:
model_to_dummy_mapping = {
BERTHparams: {
"architectures": ["BertForMaskedLM"],
"attention_probs_dropout_prob": 0.1,
"gradient_checkpointing": False,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 64,
"initializer_range": 0.02,
"intermediate_size": 256,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 1,
"num_hidden_layers": 1,
"pad_token_id": tokenizer.pad_token_id,
"position_embedding_type": "absolute",
"transformers_version": "4.6.0.dev0",
"type_vocab_size": 2,
"use_cache": True,
"vocab_size": tokenizer.vocab_size,
},
GPT2Hparams: {
"activation_function": "gelu_new",
"architectures": ["GPT2LMHeadModel"],
"attn_pdrop": 0.1,
"bos_token_id": tokenizer.cls_token_id,
"embd_pdrop": 0.1,
"eos_token_id": tokenizer.cls_token_id,
"initializer_range": 0.02,
"layer_norm_epsilon": 0.00001,
"model_type": "gpt2",
"n_ctx": 128,
"n_embd": 64,
"n_head": 1,
"n_layer": 1,
"n_positions": 128,
"resid_pdrop": 0.1,
"summary_activation": None,
"summary_first_dropout": 0.1,
"summary_proj_to_labels": True,
"summary_type": "cls_index",
"summary_use_proj": True,
"task_specific_params": {
"text-generation": {
"do_sample": True,
"max_length": 50
}
},
"vocab_size": tokenizer.vocab_size
},
BERTForClassificationHparams: {
"architectures": ["BertForSequenceClassification"],
"attention_probs_dropout_prob": 0.1,
"classifier_dropout": None,
"gradient_checkpointing": False,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 64,
"initializer_range": 0.02,
"intermediate_size": 256,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 1,
"num_hidden_layers": 1,
"pad_token_id": tokenizer.pad_token_id,
"position_embedding_type": "absolute",
"transformers_version": "4.16.2",
"type_vocab_size": 2,
"use_cache": True,
"vocab_size": tokenizer.vocab_size
}
}
return model_to_dummy_mapping[class_name]
| from typing import Any, Dict, Optional, Type
import pytest
from composer.datasets import GLUEHparams, LMDatasetHparams
from composer.datasets.hparams import DatasetHparams, SyntheticHparamsMixin
from composer.datasets.synthetic_lm import generate_synthetic_tokenizer
from composer.models import (BERTForClassificationHparams, BERTHparams, DeepLabV3Hparams, GPT2Hparams, ModelHparams,
TransformerHparams)
def configure_dataset_for_synthetic(dataset_hparams: DatasetHparams,
model_hparams: Optional[ModelHparams] = None) -> None:
if not isinstance(dataset_hparams, SyntheticHparamsMixin):
pytest.xfail(f"{dataset_hparams.__class__.__name__} does not support synthetic data or num_total_batches")
assert isinstance(dataset_hparams, SyntheticHparamsMixin)
dataset_hparams.use_synthetic = True
if isinstance(model_hparams, TransformerHparams):
if type(model_hparams) not in _model_hparams_to_tokenizer_family:
raise ValueError(f"Model {type(model_hparams)} is currently not supported for synthetic testing!")
tokenizer_family = _model_hparams_to_tokenizer_family[type(model_hparams)]
assert isinstance(dataset_hparams, (GLUEHparams, LMDatasetHparams))
dataset_hparams.tokenizer_name = tokenizer_family
dataset_hparams.max_seq_length = 128
_model_hparams_to_tokenizer_family: Dict[Type[TransformerHparams], str] = {
GPT2Hparams: "gpt2",
BERTForClassificationHparams: "bert",
BERTHparams: "bert"
}
def configure_model_for_synthetic(model_hparams: ModelHparams) -> None:
# configure Transformer-based models for synthetic testing
if isinstance(model_hparams, TransformerHparams):
if type(model_hparams) not in _model_hparams_to_tokenizer_family:
raise ValueError(f"Model {type(model_hparams)} is currently not supported for synthetic testing!")
tokenizer_family = _model_hparams_to_tokenizer_family[type(model_hparams)]
# force a non-pretrained model
model_hparams.use_pretrained = False
model_hparams.pretrained_model_name = None
# generate tokenizers and synthetic models
tokenizer = generate_synthetic_tokenizer(tokenizer_family=tokenizer_family)
model_hparams.tokenizer_name = None
model_hparams.model_config = generate_dummy_model_config(type(model_hparams), tokenizer)
# configure DeepLabV3 models for synthetic testing
if isinstance(model_hparams, DeepLabV3Hparams):
model_hparams.is_backbone_pretrained = False # prevent downloading pretrained weights during test
model_hparams.sync_bn = False # sync_bn throws an error when run on CPU
def generate_dummy_model_config(class_name, tokenizer) -> Dict[str, Any]:
model_to_dummy_mapping = {
BERTHparams: {
"architectures": ["BertForMaskedLM"],
"attention_probs_dropout_prob": 0.1,
"gradient_checkpointing": False,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 64,
"initializer_range": 0.02,
"intermediate_size": 256,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 1,
"num_hidden_layers": 1,
"pad_token_id": tokenizer.pad_token_id,
"position_embedding_type": "absolute",
"transformers_version": "4.6.0.dev0",
"type_vocab_size": 2,
"use_cache": True,
"vocab_size": tokenizer.vocab_size,
},
GPT2Hparams: {
"activation_function": "gelu_new",
"architectures": ["GPT2LMHeadModel"],
"attn_pdrop": 0.1,
"bos_token_id": tokenizer.cls_token_id,
"embd_pdrop": 0.1,
"eos_token_id": tokenizer.cls_token_id,
"initializer_range": 0.02,
"layer_norm_epsilon": 0.00001,
"model_type": "gpt2",
"n_ctx": 128,
"n_embd": 64,
"n_head": 1,
"n_layer": 1,
"n_positions": 128,
"resid_pdrop": 0.1,
"summary_activation": None,
"summary_first_dropout": 0.1,
"summary_proj_to_labels": True,
"summary_type": "cls_index",
"summary_use_proj": True,
"task_specific_params": {
"text-generation": {
"do_sample": True,
"max_length": 50
}
},
"vocab_size": tokenizer.vocab_size
},
BERTForClassificationHparams: {
"architectures": ["BertForSequenceClassification"],
"attention_probs_dropout_prob": 0.1,
"classifier_dropout": None,
"gradient_checkpointing": False,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 64,
"initializer_range": 0.02,
"intermediate_size": 256,
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 1,
"num_hidden_layers": 1,
"pad_token_id": tokenizer.pad_token_id,
"position_embedding_type": "absolute",
"transformers_version": "4.16.2",
"type_vocab_size": 2,
"use_cache": True,
"vocab_size": tokenizer.vocab_size
}
}
return model_to_dummy_mapping[class_name]
| en | 0.83342 | # configure Transformer-based models for synthetic testing # force a non-pretrained model # generate tokenizers and synthetic models # configure DeepLabV3 models for synthetic testing # prevent downloading pretrained weights during test # sync_bn throws an error when run on CPU | 2.229686 | 2 |
cycada/tools/util.py | Luodian/MADAN | 150 | 6631908 | from functools import partial
import torch
from torch.autograd import Variable
def make_variable(tensor, volatile=False, requires_grad=True):
if torch.cuda.is_available():
tensor = tensor.cuda()
if volatile:
requires_grad = False
return Variable(tensor, volatile=volatile, requires_grad=requires_grad)
def pairwise_distance(x, y):
if not len(x.shape) == len(y.shape):
raise ValueError('Both inputs should be matrices.')
if x.shape[1] != y.shape[1]:
raise ValueError('The number of features should be the same.')
x = x.view(x.shape[0], x.shape[1], 1)
y = torch.transpose(y, 0, 1)
output = torch.sum((x - y) ** 2, 1)
output = torch.transpose(output, 0, 1)
return output
def gaussian_kernel_matrix(x, y, sigmas):
sigmas = sigmas.view(sigmas.shape[0], 1)
beta = 1. / (2. * sigmas)
dist = pairwise_distance(x, y).contiguous()
dist_ = dist.view(1, -1)
s = torch.matmul(beta, dist_)
return torch.sum(torch.exp(-s), 0).view_as(dist)
def maximum_mean_discrepancy(x, y, kernel=gaussian_kernel_matrix):
cost = torch.mean(kernel(x, x))
cost += torch.mean(kernel(y, y))
cost -= 2 * torch.mean(kernel(x, y))
return cost
def mmd_loss(source_features, target_features):
sigmas = [
1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 1, 5, 10, 15, 20, 25, 30, 35, 100,
1e3, 1e4, 1e5, 1e6
]
gaussian_kernel = partial(
gaussian_kernel_matrix, sigmas=Variable(torch.cuda.FloatTensor(sigmas))
)
loss_value = maximum_mean_discrepancy(source_features, target_features, kernel=gaussian_kernel)
return loss_value
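# Hypothetical smoke test (the sigmas above are wrapped in a CUDA tensor, so
# a GPU is required; shapes are illustrative):
# src = torch.randn(32, 128).cuda()
# tgt = torch.randn(32, 128).cuda()
# print(mmd_loss(src, tgt))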
| from functools import partial
import torch
from torch.autograd import Variable
def make_variable(tensor, volatile=False, requires_grad=True):
if torch.cuda.is_available():
tensor = tensor.cuda()
if volatile:
requires_grad = False
return Variable(tensor, volatile=volatile, requires_grad=requires_grad)
def pairwise_distance(x, y):
if not len(x.shape) == len(y.shape):
raise ValueError('Both inputs should be matrices.')
if x.shape[1] != y.shape[1]:
raise ValueError('The number of features should be the same.')
x = x.view(x.shape[0], x.shape[1], 1)
y = torch.transpose(y, 0, 1)
output = torch.sum((x - y) ** 2, 1)
output = torch.transpose(output, 0, 1)
return output
def gaussian_kernel_matrix(x, y, sigmas):
sigmas = sigmas.view(sigmas.shape[0], 1)
beta = 1. / (2. * sigmas)
dist = pairwise_distance(x, y).contiguous()
dist_ = dist.view(1, -1)
s = torch.matmul(beta, dist_)
return torch.sum(torch.exp(-s), 0).view_as(dist)
def maximum_mean_discrepancy(x, y, kernel=gaussian_kernel_matrix):
cost = torch.mean(kernel(x, x))
cost += torch.mean(kernel(y, y))
cost -= 2 * torch.mean(kernel(x, y))
return cost
def mmd_loss(source_features, target_features):
sigmas = [
1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1, 1, 5, 10, 15, 20, 25, 30, 35, 100,
1e3, 1e4, 1e5, 1e6
]
gaussian_kernel = partial(
gaussian_kernel_matrix, sigmas=Variable(torch.cuda.FloatTensor(sigmas))
)
loss_value = maximum_mean_discrepancy(source_features, target_features, kernel=gaussian_kernel)
loss_value = loss_value
return loss_value
| none | 1 | 2.413425 | 2 |
|
ml_project/tests/models/test_train_model.py | made-ml-in-prod-2021/liliyamakhmutova- | 0 | 6631909
import os
import pickle
from typing import List, Tuple
import pandas as pd
import pytest
from py._path.local import LocalPath
from sklearn.linear_model import LogisticRegression
from src.data.make_dataset import read_data
from src.enities import TrainingParams
from src.enities.feature_params import FeatureParams
from src.features.build_features import make_features, extract_target, build_transformer
from src.models.model_fit_predict import train_model, serialize_model
from ..features import categorical_features, features_to_drop, numerical_features, target_col, feature_params
from ..data import data
@pytest.fixture
def features_and_target(
data: pd.DataFrame, categorical_features: List[str], numerical_features: List[str], feature_params: FeatureParams
) -> Tuple[pd.DataFrame, pd.Series]:
    feature_params.use_log_trick = False
target = extract_target(data, feature_params)
transformer = build_transformer(feature_params)
transformer.fit(data)
features = make_features(transformer, data)
return features, target
@pytest.fixture
def train_params() -> TrainingParams:
params = TrainingParams(
model_type="LogisticRegression",
random_state=123
)
return params
@pytest.fixture
def model() -> LogisticRegression:
return LogisticRegression()
def test_train_model(features_and_target: Tuple[pd.DataFrame, pd.Series], model: LogisticRegression):
features, target = features_and_target
model = train_model(features, target, model)
assert isinstance(model, LogisticRegression)
assert model.predict(features).shape[0] == target.shape[0]
def test_serialize_model(tmpdir: LocalPath, model: LogisticRegression):
expected_output = tmpdir.join("model.pkl")
model = LogisticRegression()
real_output = serialize_model(model, expected_output)
assert real_output == expected_output
    assert os.path.exists(real_output)
with open(real_output, "rb") as f:
model = pickle.load(f)
    assert isinstance(model, LogisticRegression)
|
common/schema.py | gtmills/Redfish-Service-Validator | 0 | 6631910
# Copyright Notice:
# Copyright 2016-2020 DMTF. All rights reserved.
# License: BSD 3-Clause License. For full text see link: https://github.com/DMTF/Redfish-Service-Validator/blob/master/LICENSE.md
from collections import namedtuple
from bs4 import BeautifulSoup
from functools import lru_cache
import os.path
from common.helper import getType, getNamespace, getNamespaceUnversioned, getVersion, compareMinVersion, splitVersionString
import logging
my_logger = logging.getLogger(__name__)
def storeSchemaToLocal(xml_data, origin, service):
"""storeSchemaToLocal
Moves data pulled from service/online to local schema storage
Does NOT do so if preferonline is specified
:param xml_data: data being transferred
:param origin: origin of xml pulled
"""
config = service.config
SchemaLocation = config['metadatafilepath']
if not os.path.isdir(SchemaLocation):
os.makedirs(SchemaLocation)
if 'localFile' not in origin and '$metadata' not in origin:
__, xml_name = origin.rsplit('/', 1)
new_file = os.path.join(SchemaLocation, xml_name)
if not os.path.isfile(new_file):
with open(new_file, "w") as filehandle:
filehandle.write(xml_data)
my_logger.info('Writing online XML to file: {}'.format(xml_name))
else:
my_logger.info('NOT writing online XML to file: {}'.format(xml_name))
@lru_cache(maxsize=64)
def getSchemaDetails(service, SchemaType, SchemaURI):
"""
Find Schema file for given Namespace.
param SchemaType: Schema Namespace, such as ServiceRoot
param SchemaURI: uri to grab schema, given LocalOnly is False
return: (success boolean, a Soup object, origin)
"""
my_logger.debug('getting Schema of {} {}'.format(SchemaType, SchemaURI))
if SchemaType is None:
return False, None, None
if service is None:
return getSchemaDetailsLocal(SchemaType, SchemaURI, {})
elif service.active and getNamespace(SchemaType) in service.metadata.schema_store:
result = service.metadata.schema_store[getNamespace(SchemaType)]
if result is not None:
return True, result.soup, result.origin
success, soup, origin = getSchemaDetailsLocal(SchemaType, SchemaURI, service.config)
if success:
return success, soup, origin
xml_suffix = '_v1.xml'
    if SchemaURI is not None:
# Get our expected Schema file here
# if success, generate Soup, then check for frags to parse
# start by parsing references, then check for the refLink
if '#' in SchemaURI:
base_schema_uri, frag = tuple(SchemaURI.rsplit('#', 1))
else:
base_schema_uri, frag = SchemaURI, None
success, data, status, elapsed = service.callResourceURI(base_schema_uri)
if success:
soup = BeautifulSoup(data, "xml")
# if frag, look inside xml for real target as a reference
if frag is not None:
# prefer type over frag, truncated down
# using frag, check references
frag = getNamespace(SchemaType)
frag = frag.split('.', 1)[0]
refType, refLink = getReferenceDetails(
soup, name=base_schema_uri).get(frag, (None, None))
if refLink is not None:
success, linksoup, newlink = getSchemaDetails(service, refType, refLink)
if success:
return True, linksoup, newlink
else:
my_logger.error(
"SchemaURI couldn't call reference link {} inside {}".format(frag, base_schema_uri))
else:
my_logger.error(
"SchemaURI missing reference link {} inside {}".format(frag, base_schema_uri))
# error reported; assume likely schema uri to allow continued validation
uri = 'http://redfish.dmtf.org/schemas/v1/{}{}'.format(frag, xml_suffix)
my_logger.info("Continue assuming schema URI for {} is {}".format(SchemaType, uri))
return getSchemaDetails(service, SchemaType, uri)
else:
storeSchemaToLocal(data, base_schema_uri, service)
return True, soup, base_schema_uri
else:
my_logger.debug("SchemaURI called unsuccessfully: {}".format(base_schema_uri))
return getSchemaDetailsLocal(SchemaType, SchemaURI, service.config)
def getSchemaDetailsLocal(SchemaType, SchemaURI, config):
"""
Find Schema file for given Namespace, from local directory
param SchemaType: Schema Namespace, such as ServiceRoot
    param SchemaURI: uri to grab schema (generate information from it)
return: (success boolean, a Soup object, origin)
"""
Alias = getNamespaceUnversioned(SchemaType)
SchemaLocation, SchemaSuffix = config['metadatafilepath'], '_v1.xml'
if SchemaURI is not None:
uriparse = SchemaURI.split('/')[-1].split('#')
xml = uriparse[0]
else:
my_logger.warning("SchemaURI was empty, must generate xml name from type {}".format(SchemaType)),
return getSchemaDetailsLocal(SchemaType, Alias + SchemaSuffix, config)
my_logger.debug(('local', SchemaType, SchemaURI, SchemaLocation + '/' + xml))
filestring = Alias + SchemaSuffix if xml is None else xml
try:
# get file
with open(SchemaLocation + '/' + xml, "r") as filehandle:
data = filehandle.read()
# get tags
soup = BeautifulSoup(data, "xml")
edmxTag = soup.find('edmx:Edmx', recursive=False)
parentTag = edmxTag.find('edmx:DataServices', recursive=False)
child = parentTag.find('Schema', recursive=False)
SchemaNamespace = child['Namespace']
FoundAlias = SchemaNamespace.split(".")[0]
my_logger.debug(FoundAlias)
if FoundAlias in Alias:
return True, soup, "localFile:" + SchemaLocation + '/' + filestring
except FileNotFoundError:
# if we're looking for $metadata locally... ditch looking for it, go straight to file
if '/redfish/v1/$metadata' in SchemaURI and Alias != '$metadata':
my_logger.debug("Unable to find a xml of {} at {}, defaulting to {}".format(SchemaURI, SchemaLocation, Alias + SchemaSuffix))
return getSchemaDetailsLocal(SchemaType, Alias + SchemaSuffix, config)
else:
            my_logger.warning("Schema file {} not found in {}".format(filestring, SchemaLocation))
if Alias == '$metadata':
my_logger.warning("If $metadata cannot be found, Annotations may be unverifiable")
    except Exception:
        my_logger.error("A problem when getting a local schema has occurred {}".format(SchemaURI))
        my_logger.warning("Exception details:", exc_info=True)
return False, None, None
def check_redfish_extensions_alias(name, namespace, alias):
"""
Check that edmx:Include for Namespace RedfishExtensions has the expected 'Redfish' Alias attribute
:param name: the name of the resource
    :param namespace: the namespace of the edmx:Include being checked
    :param alias: the Alias attribute of that edmx:Include
:return: bool
"""
if alias is None or alias != 'Redfish':
msg = ("In the resource {}, the {} namespace must have an alias of 'Redfish'. The alias is {}. " +
"This may cause properties of the form [PropertyName]@Redfish.TermName to be unrecognized.")
my_logger.error(msg.format(name, namespace,
'missing' if alias is None else "'" + str(alias) + "'"))
return False
return True
def getReferenceDetails(soup, metadata_dict=None, name='xml'):
"""
Create a reference dictionary from a soup file
param arg1: soup
param metadata_dict: dictionary of service metadata, compare with
return: dictionary
"""
includeTuple = namedtuple('include', ['Namespace', 'Uri'])
refDict = {}
maintag = soup.find("edmx:Edmx", recursive=False)
reftags = maintag.find_all('edmx:Reference', recursive=False)
for ref in reftags:
includes = ref.find_all('edmx:Include', recursive=False)
for item in includes:
uri = ref.get('Uri')
ns, alias = (item.get(x) for x in ['Namespace', 'Alias'])
if ns is None or uri is None:
my_logger.error("Reference incorrect for: {}".format(item))
continue
if alias is None:
alias = ns
refDict[alias] = includeTuple(ns, uri)
# Check for proper Alias for RedfishExtensions
if name == '$metadata' and ns.startswith('RedfishExtensions.'):
check_bool = check_redfish_extensions_alias(name, ns, alias)
cntref = len(refDict)
if metadata_dict is not None:
refDict.update(metadata_dict)
my_logger.debug("References generated from {}: {} out of {}".format(name, cntref, len(refDict)))
return refDict
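# Illustrative result shape (hypothetical input): for a document containing
#   <edmx:Reference Uri="/schemas/ServiceRoot_v1.xml">
#     <edmx:Include Namespace="ServiceRoot.v1_5_0"/>
#   </edmx:Reference>
# getReferenceDetails returns roughly:
#   {'ServiceRoot.v1_5_0': include(Namespace='ServiceRoot.v1_5_0',
#                                  Uri='/schemas/ServiceRoot_v1.xml')}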
class rfSchema:
def __init__(self, soup, context, origin, metadata=None, name='xml'):
self.soup = soup
self.refs = getReferenceDetails(soup, metadata, name)
self.context = context
self.origin = origin
self.name = name
def getSchemaFromReference(self, namespace):
"""getSchemaFromReference
Get SchemaObj from generated references
:param namespace: Namespace of reference
"""
tup = self.refs.get(namespace)
tupVersionless = self.refs.get(getNamespace(namespace))
if tup is None:
if tupVersionless is None:
my_logger.warning('No such reference {} in {}'.format(namespace, self.origin))
return None
else:
tup = tupVersionless
my_logger.warning('No such reference {} in {}, using unversioned'.format(namespace, self.origin))
typ, uri = tup
newSchemaObj = getSchemaObject(typ, uri)
return newSchemaObj
def getTypeTagInSchema(self, currentType, tagType=['EntityType', 'ComplexType']):
"""getTypeTagInSchema
Get type tag in schema
:param currentType: type string
:param tagType: Array or single string containing the xml tag name
"""
pnamespace, ptype = getNamespace(currentType), getType(currentType)
soup = self.soup
currentSchema = soup.find(
'Schema', attrs={'Namespace': pnamespace})
if currentSchema is None:
return None
currentEntity = currentSchema.find(tagType, attrs={'Name': ptype}, recursive=False)
return currentEntity
def getParentType(self, currentType, tagType=['EntityType', 'ComplexType']):
"""getParentType
Get parent of this Entity/ComplexType
:param currentType: type string
:param tagType: Array or single string containing the xml tag name
"""
currentType = currentType.replace('#', '')
typetag = self.getTypeTagInSchema(currentType, tagType)
if typetag is not None:
currentType = typetag.get('BaseType')
if currentType is None:
return False, None, None
typetag = self.getTypeTagInSchema(currentType, tagType)
if typetag is not None:
return True, self, currentType
else:
namespace = getNamespace(currentType)
schemaObj = self.getSchemaFromReference(namespace)
if schemaObj is None:
return False, None, None
propSchema = schemaObj.soup.find(
'Schema', attrs={'Namespace': namespace})
if propSchema is None:
return False, None, None
return True, schemaObj, currentType
else:
return False, None, None
def getHighestType(self, acquiredtype: str, limit=None):
"""getHighestType
get Highest possible version for given type
:param acquiredtype: Type available
:param limit: Version string limit (full namespace or just version 'v1_x_x')
"""
typelist = list()
if limit is not None:
if getVersion(limit) is None:
if 'Collection' not in limit:
my_logger.warning('Limiting namespace has no version, erasing: {}'.format(limit))
else:
my_logger.info('Limiting namespace has no version, erasing: {}'.format(limit))
limit = None
else:
limit = getVersion(limit)
for schema in self.soup.find_all('Schema'):
newNamespace = schema.get('Namespace')
if limit is not None:
if getVersion(newNamespace) is None:
continue
if compareMinVersion(newNamespace, limit):
continue
if schema.find(['EntityType', 'ComplexType'], attrs={'Name': getType(acquiredtype)}, recursive=False):
typelist.append(splitVersionString(newNamespace))
if len(typelist) > 1:
for ns in reversed(sorted(typelist)):
my_logger.debug(
"{} {}".format(ns, getType(acquiredtype)))
acquiredtype = getNamespaceUnversioned(acquiredtype) + '.v{}_{}_{}'.format(*ns) + '.' + getType(acquiredtype)
return acquiredtype
return acquiredtype
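# Sketch of the intent (actual output depends on the namespaces present in the
# loaded schema document): getHighestType('Chassis.Chassis', limit='v1_3_0')
# resolves to the newest 'Chassis.v1_x_y.Chassis' namespace that defines the
# type and does not exceed v1_3_0.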
@lru_cache(maxsize=64)
def getSchemaObject(service, typename, uri, metadata=None):
"""getSchemaObject
Wrapper for getting an rfSchema object
:param typename: Type with namespace of schema
:param uri: Context/URI of metadata/schema containing reference to namespace
:param metadata: parent refs of service
"""
success, soup, origin = getSchemaDetails(service, typename, uri)
    return rfSchema(soup, uri, origin, metadata=metadata, name=typename) if success else None
rximp/api.py | freelancer1845/rx-imp-python | 0 | 6631911
from rx.subject import Subject
from rx import Observable, defer, create
from rx.disposable import Disposable
from typing import Callable, Dict
from rx.operators import map, publish, filter, take_while, replay, do, share, take_until, take, do_action
import json
from types import FunctionType
from threading import Lock
from .message import RxImpMessage
class RxImp(object):
def __init__(self, inObs: Observable, outSubject: Subject):
"""
Parameters
---------
inObs : Observable<bytes>
Observable<bytes> that the instance subscribes to in order to receive data packets. The Observable should emit objects of type bytes
outSubject : Subject<bytes>
        Subscribe to the outSubject to publish messages (i.e. send them to the receiver)
"""
super().__init__()
self._in: Observable = inObs.pipe(
map(self._mapIncoming),
publish())
self._in.connect()
self._out = Subject()
self._out.pipe(map(self._mapOutgoing)).subscribe(outSubject)
def observableCall(self, topic: str, payload) -> Observable:
"""
Parameters
---------
topic : str
Topic mapped to this call. Other side must register a handler for this topic first using 'registerCall'
payload : any
            Payload will be sent to the other side in JSON format. May be None!
Returns
---------
Observable : dict
Observable that will emit items received from the other side. Will also emit termination events
"""
def subscriptionFunction(observer, scheduler):
message = RxImpMessage(
topic, 0, RxImpMessage.STATE_SUBSCRIBE, json.dumps(payload))
publisher: Subject = Subject()
lock = Lock()
currentCount = 0
queue = []
def orderingSubscriber(msg: RxImpMessage):
nonlocal currentCount
nonlocal queue
with lock:
currentCount += 1
queue.append(msg)
queue.sort(key=lambda x: x.count)
toNext = [msg for msg in queue if msg.count < currentCount]
queue = [msg for msg in queue if msg.count >= currentCount]
for msg in toNext:
publisher.on_next(msg)
def isRelevant(msg: RxImpMessage):
return msg.rx_state == RxImpMessage.STATE_COMPLETE or msg.rx_state == RxImpMessage.STATE_ERROR or msg.rx_state == RxImpMessage.STATE_NEXT
secondSubscription: Disposable = self._in.pipe(
filter(lambda x: x.id == message.id),
filter(lambda x: isRelevant(x)),
map(lambda x: self._checkError(x)),
).subscribe(on_next=lambda x: orderingSubscriber(x), on_error=lambda err: publisher.on_error(err))
subscription: Disposable = publisher.pipe(
take_while(lambda x: self._checkNotComplete(x)),
map(lambda x: json.loads(x.payload)),
).subscribe(observer)
self._out.on_next(message)
def signalUnsubscribe():
msg = RxImpMessage(
message.topic, 0, RxImpMessage.STATE_DISPOSE, None, id=message.id)
secondSubscription.dispose()
subscription.dispose()
self._out.on_next(msg)
return lambda: signalUnsubscribe()
return create(subscriptionFunction)
def registerCall(self, topic: str, handler: Callable[[Dict], Observable]) -> Disposable:
"""
Parameters
---------
topic : str
Topic this call will be registered on
handler : Callable[[Dict], Observable]
            Handler for this topic. Called with the payload provided by the caller (which may be empty). Returns an Observable handling this call
Returns
---------
Disposable
To remove registration
"""
def handleSubscription(msg: RxImpMessage):
currentCount = 0
def on_next(next):
nonlocal currentCount
nextMsg = RxImpMessage(
topic=msg.topic, count=currentCount, rx_state=RxImpMessage.STATE_NEXT, payload=json.dumps(next), id=msg.id)
currentCount += 1
self._out.on_next(nextMsg)
def on_error(error):
nonlocal currentCount
errorMsg = RxImpMessage(
topic=msg.topic, count=currentCount, rx_state=RxImpMessage.STATE_ERROR, payload=json.dumps(error), id=msg.id)
currentCount += 1
self._out.on_next(errorMsg)
def on_complete():
nonlocal currentCount
completeMsg = RxImpMessage(
topic=msg.topic, count=currentCount, rx_state=RxImpMessage.STATE_COMPLETE, payload=None, id=msg.id)
currentCount += 1
self._out.on_next(completeMsg)
handler(json.loads(msg.payload)).pipe(
take_until(self._in.pipe(
filter(lambda x: x.rx_state == RxImpMessage.STATE_DISPOSE),
filter(lambda x: x.id == msg.id),
take(1)
))
).subscribe(on_next=lambda x: on_next(x),
on_error=lambda x: on_error(
x),
on_completed=lambda: on_complete())
return self._in.pipe(
filter(lambda x: x.rx_state == RxImpMessage.STATE_SUBSCRIBE),
filter(lambda x: x.topic == topic)
).subscribe(on_next=lambda x: handleSubscription(x))
def _mapIncoming(self, data):
return RxImpMessage.fromBytes(data)
def _mapOutgoing(self, msg: RxImpMessage):
return msg.toBytes()
def _checkError(self, msg: RxImpMessage) -> RxImpMessage:
if msg.rx_state == RxImpMessage.STATE_ERROR:
raise Exception(json.loads(msg.payload))
else:
return msg
def _checkNotComplete(self, msg: RxImpMessage) -> bool:
if msg.rx_state == RxImpMessage.STATE_COMPLETE:
return False
else:
			return True
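# Illustrative in-process wiring sketch (names are hypothetical; assumes RxPY's
# rx.of for the handler): two RxImp instances can be cross-connected through
# plain Subjects to emulate a remote peer, e.g. for tests.
#   import rx
#   a_to_b, b_to_a = Subject(), Subject()
#   peer_a = RxImp(b_to_a, a_to_b)
#   peer_b = RxImp(a_to_b, b_to_a)
#   peer_b.registerCall('echo', lambda payload: rx.of(payload))
#   peer_a.observableCall('echo', {'msg': 'hi'}).subscribe(print)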
core/machine/urls.py | rauanisanfelice/python-analise-credito | 2 | 6631912
from django.contrib import admin
from django.urls import path, include
from django.contrib.auth import views as auth_views
from machine.views import *
urlpatterns = [
path('', index.as_view(), name='index'),
path('validacao/', validacao, name='validacao'),
]
|
standalone/BC_plus_script.py | vwxyzjn/MineRL2021-Intro-baselines | 8 | 6631913
"""
Behavioural Cloning agent that trains on MineRLTreechop data. It is then evaluated on MineRLObtainDiamond by running it
for a certain number of steps and then switching to the scripted part that crafts a wooden_pickaxe and digs down to get
some cobblestone.
With default parameters it trains in 5-10 mins on a machine with a GeForce RTX 2080 Ti GPU.
It uses less than 8GB RAM and achieves an average reward of 8.6.
"""
from tqdm import tqdm
import numpy as np
import torch as th
from torch import nn
import gym
import minerl
# Parameters:
DATA_DIR = "data" # path to where MineRL dataset resides (should contain "MineRLTreechop-v0" directory).
EPOCHS = 3 # How many times we train over the dataset.
LEARNING_RATE = 0.0001 # Learning rate for the neural network.
TRAIN_MODEL_NAME = 'another_potato.pth' # name to use when saving the trained agent.
TEST_MODEL_NAME = 'another_potato.pth' # name to use when loading the trained agent.
TEST_EPISODES = 10 # number of episodes to test the agent for.
MAX_TEST_EPISODE_LEN = 18000 # 18k is the default for MineRLObtainDiamond.
TREECHOP_STEPS = 2000 # number of steps to run BC lumberjack for in evaluations.
class NatureCNN(nn.Module):
"""
CNN from DQN nature paper:
Mnih, Volodymyr, et al.
"Human-level control through deep reinforcement learning."
Nature 518.7540 (2015): 529-533.
:param input_shape: A three-item tuple telling image dimensions in (C, H, W)
:param output_dim: Dimensionality of the output vector
"""
def __init__(self, input_shape, output_dim):
super().__init__()
n_input_channels = input_shape[0]
self.cnn = nn.Sequential(
nn.Conv2d(n_input_channels, 32, kernel_size=8, stride=4, padding=0),
nn.ReLU(),
nn.Conv2d(32, 64, kernel_size=4, stride=2, padding=0),
nn.ReLU(),
nn.Conv2d(64, 64, kernel_size=3, stride=1, padding=0),
nn.ReLU(),
nn.Flatten(),
)
# Compute shape by doing one forward pass
with th.no_grad():
n_flatten = self.cnn(th.zeros(1, *input_shape)).shape[1]
self.linear = nn.Sequential(
nn.Linear(n_flatten, 512),
nn.ReLU(),
nn.Linear(512, output_dim)
)
def forward(self, observations: th.Tensor) -> th.Tensor:
return self.linear(self.cnn(observations))
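# Quick shape sanity check (illustrative; batch size is arbitrary):
#   net = NatureCNN((3, 64, 64), 7)
#   assert net(th.zeros(4, 3, 64, 64)).shape == (4, 7)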
class ActionShaping(gym.ActionWrapper):
"""
The default MineRL action space is the following dict:
Dict(attack:Discrete(2),
back:Discrete(2),
camera:Box(low=-180.0, high=180.0, shape=(2,)),
craft:Enum(crafting_table,none,planks,stick,torch),
equip:Enum(air,iron_axe,iron_pickaxe,none,stone_axe,stone_pickaxe,wooden_axe,wooden_pickaxe),
forward:Discrete(2),
jump:Discrete(2),
left:Discrete(2),
nearbyCraft:Enum(furnace,iron_axe,iron_pickaxe,none,stone_axe,stone_pickaxe,wooden_axe,wooden_pickaxe),
nearbySmelt:Enum(coal,iron_ingot,none),
place:Enum(cobblestone,crafting_table,dirt,furnace,none,stone,torch),
right:Discrete(2),
sneak:Discrete(2),
sprint:Discrete(2))
It can be viewed as:
- buttons, like attack, back, forward, sprint that are either pressed or not.
- mouse, i.e. the continuous camera action in degrees. The two values are pitch (up/down), where up is
negative, down is positive, and yaw (left/right), where left is negative, right is positive.
- craft/equip/place actions for items specified above.
So an example action could be sprint + forward + jump + attack + turn camera, all in one action.
This wrapper makes the action space much smaller by selecting a few common actions and making the camera actions
discrete. You can change these actions by changing self._actions below. That should just work with the RL agent,
but would require some further tinkering below with the BC one.
"""
def __init__(self, env, camera_angle=10, always_attack=False):
super().__init__(env)
self.camera_angle = camera_angle
self.always_attack = always_attack
self._actions = [
[('attack', 1)],
[('forward', 1)],
# [('back', 1)],
# [('left', 1)],
# [('right', 1)],
# [('jump', 1)],
# [('forward', 1), ('attack', 1)],
# [('craft', 'planks')],
[('forward', 1), ('jump', 1)],
[('camera', [-self.camera_angle, 0])],
[('camera', [self.camera_angle, 0])],
[('camera', [0, self.camera_angle])],
[('camera', [0, -self.camera_angle])],
]
self.actions = []
for actions in self._actions:
act = self.env.action_space.noop()
for a, v in actions:
act[a] = v
if self.always_attack:
act['attack'] = 1
self.actions.append(act)
self.action_space = gym.spaces.Discrete(len(self.actions))
def action(self, action):
return self.actions[action]
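# For example (sketch, using the defaults above): index 0 is the attack action
# and index 3 pitches the camera up by 10 degrees:
#   env = ActionShaping(gym.make('MineRLTreechop-v0'))
#   env.action(3)['camera']  # -> [-10, 0]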
def dataset_action_batch_to_actions(dataset_actions, camera_margin=5):
"""
Turn a batch of actions from dataset (`batch_iter`) to a numpy
array that corresponds to batch of actions of ActionShaping wrapper (_actions).
Camera margin sets the threshold what is considered "moving camera".
Note: Hardcoded to work for actions in ActionShaping._actions, with "intuitive"
ordering of actions.
If you change ActionShaping._actions, remember to change this!
Array elements are integers corresponding to actions, or "-1"
for actions that did not have any corresponding discrete match.
"""
# There are dummy dimensions of shape one
camera_actions = dataset_actions["camera"].squeeze()
attack_actions = dataset_actions["attack"].squeeze()
forward_actions = dataset_actions["forward"].squeeze()
jump_actions = dataset_actions["jump"].squeeze()
batch_size = len(camera_actions)
    actions = np.zeros((batch_size,), dtype=np.int64)
for i in range(len(camera_actions)):
# Moving camera is most important (horizontal first)
if camera_actions[i][0] < -camera_margin:
actions[i] = 3
elif camera_actions[i][0] > camera_margin:
actions[i] = 4
elif camera_actions[i][1] > camera_margin:
actions[i] = 5
elif camera_actions[i][1] < -camera_margin:
actions[i] = 6
elif forward_actions[i] == 1:
if jump_actions[i] == 1:
actions[i] = 2
else:
actions[i] = 1
elif attack_actions[i] == 1:
actions[i] = 0
else:
# No reasonable mapping (would be no-op)
actions[i] = -1
return actions
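# Resulting index convention (mirrors ActionShaping._actions above):
#   0 attack, 1 forward, 2 forward+jump, 3 pitch up, 4 pitch down,
#   5 yaw right, 6 yaw left, -1 no matching discrete action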
def train():
data = minerl.data.make("MineRLTreechop-v0", data_dir=DATA_DIR, num_workers=4)
# We know ActionShaping has seven discrete actions, so we create
# a network to map images to seven values (logits), which represent
# likelihoods of selecting those actions
network = NatureCNN((3, 64, 64), 7).cuda()
optimizer = th.optim.Adam(network.parameters(), lr=LEARNING_RATE)
loss_function = nn.CrossEntropyLoss()
iter_count = 0
losses = []
for dataset_obs, dataset_actions, _, _, _ in tqdm(data.batch_iter(num_epochs=EPOCHS, batch_size=32, seq_len=1)):
# We only use pov observations (also remove dummy dimensions)
obs = dataset_obs["pov"].squeeze().astype(np.float32)
# Transpose observations to be channel-first (BCHW instead of BHWC)
obs = obs.transpose(0, 3, 1, 2)
# Normalize observations
obs /= 255.0
# Actions need bit more work
actions = dataset_action_batch_to_actions(dataset_actions)
# Remove samples that had no corresponding action
mask = actions != -1
obs = obs[mask]
actions = actions[mask]
# Obtain logits of each action
logits = network(th.from_numpy(obs).float().cuda())
# Minimize cross-entropy with target labels.
# We could also compute the probability of demonstration actions and
# maximize them.
loss = loss_function(logits, th.from_numpy(actions).long().cuda())
# Standard PyTorch update
optimizer.zero_grad()
loss.backward()
optimizer.step()
iter_count += 1
losses.append(loss.item())
if (iter_count % 1000) == 0:
mean_loss = sum(losses) / len(losses)
tqdm.write("Iteration {}. Loss {:<10.3f}".format(iter_count, mean_loss))
losses.clear()
th.save(network.state_dict(), TRAIN_MODEL_NAME)
del data
def str_to_act(env, actions):
"""
Simplifies specifying actions for the scripted part of the agent.
Some examples for a string with a single action:
'craft:planks'
'camera:[10,0]'
'attack'
'jump'
''
There should be no spaces in single actions, as we use spaces to separate actions with multiple "buttons" pressed:
'attack sprint forward'
'forward camera:[0,10]'
:param env: base MineRL environment.
:param actions: string of actions.
:return: dict action, compatible with the base MineRL environment.
"""
act = env.action_space.noop()
for action in actions.split():
if ":" in action:
k, v = action.split(':')
if k == 'camera':
act[k] = eval(v)
else:
act[k] = v
else:
act[action] = 1
return act
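# e.g. (sketch): str_to_act(env, 'forward camera:[0,10]') yields a no-op dict
# with act['forward'] = 1 and act['camera'] = [0, 10].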
def get_action_sequence():
"""
Specify the action sequence for the scripted part of the agent.
"""
# make planks, sticks, crafting table and wooden pickaxe:
action_sequence = []
action_sequence += [''] * 100
action_sequence += ['craft:planks'] * 4
action_sequence += ['craft:stick'] * 2
action_sequence += ['craft:crafting_table']
action_sequence += ['camera:[10,0]'] * 18
action_sequence += ['attack'] * 20
action_sequence += [''] * 10
action_sequence += ['jump']
action_sequence += [''] * 5
action_sequence += ['place:crafting_table']
action_sequence += [''] * 10
# bug: looking straight down at a crafting table doesn't let you craft. So we look up a bit before crafting.
action_sequence += ['camera:[-1,0]']
action_sequence += ['nearbyCraft:wooden_pickaxe']
action_sequence += ['camera:[1,0]']
action_sequence += [''] * 10
action_sequence += ['equip:wooden_pickaxe']
action_sequence += [''] * 10
# dig down:
action_sequence += ['attack'] * 600
action_sequence += [''] * 10
return action_sequence
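# The scripted sequence above totals roughly 800 steps, comfortably within the
# MAX_TEST_EPISODE_LEN - TREECHOP_STEPS = 16000 steps budgeted for it in test().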
def test():
network = NatureCNN((3, 64, 64), 7).cuda()
network.load_state_dict(th.load(TEST_MODEL_NAME))
env = gym.make('MineRLObtainDiamond-v0')
# optional interactive mode, where you can connect to your agent and play together (see link for details):
# https://minerl.io/docs/tutorials/minerl_tools.html#interactive-mode-minerl-interactor
# env.make_interactive(port=6666, realtime=True)
env = ActionShaping(env, always_attack=True)
env1 = env.unwrapped
num_actions = env.action_space.n
action_list = np.arange(num_actions)
action_sequence = get_action_sequence()
for episode in range(TEST_EPISODES):
obs = env.reset()
done = False
total_reward = 0
steps = 0
# BC part to get some logs:
for i in range(TREECHOP_STEPS):
# Process the action:
# - Add/remove batch dimensions
# - Transpose image (needs to be channels-last)
# - Normalize image
obs = th.from_numpy(obs['pov'].transpose(2, 0, 1)[None].astype(np.float32) / 255).cuda()
# Turn logits into probabilities
probabilities = th.softmax(network(obs), dim=1)[0]
# Into numpy
probabilities = probabilities.detach().cpu().numpy()
# Sample action according to the probabilities
action = np.random.choice(action_list, p=probabilities)
obs, reward, done, info = env.step(action)
total_reward += reward
steps += 1
if done:
break
# scripted part to use the logs:
if not done:
for i, action in enumerate(action_sequence[:MAX_TEST_EPISODE_LEN - TREECHOP_STEPS]):
obs, reward, done, _ = env1.step(str_to_act(env1, action))
total_reward += reward
steps += 1
if done:
break
print(f'Episode #{episode + 1} reward: {total_reward}\t\t episode length: {steps}')
env.close()
def main():
# train()
test()
if __name__ == '__main__':
    main()
action_sequence += ['camera:[-1,0]']
action_sequence += ['nearbyCraft:wooden_pickaxe']
action_sequence += ['camera:[1,0]']
action_sequence += [''] * 10
action_sequence += ['equip:wooden_pickaxe']
action_sequence += [''] * 10
# dig down:
action_sequence += ['attack'] * 600
action_sequence += [''] * 10
return action_sequence
def test():
network = NatureCNN((3, 64, 64), 7).cuda()
network.load_state_dict(th.load(TEST_MODEL_NAME))
env = gym.make('MineRLObtainDiamond-v0')
# optional interactive mode, where you can connect to your agent and play together (see link for details):
# https://minerl.io/docs/tutorials/minerl_tools.html#interactive-mode-minerl-interactor
# env.make_interactive(port=6666, realtime=True)
env = ActionShaping(env, always_attack=True)
env1 = env.unwrapped
num_actions = env.action_space.n
action_list = np.arange(num_actions)
action_sequence = get_action_sequence()
for episode in range(TEST_EPISODES):
obs = env.reset()
done = False
total_reward = 0
steps = 0
# BC part to get some logs:
for i in range(TREECHOP_STEPS):
# Process the action:
# - Add/remove batch dimensions
# - Transpose image (needs to be channels-last)
# - Normalize image
obs = th.from_numpy(obs['pov'].transpose(2, 0, 1)[None].astype(np.float32) / 255).cuda()
# Turn logits into probabilities
probabilities = th.softmax(network(obs), dim=1)[0]
# Into numpy
probabilities = probabilities.detach().cpu().numpy()
# Sample action according to the probabilities
action = np.random.choice(action_list, p=probabilities)
obs, reward, done, info = env.step(action)
total_reward += reward
steps += 1
if done:
break
# scripted part to use the logs:
if not done:
for i, action in enumerate(action_sequence[:MAX_TEST_EPISODE_LEN - TREECHOP_STEPS]):
obs, reward, done, _ = env1.step(str_to_act(env1, action))
total_reward += reward
steps += 1
if done:
break
print(f'Episode #{episode + 1} reward: {total_reward}\t\t episode length: {steps}')
env.close()
def main():
# train()
test()
if __name__ == '__main__':
main() | en | 0.814114 | Behavioural Cloning agent that trains on MineRLTreechop data. It is then evaluated on MineRLObtainDiamond by running it for a certain number of steps and then switching to the scripted part that crafts a wooden_pickaxe and digs down to get some cobblestone. With default parameters it trains in 5-10 mins on a machine with a GeForce RTX 2080 Ti GPU. It uses less than 8GB RAM and achieves an average reward of 8.6. # Parameters: # path to where MineRL dataset resides (should contain "MineRLTreechop-v0" directory). # How many times we train over the dataset. # Learning rate for the neural network. # name to use when saving the trained agent. # name to use when loading the trained agent. # number of episodes to test the agent for. # 18k is the default for MineRLObtainDiamond. # number of steps to run BC lumberjack for in evaluations. CNN from DQN nature paper: Mnih, Volodymyr, et al. "Human-level control through deep reinforcement learning." Nature 518.7540 (2015): 529-533. :param input_shape: A three-item tuple telling image dimensions in (C, H, W) :param output_dim: Dimensionality of the output vector # Compute shape by doing one forward pass The default MineRL action space is the following dict: Dict(attack:Discrete(2), back:Discrete(2), camera:Box(low=-180.0, high=180.0, shape=(2,)), craft:Enum(crafting_table,none,planks,stick,torch), equip:Enum(air,iron_axe,iron_pickaxe,none,stone_axe,stone_pickaxe,wooden_axe,wooden_pickaxe), forward:Discrete(2), jump:Discrete(2), left:Discrete(2), nearbyCraft:Enum(furnace,iron_axe,iron_pickaxe,none,stone_axe,stone_pickaxe,wooden_axe,wooden_pickaxe), nearbySmelt:Enum(coal,iron_ingot,none), place:Enum(cobblestone,crafting_table,dirt,furnace,none,stone,torch), right:Discrete(2), sneak:Discrete(2), sprint:Discrete(2)) It can be viewed as: - buttons, like attack, back, forward, sprint that are either pressed or not. - mouse, i.e. the continuous camera action in degrees. The two values are pitch (up/down), where up is negative, down is positive, and yaw (left/right), where left is negative, right is positive. - craft/equip/place actions for items specified above. So an example action could be sprint + forward + jump + attack + turn camera, all in one action. This wrapper makes the action space much smaller by selecting a few common actions and making the camera actions discrete. You can change these actions by changing self._actions below. That should just work with the RL agent, but would require some further tinkering below with the BC one. # [('back', 1)], # [('left', 1)], # [('right', 1)], # [('jump', 1)], # [('forward', 1), ('attack', 1)], # [('craft', 'planks')], Turn a batch of actions from dataset (`batch_iter`) to a numpy array that corresponds to batch of actions of ActionShaping wrapper (_actions). Camera margin sets the threshold what is considered "moving camera". Note: Hardcoded to work for actions in ActionShaping._actions, with "intuitive" ordering of actions. If you change ActionShaping._actions, remember to change this! Array elements are integers corresponding to actions, or "-1" for actions that did not have any corresponding discrete match. 
# There are dummy dimensions of shape one # Moving camera is most important (horizontal first) # No reasonable mapping (would be no-op) # We know ActionShaping has seven discrete actions, so we create # a network to map images to seven values (logits), which represent # likelihoods of selecting those actions # We only use pov observations (also remove dummy dimensions) # Transpose observations to be channel-first (BCHW instead of BHWC) # Normalize observations # Actions need bit more work # Remove samples that had no corresponding action # Obtain logits of each action # Minimize cross-entropy with target labels. # We could also compute the probability of demonstration actions and # maximize them. # Standard PyTorch update Simplifies specifying actions for the scripted part of the agent. Some examples for a string with a single action: 'craft:planks' 'camera:[10,0]' 'attack' 'jump' '' There should be no spaces in single actions, as we use spaces to separate actions with multiple "buttons" pressed: 'attack sprint forward' 'forward camera:[0,10]' :param env: base MineRL environment. :param actions: string of actions. :return: dict action, compatible with the base MineRL environment. Specify the action sequence for the scripted part of the agent. # make planks, sticks, crafting table and wooden pickaxe: # bug: looking straight down at a crafting table doesn't let you craft. So we look up a bit before crafting. # dig down: # optional interactive mode, where you can connect to your agent and play together (see link for details): # https://minerl.io/docs/tutorials/minerl_tools.html#interactive-mode-minerl-interactor # env.make_interactive(port=6666, realtime=True) # BC part to get some logs: # Process the action: # - Add/remove batch dimensions # - Transpose image (needs to be channels-last) # - Normalize image # Turn logits into probabilities # Into numpy # Sample action according to the probabilities # scripted part to use the logs: #{episode + 1} reward: {total_reward}\t\t episode length: {steps}') # train() | 2.575444 | 3 |
download_alexnet_vlcs.py | belaalb/G2DM | 43 | 6631914 |
from google_drive_downloader import GoogleDriveDownloader as gdd
gdd.download_file_from_google_drive(file_id='1wUJTH1Joq2KAgrUDeKJghP1Wf7Q9w4z-',
dest_path='./alexnet_caffe.pth.tar',
unzip=False,
showsize=True,
overwrite=False)
## Download vlcs
from google_drive_downloader import GoogleDriveDownloader as gdd
gdd.download_file_from_google_drive(file_id='13qTR2jJqCQgfdbI3E_a6dLU7rEWpF37T',
dest_path='./data/vlcs/vlcs.tar.gz',
unzip=False,
showsize=True,
overwrite=False)
## unzip
import tarfile
fname= './data/vlcs/vlcs.tar.gz'
tar = tarfile.open(fname, "r:gz")
tar.extractall()
tar.close()
## Move to ./data/vlcs/prepared_data
import shutil, glob
for filePath in glob.glob('./VLCS' + '/*'):
shutil.move(filePath, './data/vlcs/prepared_data/')
|
from google_drive_downloader import GoogleDriveDownloader as gdd
gdd.download_file_from_google_drive(file_id='1wUJTH1Joq2KAgrUDeKJghP1Wf7Q9w4z-',
dest_path='./alexnet_caffe.pth.tar',
unzip=False,
showsize=True,
overwrite=False)
## Download vlcs
from google_drive_downloader import GoogleDriveDownloader as gdd
gdd.download_file_from_google_drive(file_id='13qTR2jJqCQgfdbI3E_a6dLU7rEWpF37T',
dest_path='./data/vlcs/vlcs.tar.gz',
unzip=False,
showsize=True,
overwrite=False)
## unzip
import tarfile
fname= './data/vlcs/vlcs.tar.gz'
tar = tarfile.open(fname, "r:gz")
tar.extractall()
tar.close()
## Move to ./data/vlcs/prepared_data
import shutil, glob
for filePath in glob.glob('./VLCS' + '/*'):
shutil.move(filePath, './data/vlcs/prepared_data/')
| en | 0.100935 | ## Download vlcs ## unzip ## Move to ./data/vlcs/prepared_data | 2.421149 | 2 |
beaker/services/image.py | allenai/beaker-py | 0 | 6631915 | <reponame>allenai/beaker-py<filename>beaker/services/image.py
from typing import TYPE_CHECKING, Dict, Optional, Union
from docker.models.images import Image as DockerImage
from ..data_model import *
from ..exceptions import *
from .service_client import ServiceClient
if TYPE_CHECKING:
from rich.progress import TaskID
class ImageClient(ServiceClient):
"""
Accessed via :data:`Beaker.image <beaker.Beaker.image>`.
"""
def get(self, image: str) -> Image:
"""
Get info about an image on Beaker.
:param image: The Beaker image ID or name.
:raises ImageNotFound: If the image can't be found on Beaker.
:raises HTTPError: Any other HTTP exception that can occur.
"""
def _get(id: str) -> Image:
return Image.from_json(
self.request(
f"images/{self.url_quote(id)}",
exceptions_for_status={404: ImageNotFound(self._not_found_err_msg(id))},
).json()
)
try:
# Could be an ID or full name, so we try that first.
return _get(image)
except ImageNotFound:
if "/" not in image:
# Now try with adding the account name.
try:
return _get(f"{self.beaker.account.name}/{image}")
except ImageNotFound:
pass
raise
def create(
self,
name: str,
image_tag: str,
workspace: Optional[str] = None,
description: Optional[str] = None,
quiet: bool = False,
commit: bool = True,
) -> Image:
"""
Upload a Docker image to Beaker.
:param name: The name to assign to the image on Beaker.
:param image_tag: The tag of the local image you're uploading.
:param workspace: The workspace to upload the image to. If not specified,
:data:`Beaker.config.default_workspace <beaker.Config.default_workspace>` is used.
:param description: Text description of the image.
:param quiet: If ``True``, progress won't be displayed.
:param commit: Whether to commit the image after successful upload.
:raises ValueError: If the image name is invalid.
:raises ImageConflict: If an image with the given name already exists.
:raises WorkspaceNotSet: If neither ``workspace`` nor
:data:`Beaker.config.default_workspace <beaker.Config.default_workspace>` are set.
:raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur.
:raises HTTPError: Any other HTTP exception that can occur.
"""
self.validate_beaker_name(name)
workspace: Workspace = self.resolve_workspace(workspace)
# Get local Docker image object.
image = self.docker.images.get(image_tag)
# Create new image on Beaker.
image_id = self.request(
"images",
method="POST",
data=ImageSpec(
workspace=workspace.id,
image_id=image.id,
image_tag=image_tag,
description=description,
),
query={"name": name},
exceptions_for_status={409: ImageConflict(name)},
).json()["id"]
# Get the repo data for the Beaker image.
repo = ImageRepo.from_json(
self.request(f"images/{image_id}/repository", query={"upload": True}).json()
)
# Tag the local image with the new tag for the Beaker image.
image.tag(repo.image_tag)
# Push the image to Beaker.
from ..progress import get_image_upload_progress
with get_image_upload_progress(quiet) as progress:
layer_id_to_task: Dict[str, "TaskID"] = {}
for layer_state_data in self.docker.api.push(
repo.image_tag,
stream=True,
decode=True,
auth_config={
"username": repo.auth.user,
"password": repo.auth.password,
"server_address": repo.auth.server_address,
},
):
if "id" not in layer_state_data or "status" not in layer_state_data:
continue
layer_state = DockerLayerUploadState.from_json(layer_state_data)
# Get progress task ID for layer, initializing if it doesn't already exist.
task_id: "TaskID"
if layer_state.id not in layer_id_to_task:
task_id = progress.add_task(layer_state.id, start=True, total=1)
layer_id_to_task[layer_state.id] = task_id
else:
task_id = layer_id_to_task[layer_state.id]
# Update task progress description.
progress.update(
task_id, description=f"{layer_state.id}: {layer_state.status.title()}"
)
# Update task progress total and completed.
if (
layer_state.progress_detail.total is not None
and layer_state.progress_detail.current is not None
):
progress.update(
task_id,
total=layer_state.progress_detail.total,
completed=layer_state.progress_detail.current,
)
elif layer_state.status in {
DockerLayerUploadStatus.preparing,
DockerLayerUploadStatus.waiting,
}:
progress.update(
task_id,
total=1,
completed=0,
)
elif layer_state.status in {
DockerLayerUploadStatus.pushed,
DockerLayerUploadStatus.already_exists,
}:
progress.update(
task_id,
total=1,
completed=1,
)
if commit:
return self.commit(image_id)
else:
return self.get(image_id)
def commit(self, image: Union[str, Image]) -> Image:
"""
Commit an image.
:param image: The Beaker image ID, name, or object.
:raises ImageNotFound: If the image can't be found on Beaker.
:raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur.
:raises HTTPError: Any other HTTP exception that can occur.
"""
image_id = self.resolve_image(image).id
return Image.from_json(
self.request(
f"images/{image_id}",
method="PATCH",
data=ImagePatch(commit=True),
exceptions_for_status={404: ImageNotFound(self._not_found_err_msg(image))},
).json()
)
def delete(self, image: Union[str, Image]):
"""
Delete an image on Beaker.
:param image: The Beaker image ID, name, or object.
:raises ImageNotFound: If the image can't be found on Beaker.
:raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur.
:raises HTTPError: Any other HTTP exception that can occur.
"""
image_id = self.resolve_image(image).id
self.request(
f"images/{self.url_quote(image_id)}",
method="DELETE",
exceptions_for_status={404: ImageNotFound(self._not_found_err_msg(image))},
)
def rename(self, image: Union[str, Image], name: str) -> Image:
"""
Rename an image on Beaker.
:param image: The Beaker image ID, name, or object.
:param name: The new name for the image.
:raises ImageNotFound: If the image can't be found on Beaker.
:raises ValueError: If the image name is invalid.
:raises ImageConflict: If an image with the given name already exists.
:raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur.
:raises HTTPError: Any other HTTP exception that can occur.
"""
self.validate_beaker_name(name)
image_id = self.resolve_image(image).id
return Image.from_json(
self.request(
f"images/{image_id}",
method="PATCH",
data=ImagePatch(name=name),
exceptions_for_status={404: ImageNotFound(self._not_found_err_msg(image))},
).json()
)
def pull(self, image: Union[str, Image], quiet: bool = False) -> DockerImage:
"""
Pull an image from Beaker.
.. important::
This method returns a Docker :class:`~docker.models.images.Image`, not
a Beaker :class:`~beaker.data_model.image.Image`.
:param image: The Beaker image ID, name, or object.
:param quiet: If ``True``, progress won't be displayed.
:raises ImageNotFound: If the image can't be found on Beaker.
:raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur.
:raises HTTPError: Any other HTTP exception that can occur.
"""
image_id = self.resolve_image(image).id
repo = ImageRepo.from_json(self.request(f"images/{image_id}/repository").json())
from ..progress import get_image_download_progress
with get_image_download_progress(quiet) as progress:
layer_id_to_task: Dict[str, "TaskID"] = {}
for layer_state_data in self.docker.api.pull(
repo.image_tag,
stream=True,
decode=True,
auth_config={
"username": repo.auth.user,
"password": <PASSWORD>,
"server_address": repo.auth.server_address,
},
):
if "id" not in layer_state_data or "status" not in layer_state_data:
continue
if layer_state_data["status"].lower().startswith("pulling "):
continue
layer_state = DockerLayerDownloadState.from_json(layer_state_data)
# Get progress task ID for layer, initializing if it doesn't already exist.
task_id: "TaskID"
if layer_state.id not in layer_id_to_task:
task_id = progress.add_task(layer_state.id, start=True, total=1)
layer_id_to_task[layer_state.id] = task_id
else:
task_id = layer_id_to_task[layer_state.id]
# Update task progress description.
progress.update(
task_id, description=f"{layer_state.id}: {layer_state.status.title()}"
)
# Update task progress total and completed.
if (
layer_state.progress_detail.total is not None
and layer_state.progress_detail.current is not None
):
progress.update(
task_id,
total=layer_state.progress_detail.total,
completed=layer_state.progress_detail.current,
)
elif layer_state.status in {
DockerLayerDownloadStatus.waiting,
DockerLayerDownloadStatus.extracting,
DockerLayerDownloadStatus.verifying_checksum,
}:
progress.update(
task_id,
total=1,
completed=0,
)
elif layer_state.status in {
DockerLayerDownloadStatus.download_complete,
DockerLayerDownloadStatus.pull_complete,
DockerLayerDownloadStatus.already_exists,
}:
progress.update(
task_id,
total=1,
completed=1,
)
local_image = self.docker.images.get(repo.image_tag)
return local_image
def url(self, image: Union[str, Image]) -> str:
"""
Get the URL for an image.
:param image: The Beaker image ID, name, or object.
:raises ImageNotFound: If the image can't be found on Beaker.
"""
image_id = self.resolve_image(image).id
return f"{self.config.agent_address}/im/{self.url_quote(image_id)}/details"
def _not_found_err_msg(self, image: Union[str, Image]) -> str:
image = image if isinstance(image, str) else image.id
return (
f"'{image}': Make sure you're using a valid Beaker image ID or the "
f"*full* name of the image (with the account prefix, e.g. 'username/image_name')"
)
| from typing import TYPE_CHECKING, Dict, Optional, Union
from docker.models.images import Image as DockerImage
from ..data_model import *
from ..exceptions import *
from .service_client import ServiceClient
if TYPE_CHECKING:
from rich.progress import TaskID
class ImageClient(ServiceClient):
"""
Accessed via :data:`Beaker.image <beaker.Beaker.image>`.
"""
def get(self, image: str) -> Image:
"""
Get info about an image on Beaker.
:param image: The Beaker image ID or name.
:raises ImageNotFound: If the image can't be found on Beaker.
:raises HTTPError: Any other HTTP exception that can occur.
"""
def _get(id: str) -> Image:
return Image.from_json(
self.request(
f"images/{self.url_quote(id)}",
exceptions_for_status={404: ImageNotFound(self._not_found_err_msg(id))},
).json()
)
try:
# Could be an ID or full name, so we try that first.
return _get(image)
except ImageNotFound:
if "/" not in image:
# Now try with adding the account name.
try:
return _get(f"{self.beaker.account.name}/{image}")
except ImageNotFound:
pass
raise
def create(
self,
name: str,
image_tag: str,
workspace: Optional[str] = None,
description: Optional[str] = None,
quiet: bool = False,
commit: bool = True,
) -> Image:
"""
Upload a Docker image to Beaker.
:param name: The name to assign to the image on Beaker.
:param image_tag: The tag of the local image you're uploading.
:param workspace: The workspace to upload the image to. If not specified,
:data:`Beaker.config.default_workspace <beaker.Config.default_workspace>` is used.
:param description: Text description of the image.
:param quiet: If ``True``, progress won't be displayed.
:param commit: Whether to commit the image after successful upload.
:raises ValueError: If the image name is invalid.
:raises ImageConflict: If an image with the given name already exists.
:raises WorkspaceNotSet: If neither ``workspace`` nor
:data:`Beaker.config.default_workspace <beaker.Config.default_workspace>` are set.
:raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur.
:raises HTTPError: Any other HTTP exception that can occur.
"""
self.validate_beaker_name(name)
workspace: Workspace = self.resolve_workspace(workspace)
# Get local Docker image object.
image = self.docker.images.get(image_tag)
# Create new image on Beaker.
image_id = self.request(
"images",
method="POST",
data=ImageSpec(
workspace=workspace.id,
image_id=image.id,
image_tag=image_tag,
description=description,
),
query={"name": name},
exceptions_for_status={409: ImageConflict(name)},
).json()["id"]
# Get the repo data for the Beaker image.
repo = ImageRepo.from_json(
self.request(f"images/{image_id}/repository", query={"upload": True}).json()
)
# Tag the local image with the new tag for the Beaker image.
image.tag(repo.image_tag)
# Push the image to Beaker.
from ..progress import get_image_upload_progress
with get_image_upload_progress(quiet) as progress:
layer_id_to_task: Dict[str, "TaskID"] = {}
for layer_state_data in self.docker.api.push(
repo.image_tag,
stream=True,
decode=True,
auth_config={
"username": repo.auth.user,
"password": repo.auth.password,
"server_address": repo.auth.server_address,
},
):
if "id" not in layer_state_data or "status" not in layer_state_data:
continue
layer_state = DockerLayerUploadState.from_json(layer_state_data)
# Get progress task ID for layer, initializing if it doesn't already exist.
task_id: "TaskID"
if layer_state.id not in layer_id_to_task:
task_id = progress.add_task(layer_state.id, start=True, total=1)
layer_id_to_task[layer_state.id] = task_id
else:
task_id = layer_id_to_task[layer_state.id]
# Update task progress description.
progress.update(
task_id, description=f"{layer_state.id}: {layer_state.status.title()}"
)
# Update task progress total and completed.
if (
layer_state.progress_detail.total is not None
and layer_state.progress_detail.current is not None
):
progress.update(
task_id,
total=layer_state.progress_detail.total,
completed=layer_state.progress_detail.current,
)
elif layer_state.status in {
DockerLayerUploadStatus.preparing,
DockerLayerUploadStatus.waiting,
}:
progress.update(
task_id,
total=1,
completed=0,
)
elif layer_state.status in {
DockerLayerUploadStatus.pushed,
DockerLayerUploadStatus.already_exists,
}:
progress.update(
task_id,
total=1,
completed=1,
)
if commit:
return self.commit(image_id)
else:
return self.get(image_id)
def commit(self, image: Union[str, Image]) -> Image:
"""
Commit an image.
:param image: The Beaker image ID, name, or object.
:raises ImageNotFound: If the image can't be found on Beaker.
:raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur.
:raises HTTPError: Any other HTTP exception that can occur.
"""
image_id = self.resolve_image(image).id
return Image.from_json(
self.request(
f"images/{image_id}",
method="PATCH",
data=ImagePatch(commit=True),
exceptions_for_status={404: ImageNotFound(self._not_found_err_msg(image))},
).json()
)
def delete(self, image: Union[str, Image]):
"""
Delete an image on Beaker.
:param image: The Beaker image ID, name, or object.
:raises ImageNotFound: If the image can't be found on Beaker.
:raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur.
:raises HTTPError: Any other HTTP exception that can occur.
"""
image_id = self.resolve_image(image).id
self.request(
f"images/{self.url_quote(image_id)}",
method="DELETE",
exceptions_for_status={404: ImageNotFound(self._not_found_err_msg(image))},
)
def rename(self, image: Union[str, Image], name: str) -> Image:
"""
Rename an image on Beaker.
:param image: The Beaker image ID, name, or object.
:param name: The new name for the image.
:raises ImageNotFound: If the image can't be found on Beaker.
:raises ValueError: If the image name is invalid.
:raises ImageConflict: If an image with the given name already exists.
:raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur.
:raises HTTPError: Any other HTTP exception that can occur.
"""
self.validate_beaker_name(name)
image_id = self.resolve_image(image).id
return Image.from_json(
self.request(
f"images/{image_id}",
method="PATCH",
data=ImagePatch(name=name),
exceptions_for_status={404: ImageNotFound(self._not_found_err_msg(image))},
).json()
)
def pull(self, image: Union[str, Image], quiet: bool = False) -> DockerImage:
"""
Pull an image from Beaker.
.. important::
This method returns a Docker :class:`~docker.models.images.Image`, not
a Beaker :class:`~beaker.data_model.image.Image`.
:param image: The Beaker image ID, name, or object.
:param quiet: If ``True``, progress won't be displayed.
:raises ImageNotFound: If the image can't be found on Beaker.
:raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur.
:raises HTTPError: Any other HTTP exception that can occur.
"""
image_id = self.resolve_image(image).id
repo = ImageRepo.from_json(self.request(f"images/{image_id}/repository").json())
from ..progress import get_image_download_progress
with get_image_download_progress(quiet) as progress:
layer_id_to_task: Dict[str, "TaskID"] = {}
for layer_state_data in self.docker.api.pull(
repo.image_tag,
stream=True,
decode=True,
auth_config={
"username": repo.auth.user,
"password": <PASSWORD>,
"server_address": repo.auth.server_address,
},
):
if "id" not in layer_state_data or "status" not in layer_state_data:
continue
if layer_state_data["status"].lower().startswith("pulling "):
continue
layer_state = DockerLayerDownloadState.from_json(layer_state_data)
# Get progress task ID for layer, initializing if it doesn't already exist.
task_id: "TaskID"
if layer_state.id not in layer_id_to_task:
task_id = progress.add_task(layer_state.id, start=True, total=1)
layer_id_to_task[layer_state.id] = task_id
else:
task_id = layer_id_to_task[layer_state.id]
# Update task progress description.
progress.update(
task_id, description=f"{layer_state.id}: {layer_state.status.title()}"
)
# Update task progress total and completed.
if (
layer_state.progress_detail.total is not None
and layer_state.progress_detail.current is not None
):
progress.update(
task_id,
total=layer_state.progress_detail.total,
completed=layer_state.progress_detail.current,
)
elif layer_state.status in {
DockerLayerDownloadStatus.waiting,
DockerLayerDownloadStatus.extracting,
DockerLayerDownloadStatus.verifying_checksum,
}:
progress.update(
task_id,
total=1,
completed=0,
)
elif layer_state.status in {
DockerLayerDownloadStatus.download_complete,
DockerLayerDownloadStatus.pull_complete,
DockerLayerDownloadStatus.already_exists,
}:
progress.update(
task_id,
total=1,
completed=1,
)
local_image = self.docker.images.get(repo.image_tag)
return local_image
def url(self, image: Union[str, Image]) -> str:
"""
Get the URL for an image.
:param image: The Beaker image ID, name, or object.
:raises ImageNotFound: If the image can't be found on Beaker.
"""
image_id = self.resolve_image(image).id
return f"{self.config.agent_address}/im/{self.url_quote(image_id)}/details"
def _not_found_err_msg(self, image: Union[str, Image]) -> str:
image = image if isinstance(image, str) else image.id
return (
f"'{image}': Make sure you're using a valid Beaker image ID or the "
f"*full* name of the image (with the account prefix, e.g. 'username/image_name')"
) | en | 0.672215 | Accessed via :data:`Beaker.image <beaker.Beaker.image>`. Get info about an image on Beaker. :param image: The Beaker image ID or name. :raises ImageNotFound: If the image can't be found on Beaker. :raises HTTPError: Any other HTTP exception that can occur. # Could be an ID or full name, so we try that first. # Now try with adding the account name. Upload a Docker image to Beaker. :param name: The name to assign to the image on Beaker. :param image_tag: The tag of the local image you're uploading. :param workspace: The workspace to upload the image to. If not specified, :data:`Beaker.config.default_workspace <beaker.Config.default_workspace>` is used. :param description: Text description of the image. :param quiet: If ``True``, progress won't be displayed. :param commit: Whether to commit the image after successful upload. :raises ValueError: If the image name is invalid. :raises ImageConflict: If an image with the given name already exists. :raises WorkspaceNotSet: If neither ``workspace`` nor :data:`Beaker.config.default_workspace <beaker.Config.default_workspace>` are set. :raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur. :raises HTTPError: Any other HTTP exception that can occur. # Get local Docker image object. # Create new image on Beaker. # Get the repo data for the Beaker image. # Tag the local image with the new tag for the Beaker image. # Push the image to Beaker. # Get progress task ID for layer, initializing if it doesn't already exist. # Update task progress description. # Update task progress total and completed. Commit an image. :param image: The Beaker image ID, name, or object. :raises ImageNotFound: If the image can't be found on Beaker. :raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur. :raises HTTPError: Any other HTTP exception that can occur. Delete an image on Beaker. :param image: The Beaker image ID, name, or object. :raises ImageNotFound: If the image can't be found on Beaker. :raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur. :raises HTTPError: Any other HTTP exception that can occur. Rename an image on Beaker. :param image: The Beaker image ID, name, or object. :param name: The new name for the image. :raises ImageNotFound: If the image can't be found on Beaker. :raises ValueError: If the image name is invalid. :raises ImageConflict: If an image with the given name already exists. :raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur. :raises HTTPError: Any other HTTP exception that can occur. Pull an image from Beaker. .. important:: This method returns a Docker :class:`~docker.models.images.Image`, not a Beaker :class:`~beaker.data_model.image.Image`. :param image: The Beaker image ID, name, or object. :param quiet: If ``True``, progress won't be displayed. :raises ImageNotFound: If the image can't be found on Beaker. :raises BeakerError: Any other :class:`~beaker.exceptions.BeakerError` type that can occur. :raises HTTPError: Any other HTTP exception that can occur. # Get progress task ID for layer, initializing if it doesn't already exist. # Update task progress description. # Update task progress total and completed. Get the URL for an image. :param image: The Beaker image ID, name, or object. :raises ImageNotFound: If the image can't be found on Beaker. | 2.431826 | 2 |
blogtrans/blogger/BloggerImporter.py | miaout17/blogtrans | 3 | 6631916 | <filename>blogtrans/blogger/BloggerImporter.py
import codecs
import xml.etree.ElementTree as ET
from datetime import datetime
from blogtrans.data import *
# Blogger Import is not implemented yet
class BloggerImporter :
def __init__(self, filename) :
self.filename = filename
def parse(self) :
blogdata = BlogData()
f = codecs.open(self.filename, "r")
xml_data = f.read()
f.close()
print(type(xml_data))
tree = ET.fromstring(xml_data)
for entry in tree.findall("{http://www.w3.org/2005/Atom}entry") :
pass
#print entry.tag
"""
self.author = ""
self.title = ""
self.date = datetime.today()
self.category = []
self.status = Article.PUBLISH
self.allow_comments = True
self.allow_pings = True
#self.convert_breaks = True
self.body = ""
self.extended_body = ""
self.excerpt = ""
self.comments = []
self.pings = []"""
return blogdata
| <filename>blogtrans/blogger/BloggerImporter.py
import codecs
import xml.etree.ElementTree as ET
from datetime import datetime
from blogtrans.data import *
# Blogger Import is not implemented yet
class BloggerImporter :
def __init__(self, filename) :
self.filename = filename
def parse(self) :
blogdata = BlogData()
f = codecs.open(self.filename, "r")
xml_data = f.read()
f.close()
print(type(xml_data))
tree = ET.fromstring(xml_data)
for entry in tree.findall("{http://www.w3.org/2005/Atom}entry") :
pass
#print entry.tag
"""
self.author = ""
self.title = ""
self.date = datetime.today()
self.category = []
self.status = Article.PUBLISH
self.allow_comments = True
self.allow_pings = True
#self.convert_breaks = True
self.body = ""
self.extended_body = ""
self.excerpt = ""
self.comments = []
self.pings = []"""
return blogdata
| en | 0.5282 | # Blogger Import is not implemented yet #print entry.tag self.author = "" self.title = "" self.date = datetime.today() self.category = [] self.status = Article.PUBLISH self.allow_comments = True self.allow_pings = True #self.convert_breaks = True self.body = "" self.extended_body = "" self.excerpt = "" self.comments = [] self.pings = [] | 2.886065 | 3 |
utils/feats2npy.py | roshansh-cmu/espnet | 0 | 6631917 | #!/usr/bin/env python
# coding: utf-8
import argparse
import os
import sys
from os.path import join
import numpy as np
from kaldiio import ReadHelper
def get_parser():
parser = argparse.ArgumentParser(
description="Convet kaldi-style features to numpy arrays",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("scp_file", type=str, help="scp file")
parser.add_argument("out_dir", type=str, help="output directory")
return parser
if __name__ == "__main__":
args = get_parser().parse_args(sys.argv[1:])
os.makedirs(args.out_dir, exist_ok=True)
with ReadHelper(f"scp:{args.scp_file}") as f:
for utt_id, arr in f:
out_path = join(args.out_dir, f"{utt_id}-feats.npy")
np.save(out_path, arr, allow_pickle=False)
sys.exit(0)
| #!/usr/bin/env python
# coding: utf-8
import argparse
import os
import sys
from os.path import join
import numpy as np
from kaldiio import ReadHelper
def get_parser():
parser = argparse.ArgumentParser(
description="Convet kaldi-style features to numpy arrays",
formatter_class=argparse.ArgumentDefaultsHelpFormatter,
)
parser.add_argument("scp_file", type=str, help="scp file")
parser.add_argument("out_dir", type=str, help="output directory")
return parser
if __name__ == "__main__":
args = get_parser().parse_args(sys.argv[1:])
os.makedirs(args.out_dir, exist_ok=True)
with ReadHelper(f"scp:{args.scp_file}") as f:
for utt_id, arr in f:
out_path = join(args.out_dir, f"{utt_id}-feats.npy")
np.save(out_path, arr, allow_pickle=False)
sys.exit(0)
| en | 0.325294 | #!/usr/bin/env python # coding: utf-8 | 3.110685 | 3 |
models/models.py | Luodian/Learning-Invariant-Representations-and-Risks | 17 | 6631918 | import torch
models = {}
__all__ = ['get_model']
def register_model(name):
def decorator(cls):
models[name] = cls
return cls
return decorator
def get_model(name, **args):
net = models[name].create(args)
if torch.cuda.is_available():
net = net.cuda()
return net
| import torch
models = {}
__all__ = ['get_model']
def register_model(name):
def decorator(cls):
models[name] = cls
return cls
return decorator
def get_model(name, **args):
net = models[name].create(args)
if torch.cuda.is_available():
net = net.cuda()
return net
| none | 1 | 2.58917 | 3 |
|
test/unit/test_util.py | lukasfro/bayesopt4ros | 4 | 6631919 | #!/usr/bin/env python3
import numpy as np
import os
import pytest
import torch
from botorch.exceptions import BotorchTensorDimensionError
from botorch.utils.containers import TrainingData
from scipy.optimize import Bounds
from bayesopt4ros.data_handler import DataHandler
@pytest.fixture(params=[1, 3, 10])
def test_data(request):
"""Set up a simple dataset to test the DataHandler class. The dimensionality
of the input data is specified by the fixture parameters."""
dim, n = request.param, 1000
x = torch.rand(n, dim) * 10 - 5
y = 3 + 0.5 * torch.randn(n, 1)
return TrainingData(Xs=x, Ys=y)
def test_data_handling(test_data):
dim = test_data.Xs.shape[1]
bounds = Bounds(lb=-5 * np.ones((dim,)), ub=5 * np.ones((dim,)))
# Using initilizer for setting data
dh = DataHandler(x=test_data.Xs, y=test_data.Ys)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, test_data.Xs)
np.testing.assert_array_equal(y, test_data.Ys)
d = dh.get_xy(as_dict=True)
np.testing.assert_array_equal(d["train_inputs"], test_data.Xs)
np.testing.assert_array_equal(d["train_targets"], test_data.Ys)
# Using setter for setting data
dh = DataHandler(bounds)
np.testing.assert_equal(dh.n_data, 0)
dh.set_xy(x=test_data.Xs, y=test_data.Ys)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, test_data.Xs)
np.testing.assert_array_equal(y, test_data.Ys)
d = dh.get_xy(as_dict=True)
np.testing.assert_array_equal(d["train_inputs"], test_data.Xs)
np.testing.assert_array_equal(d["train_targets"], test_data.Ys)
def test_adding_data(test_data):
dim = test_data.Xs.shape[1]
# Single data point
dh = DataHandler(x=test_data.Xs, y=test_data.Ys)
x_new, y_new = torch.rand(1, dim), torch.randn(1, 1)
dh.add_xy(x=x_new, y=y_new)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, torch.cat((test_data.Xs, x_new)))
np.testing.assert_array_equal(y, torch.cat((test_data.Ys, y_new)))
np.testing.assert_equal(dh.n_data, test_data.Xs.shape[0] + 1)
np.testing.assert_equal(len(dh), test_data.Xs.shape[0] + 1)
# Multiple data points
dh = DataHandler(x=test_data.Xs, y=test_data.Ys)
x_new, y_new = torch.rand(10, dim), torch.randn(10, 1)
dh.add_xy(x=x_new, y=y_new)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, torch.cat((test_data.Xs, x_new)))
np.testing.assert_array_equal(y, torch.cat((test_data.Ys, y_new)))
np.testing.assert_equal(dh.n_data, test_data.Xs.shape[0] + 10)
np.testing.assert_equal(len(dh), test_data.Xs.shape[0] + 10)
# Adding to empty DataHandler
dh = DataHandler()
x_new, y_new = torch.rand(1, dim), torch.randn(1, 1)
dh.add_xy(x=x_new, y=y_new)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, x_new)
np.testing.assert_array_equal(y, y_new)
np.testing.assert_equal(dh.n_data, 1)
np.testing.assert_equal(len(dh), 1)
def test_wrong_inputs(test_data):
# Unequal number of inputs/outputs
with pytest.raises(BotorchTensorDimensionError):
DataHandler(x=test_data.Xs[:5], y=test_data.Ys[:6])
def test_from_single_file():
dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
for dim in [1, 2]:
data_file = os.path.join(dir, f"test_data_{dim}d_0.yaml")
dh = DataHandler.from_file(data_file)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, dim * torch.ones(3, dim))
np.testing.assert_array_equal(y, dim * torch.ones(3, 1))
def test_from_multiple_files():
dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
for dim in [1, 2]:
data_files = [
os.path.join(dir, f"test_data_{dim}d_{i}.yaml") for i in [0, 1, 2]
]
dh = DataHandler.from_file(data_files)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, dim * torch.ones(max(3 * dim, 6), dim))
np.testing.assert_array_equal(y, dim * torch.ones(max(3 * dim, 6), 1))
def test_from_incompatible_files():
dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
data_files = [
os.path.join(dir, "test_data_1d_0.yaml"),
os.path.join(dir, "test_data_2d_0.yaml"),
]
with pytest.raises(BotorchTensorDimensionError):
DataHandler.from_file(data_files)
| #!/usr/bin/env python3
import numpy as np
import os
import pytest
import torch
from botorch.exceptions import BotorchTensorDimensionError
from botorch.utils.containers import TrainingData
from scipy.optimize import Bounds
from bayesopt4ros.data_handler import DataHandler
@pytest.fixture(params=[1, 3, 10])
def test_data(request):
"""Set up a simple dataset to test the DataHandler class. The dimensionality
of the input data is specified by the fixture parameters."""
dim, n = request.param, 1000
x = torch.rand(n, dim) * 10 - 5
y = 3 + 0.5 * torch.randn(n, 1)
return TrainingData(Xs=x, Ys=y)
def test_data_handling(test_data):
dim = test_data.Xs.shape[1]
bounds = Bounds(lb=-5 * np.ones((dim,)), ub=5 * np.ones((dim,)))
# Using initilizer for setting data
dh = DataHandler(x=test_data.Xs, y=test_data.Ys)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, test_data.Xs)
np.testing.assert_array_equal(y, test_data.Ys)
d = dh.get_xy(as_dict=True)
np.testing.assert_array_equal(d["train_inputs"], test_data.Xs)
np.testing.assert_array_equal(d["train_targets"], test_data.Ys)
# Using setter for setting data
dh = DataHandler(bounds)
np.testing.assert_equal(dh.n_data, 0)
dh.set_xy(x=test_data.Xs, y=test_data.Ys)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, test_data.Xs)
np.testing.assert_array_equal(y, test_data.Ys)
d = dh.get_xy(as_dict=True)
np.testing.assert_array_equal(d["train_inputs"], test_data.Xs)
np.testing.assert_array_equal(d["train_targets"], test_data.Ys)
def test_adding_data(test_data):
dim = test_data.Xs.shape[1]
# Single data point
dh = DataHandler(x=test_data.Xs, y=test_data.Ys)
x_new, y_new = torch.rand(1, dim), torch.randn(1, 1)
dh.add_xy(x=x_new, y=y_new)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, torch.cat((test_data.Xs, x_new)))
np.testing.assert_array_equal(y, torch.cat((test_data.Ys, y_new)))
np.testing.assert_equal(dh.n_data, test_data.Xs.shape[0] + 1)
np.testing.assert_equal(len(dh), test_data.Xs.shape[0] + 1)
# Multiple data points
dh = DataHandler(x=test_data.Xs, y=test_data.Ys)
x_new, y_new = torch.rand(10, dim), torch.randn(10, 1)
dh.add_xy(x=x_new, y=y_new)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, torch.cat((test_data.Xs, x_new)))
np.testing.assert_array_equal(y, torch.cat((test_data.Ys, y_new)))
np.testing.assert_equal(dh.n_data, test_data.Xs.shape[0] + 10)
np.testing.assert_equal(len(dh), test_data.Xs.shape[0] + 10)
# Adding to empty DataHandler
dh = DataHandler()
x_new, y_new = torch.rand(1, dim), torch.randn(1, 1)
dh.add_xy(x=x_new, y=y_new)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, x_new)
np.testing.assert_array_equal(y, y_new)
np.testing.assert_equal(dh.n_data, 1)
np.testing.assert_equal(len(dh), 1)
def test_wrong_inputs(test_data):
# Unequal number of inputs/outputs
with pytest.raises(BotorchTensorDimensionError):
DataHandler(x=test_data.Xs[:5], y=test_data.Ys[:6])
def test_from_single_file():
dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
for dim in [1, 2]:
data_file = os.path.join(dir, f"test_data_{dim}d_0.yaml")
dh = DataHandler.from_file(data_file)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, dim * torch.ones(3, dim))
np.testing.assert_array_equal(y, dim * torch.ones(3, 1))
def test_from_multiple_files():
dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
for dim in [1, 2]:
data_files = [
os.path.join(dir, f"test_data_{dim}d_{i}.yaml") for i in [0, 1, 2]
]
dh = DataHandler.from_file(data_files)
x, y = dh.get_xy()
np.testing.assert_array_equal(x, dim * torch.ones(max(3 * dim, 6), dim))
np.testing.assert_array_equal(y, dim * torch.ones(max(3 * dim, 6), 1))
def test_from_incompatible_files():
dir = os.path.join(os.path.dirname(os.path.realpath(__file__)), "data")
data_files = [
os.path.join(dir, "test_data_1d_0.yaml"),
os.path.join(dir, "test_data_2d_0.yaml"),
]
with pytest.raises(BotorchTensorDimensionError):
DataHandler.from_file(data_files)
| en | 0.515579 | #!/usr/bin/env python3 Set up a simple dataset to test the DataHandler class. The dimensionality of the input data is specified by the fixture parameters. # Using initilizer for setting data # Using setter for setting data # Single data point # Multiple data points # Adding to empty DataHandler # Unequal number of inputs/outputs | 2.362616 | 2 |
libs/QtGUI.py | haakonsh/FSR-Desktop | 0 | 6631920 | try:
import math
import sys
from PyQt4 import QtGui, QtCore
from random import randint
from time import sleep
except ImportError as ie:
print (str(ie))
# Catched if any packages are missing
missing = str(ie).split("named")[1]
print("Software needs %s installed\nPlease run pip install %s and restart\r\n" % (missing, missing))
input("Press any key to exit...")
exit()
except ValueError as e:
print (str(e))
input("Press any key to exit...")
exit()
def guiAppInit():
try:
app = QtGui.QApplication(sys.argv)
except Exception as e:
print("Unable to start QApplication.")
print(str(e))
exit()
return app
def samplesToGui(device, qt_app = None):
try:
color_list = [0] * device.number_of_sensors
for i in range(0, len(device.sensorMappedSamples[0])):
for j in range(0, device.number_of_sensors):
# 60 is added to the results in order to offset the HSV color. The HSV color RED is centered
# around 0/360, this poses a problem where the minimum and maximum values are equal in color.
# The fix is to offset the HSV value by 60.
hue = (device.sensorMappedSamples[j][i]) + 60
temp_color = QtGui.QColor()
temp_color.setHsv(hue, 255, 255, 255) # saturation[j][i], 255, 255)
temp_brush = QtGui.QBrush(temp_color)
color_list[j] = temp_brush
# TODO Call HexGridWidget.updateColor(colorList)
qt_app.display.updateColors(color_list)
qt_app.display.repaint()
sleep(0.02)
except Exception as e:
print("Unable to update colors.")
print(str(e))
exit()
class Window(QtGui.QMainWindow):
def __init__(self, number_of_hexagons, number_of_samples, parent = None):
super(Window, self).__init__(parent)
self.number_of_sensors = number_of_hexagons
self.number_of_samples = number_of_samples
self.setUpdatesEnabled(True) # Needed in order to trigger the paintEvent of a QWidget
self.setGeometry(100, 35, 750, 940) # (pos x, pos y, width, height)
self.setWindowTitle('MainWindow')
self.mainWidget = QtGui.QWidget(self)
self.verticalLayout = QtGui.QVBoxLayout(self)
self.mainWidget.setLayout(self.verticalLayout) # Vertical division of mainWidget
self.display = HexGridWidget(vertices = 6, radius = 40, angularOffset = 0, number_of_hexagons = number_of_hexagons)
self.verticalLayout.addWidget(self.display) # Adds the hex grid to the mainWidget
self.control = QtGui.QWidget(self)
self.controlLayout = QtGui.QHBoxLayout(self) # Horizontal division of the control QWidget
self.playButton = QtGui.QPushButton("Play", self)
self.stopButton = QtGui.QPushButton('Stop', self)
self.resetButton = QtGui.QPushButton('Reset', self)
self.playButton.clicked.connect(self.play)
self.stopButton.clicked.connect(self.stop)
self.resetButton.clicked.connect(self.reset)
self.controlLayout.addWidget(self.playButton)
self.controlLayout.addWidget(self.stopButton)
self.controlLayout.addWidget(self.resetButton)
self.verticalLayout.addLayout(self.controlLayout) # Adds the control buttons to the mainWidget
self.setCentralWidget(self.mainWidget)
self.mainWidget.resize(self.mainWidget.sizeHint())
self.show() # Triggers the Window's paintEvent
print ("Window initialized!")
self.colors = []
#TODO Remove block
for i in range(0, number_of_hexagons):
self.colors.append(QtGui.QBrush(QtGui.QColor(randint(0, 255), randint(0, 255), randint(0, 255), 255)))
self.display.updateColors(self.colors)
# End of block
def play(self):
print ("Clicked Play!")
#TODO Decode FSR hex value to a RGB int
def stop(self):
print ("Clicked Stop!")
#TODO Stop playback of FSR
def reset(self):
print ("Clicked Reset!")
#TODO Reset playback
for i in range(0, self.number_of_samples):
self.display.repaint()
sleep(0.02)
class HexGridWidget(QtGui.QWidget):
def __init__(self, vertices, radius, number_of_hexagons, angularOffset = 0, parent = None):
super(HexGridWidget, self).__init__(parent)
self.number_of_hexagons = number_of_hexagons
self.setGeometry(100, 35, 600, 840)
self.setUpdatesEnabled(True)
self.pen = QtGui.QPen(QtGui.QColor(0, 0, 0))
self.pen.setWidth = 3
self.brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 255))
self.brushList = []
self.polygon = []
for i in range(0, 6):
for j in range(0, 6):
self.polygon.append(self.createHexagon(vertices, radius, angularOffset))
# Move the polygon points to the next position in the grid
offsetRow = self.polygon[i * 6 + j].at(1) - self.polygon[i * 6 + j].at(3)
offsetCol = self.polygon[i * 6 + j].at(5) - self.polygon[i * 6 + j].at(3)
self.polygon[i * 6 + j].translate(j * offsetCol.x() + i * offsetRow.x(),
j * offsetCol.y() + i * offsetRow.y())
for i in range(0, self.number_of_hexagons):
self.brushList.append(QtGui.QBrush(QtGui.QColor(255, 255, 255, 255)))
def createHexagon(self, n, r, s):
hexagon = QtGui.QPolygon()
w = 360 / n
for i in range(n):
t = w * i + s
x = r * math.cos(math.radians(t))
y = r * math.sin(math.radians(t))
hexagon.append(QtCore.QPoint(x + r, (self.height() / 2) + y))
return hexagon
def updateColors(self, colorList):
for i in range(0, self.number_of_hexagons):
self.brushList[i] = colorList[i]
#return self.repaint()
def paintEvent(self, event):
painter = QtGui.QPainter(self)
painter.setPen(self.pen)
painter.setBrush(self.brush)
for i in range(0, self.number_of_hexagons):
painter.setBrush(self.brushList[i])
painter.drawPolygon(self.polygon[i])
| try:
import math
import sys
from PyQt4 import QtGui, QtCore
from random import randint
from time import sleep
except ImportError as ie:
print (str(ie))
# Catched if any packages are missing
missing = str(ie).split("named")[1]
print("Software needs %s installed\nPlease run pip install %s and restart\r\n" % (missing, missing))
input("Press any key to exit...")
exit()
except ValueError as e:
print (str(e))
input("Press any key to exit...")
exit()
def guiAppInit():
try:
app = QtGui.QApplication(sys.argv)
except Exception as e:
print("Unable to start QApplication.")
print(str(e))
exit()
return app
def samplesToGui(device, qt_app = None):
try:
color_list = [0] * device.number_of_sensors
for i in range(0, len(device.sensorMappedSamples[0])):
for j in range(0, device.number_of_sensors):
# 60 is added to the results in order to offset the HSV color. The HSV color RED is centered
# around 0/360, this poses a problem where the minimum and maximum values are equal in color.
# The fix is to offset the HSV value by 60.
hue = (device.sensorMappedSamples[j][i]) + 60
temp_color = QtGui.QColor()
temp_color.setHsv(hue, 255, 255, 255) # saturation[j][i], 255, 255)
temp_brush = QtGui.QBrush(temp_color)
color_list[j] = temp_brush
# TODO Call HexGridWidget.updateColor(colorList)
qt_app.display.updateColors(color_list)
qt_app.display.repaint()
sleep(0.02)
except Exception as e:
print("Unable to update colors.")
print(str(e))
exit()
class Window(QtGui.QMainWindow):
def __init__(self, number_of_hexagons, number_of_samples, parent = None):
super(Window, self).__init__(parent)
self.number_of_sensors = number_of_hexagons
self.number_of_samples = number_of_samples
self.setUpdatesEnabled(True) # Needed in order to trigger the paintEvent of a QWidget
self.setGeometry(100, 35, 750, 940) # (pos x, pos y, width, height)
self.setWindowTitle('MainWindow')
self.mainWidget = QtGui.QWidget(self)
self.verticalLayout = QtGui.QVBoxLayout(self)
self.mainWidget.setLayout(self.verticalLayout) # Vertical division of mainWidget
self.display = HexGridWidget(vertices = 6, radius = 40, angularOffset = 0, number_of_hexagons = number_of_hexagons)
self.verticalLayout.addWidget(self.display) # Adds the hex grid to the mainWidget
self.control = QtGui.QWidget(self)
self.controlLayout = QtGui.QHBoxLayout(self) # Horizontal division of the control QWidget
self.playButton = QtGui.QPushButton("Play", self)
self.stopButton = QtGui.QPushButton('Stop', self)
self.resetButton = QtGui.QPushButton('Reset', self)
self.playButton.clicked.connect(self.play)
self.stopButton.clicked.connect(self.stop)
self.resetButton.clicked.connect(self.reset)
self.controlLayout.addWidget(self.playButton)
self.controlLayout.addWidget(self.stopButton)
self.controlLayout.addWidget(self.resetButton)
self.verticalLayout.addLayout(self.controlLayout) # Adds the control buttons to the mainWidget
self.setCentralWidget(self.mainWidget)
self.mainWidget.resize(self.mainWidget.sizeHint())
self.show() # Triggers the Window's paintEvent
print ("Window initialized!")
self.colors = []
#TODO Remove block
for i in range(0, number_of_hexagons):
self.colors.append(QtGui.QBrush(QtGui.QColor(randint(0, 255), randint(0, 255), randint(0, 255), 255)))
self.display.updateColors(self.colors)
# End of block
def play(self):
print ("Clicked Play!")
#TODO Decode FSR hex value to a RGB int
def stop(self):
print ("Clicked Stop!")
#TODO Stop playback of FSR
def reset(self):
print ("Clicked Reset!")
#TODO Reset playback
for i in range(0, self.number_of_samples):
self.display.repaint()
sleep(0.02)
class HexGridWidget(QtGui.QWidget):
def __init__(self, vertices, radius, number_of_hexagons, angularOffset = 0, parent = None):
super(HexGridWidget, self).__init__(parent)
self.number_of_hexagons = number_of_hexagons
self.setGeometry(100, 35, 600, 840)
self.setUpdatesEnabled(True)
self.pen = QtGui.QPen(QtGui.QColor(0, 0, 0))
self.pen.setWidth = 3
self.brush = QtGui.QBrush(QtGui.QColor(255, 255, 255, 255))
self.brushList = []
self.polygon = []
for i in range(0, 6):
for j in range(0, 6):
self.polygon.append(self.createHexagon(vertices, radius, angularOffset))
# Move the polygon points to the next position in the grid
offsetRow = self.polygon[i * 6 + j].at(1) - self.polygon[i * 6 + j].at(3)
offsetCol = self.polygon[i * 6 + j].at(5) - self.polygon[i * 6 + j].at(3)
self.polygon[i * 6 + j].translate(j * offsetCol.x() + i * offsetRow.x(),
j * offsetCol.y() + i * offsetRow.y())
for i in range(0, self.number_of_hexagons):
self.brushList.append(QtGui.QBrush(QtGui.QColor(255, 255, 255, 255)))
def createHexagon(self, n, r, s):
hexagon = QtGui.QPolygon()
w = 360 / n
for i in range(n):
t = w * i + s
x = r * math.cos(math.radians(t))
y = r * math.sin(math.radians(t))
hexagon.append(QtCore.QPoint(int(x + r), int((self.height() / 2) + y)))  # QPoint takes integer coordinates
return hexagon
def updateColors(self, colorList):
for i in range(0, self.number_of_hexagons):
self.brushList[i] = colorList[i]
#return self.repaint()
def paintEvent(self, event):
painter = QtGui.QPainter(self)
painter.setPen(self.pen)
painter.setBrush(self.brush)
for i in range(0, self.number_of_hexagons):
painter.setBrush(self.brushList[i])
painter.drawPolygon(self.polygon[i])
| en | 0.717492 | # Catched if any packages are missing # 60 is added to the results in order to offset the HSV color. The HSV color RED is centered # around 0/360, this poses a problem where the minimum and maximum values are equal in color. # The fix is to offset the HSV value by 60. # saturation[j][i], 255, 255) # TODO Call HexGridWidget.updateColor(colorList) # Needed in order to trigger the paintEvent of a QWidget # (pos x, pos y, width, height) # Vertical division of mainWidget # Adds the hex grid to the mainWidget # Horizontal division of the control QWidget # Adds the control buttons to the mainWidget # Triggers the Window's paintEvent #TODO Remove block # End of block #TODO Decode FSR hex value to a RGB int #TODO Stop playback of FSR #TODO Reset playback # Move the polygon points to the next position in the grid #return self.repaint() | 2.804955 | 3 |
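The hue-offset trick described in the comments of the row above can be sanity-checked without Qt. A minimal sketch, assuming sensor samples already mapped into a 0-299 hue range (so that +60 stays within 0-359):

import colorsys

for sample in (0, 150, 299):          # hypothetical mapped sensor values
    hue = sample + 60                 # same +60 offset used with QColor.setHsv
    r, g, b = colorsys.hsv_to_rgb((hue % 360) / 360.0, 1.0, 1.0)
    print(sample, '->', round(r, 2), round(g, 2), round(b, 2))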
src/utils/benchmark.py | krypt-n/vroom-scripts | 0 | 6631921 | # -*- coding: utf-8 -*-
import math
# TSPLIB canonic rounding.
def nint(x):
return int(x + 0.5)
def euc_2D(c1, c2, PRECISION = 1):
xd = c1[0] - c2[0]
yd = c1[1] - c2[1]
return nint(PRECISION * math.sqrt(xd * xd + yd * yd))
# Retrieve value for a one-liner TSPLIB entry.
def get_value(key, lines):
result = None
match = list(filter(lambda s: s.startswith(key + ':'), lines))  # materialize so len()/indexing work on Python 3
if len(match) > 0:
result = match[0][len(key) + 1:].strip()
else:
# Also try with a space.
match = list(filter(lambda s: s.startswith(key + ' :'), lines))
if len(match) > 0:
result = match[0][len(key) + 2:].strip()
return result
def parse_node_coords(s):
# Separate index and coordinates.
coords = s.strip().split(' ')
# Remove empty entries generated by multiple spaces.
return [v for v in coords if len(v) > 0]
# Compute matrix based on ordered list of coordinates.
def get_matrix(coords, PRECISION = 1):
N = len(coords)
matrix = [[0 for i in range(N)] for j in range(N)]
for i in range(N):
for j in range(i + 1, N):
value = euc_2D(coords[i], coords[j], PRECISION)
matrix[i][j] = value
matrix[j][i] = value
return matrix
| # -*- coding: utf-8 -*-
import math
# TSPLIB canonic rounding.
def nint(x):
return int(x + 0.5)
def euc_2D(c1, c2, PRECISION = 1):
xd = c1[0] - c2[0]
yd = c1[1] - c2[1]
return nint(PRECISION * math.sqrt(xd * xd + yd * yd))
# Retrieve value for a one-liner TSPLIB entry.
def get_value(key, lines):
result = None
match = list(filter(lambda s: s.startswith(key + ':'), lines))  # materialize so len()/indexing work on Python 3
if len(match) > 0:
result = match[0][len(key) + 1:].strip()
else:
# Also try with a space.
match = list(filter(lambda s: s.startswith(key + ' :'), lines))
if len(match) > 0:
result = match[0][len(key) + 2:].strip()
return result
def parse_node_coords(s):
# Separate index and coordinates.
coords = s.strip().split(' ')
# Remove empty entries generated by multiple spaces.
return [v for v in coords if len(v) > 0]
# Compute matrix based on ordered list of coordinates.
def get_matrix(coords, PRECISION = 1):
N = len(coords)
matrix = [[0 for i in range(N)] for j in range(N)]
for i in range(N):
for j in range(i + 1, N):
value = euc_2D(coords[i], coords[j], PRECISION)
matrix[i][j] = value
matrix[j][i] = value
return matrix
| en | 0.835303 | # -*- coding: utf-8 -*- # TSPLIB canonic rounding. # Retrieve value for a one-liner TSPLIB entry. # Also try with a space. # Separate index and coordinates. # Remove empty entries generated by multiple spaces. # Compute matrix based on ordered list of coordinates. | 2.759392 | 3 |
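A small usage sketch for the TSPLIB helpers above, using made-up coordinates (a real instance would supply them via a NODE_COORD_SECTION):

coords = [(0, 0), (3, 4), (6, 8)]     # hypothetical 2D nodes
print(nint(4.5))                      # 5 -- canonic rounding
print(euc_2D(coords[0], coords[1]))   # 5
print(get_matrix(coords))             # [[0, 5, 10], [5, 0, 5], [10, 5, 0]]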
python/ray/train/checkpoint.py | johnpjust/ray | 21,382 | 6631922 | from dataclasses import dataclass
from typing import Optional
from ray.train.constants import TIMESTAMP
MAX = "max"
MIN = "min"
@dataclass
class CheckpointStrategy:
"""Configurable parameters for defining the Train checkpointing strategy.
Default behavior is to persist all checkpoints to disk. If
``num_to_keep`` is set, the default retention policy is to keep the
checkpoints with maximum timestamp, i.e. the most recent checkpoints.
Args:
num_to_keep (Optional[int]): The number of checkpoints to keep
on disk for this run. If a checkpoint is persisted to disk after
there are already this many checkpoints, then an existing
checkpoint will be deleted. If this is ``None`` then checkpoints
will not be deleted. If this is ``0`` then no checkpoints will be
persisted to disk.
checkpoint_score_attribute (str): The attribute that will be used to
score checkpoints to determine which checkpoints should be kept
on disk when there are greater than ``num_to_keep`` checkpoints.
This attribute must be a key from the checkpoint
dictionary which has a numerical value.
checkpoint_score_order (str): Either "max" or "min".
If "max", then checkpoints with highest values of
``checkpoint_score_attribute`` will be kept.
If "min", then checkpoints with lowest values of
``checkpoint_score_attribute`` will be kept.
"""
num_to_keep: Optional[int] = None
checkpoint_score_attribute: str = TIMESTAMP
checkpoint_score_order: str = MAX
def __post_init__(self):
if self.num_to_keep:
# TODO(matt): Implement num_to_keep deletion.
raise NotImplementedError("Deleting checkpoints is not yet "
"supported. Please use None to persist "
"all checkpoints or 0 to not persist "
"any checkpoints.")
if self.checkpoint_score_order not in (MAX, MIN):
raise ValueError(f"checkpoint_score_order must be either "
f"\"{MAX}\" or \"{MIN}\".")
| from dataclasses import dataclass
from typing import Optional
from ray.train.constants import TIMESTAMP
MAX = "max"
MIN = "min"
@dataclass
class CheckpointStrategy:
"""Configurable parameters for defining the Train checkpointing strategy.
Default behavior is to persist all checkpoints to disk. If
``num_to_keep`` is set, the default retention policy is to keep the
checkpoints with maximum timestamp, i.e. the most recent checkpoints.
Args:
num_to_keep (Optional[int]): The number of checkpoints to keep
on disk for this run. If a checkpoint is persisted to disk after
there are already this many checkpoints, then an existing
checkpoint will be deleted. If this is ``None`` then checkpoints
will not be deleted. If this is ``0`` then no checkpoints will be
persisted to disk.
checkpoint_score_attribute (str): The attribute that will be used to
score checkpoints to determine which checkpoints should be kept
on disk when there are greater than ``num_to_keep`` checkpoints.
This attribute must be a key from the checkpoint
dictionary which has a numerical value.
checkpoint_score_order (str): Either "max" or "min".
If "max", then checkpoints with highest values of
``checkpoint_score_attribute`` will be kept.
If "min", then checkpoints with lowest values of
``checkpoint_score_attribute`` will be kept.
"""
num_to_keep: Optional[int] = None
checkpoint_score_attribute: str = TIMESTAMP
checkpoint_score_order: str = MAX
def __post_init__(self):
if self.num_to_keep:
# TODO(matt): Implement num_to_keep deletion.
raise NotImplementedError("Deleting checkpoints is not yet "
"supported. Please use None to persist "
"all checkpoints or 0 to not persist "
"any checkpoints.")
if self.checkpoint_score_order not in (MAX, MIN):
raise ValueError(f"checkpoint_score_order must be either "
f"\"{MAX}\" or \"{MIN}\".")
| en | 0.853801 | Configurable parameters for defining the Train checkpointing strategy. Default behavior is to persist all checkpoints to disk. If ``num_to_keep`` is set, the default retention policy is to keep the checkpoints with maximum timestamp, i.e. the most recent checkpoints. Args: num_to_keep (Optional[int]): The number of checkpoints to keep on disk for this run. If a checkpoint is persisted to disk after there are already this many checkpoints, then an existing checkpoint will be deleted. If this is ``None`` then checkpoints will not be deleted. If this is ``0`` then no checkpoints will be persisted to disk. checkpoint_score_attribute (str): The attribute that will be used to score checkpoints to determine which checkpoints should be kept on disk when there are greater than ``num_to_keep`` checkpoints. This attribute must be a key from the checkpoint dictionary which has a numerical value. checkpoint_score_order (str). Either "max" or "min". If "max", then checkpoints with highest values of ``checkpoint_score_attribute`` will be kept. If "min", then checkpoints with lowest values of ``checkpoint_score_attribute`` will be kept. # TODO(matt): Implement num_to_keep deletion. | 3.531015 | 4 |
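A minimal construction sketch for the dataclass above; the field names come straight from the definition, while the metric name is an assumption:

strategy = CheckpointStrategy(
    num_to_keep=None,                       # persist everything (deletion is not implemented yet)
    checkpoint_score_attribute="val_loss",  # hypothetical key in the checkpoint dict
    checkpoint_score_order="min",           # lower loss = better checkpoint
)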
doctor_visits/delphi_doctor_visits/smooth.py | qx-teo/covidcast-indicators | 8 | 6631923 | <reponame>qx-teo/covidcast-indicators
"""
This file contains various filters used to smooth the 1-d signals.
Code is courtesy of <NAME> (minor adjustments by Maria).
Author: <NAME>
Created: 2020-04-16
"""
import numpy as np
def moving_avg(x, y, k=7):
"""Smooth the y-values using a rolling window with k observations.
Args:
x: indexing array of the signal
y: one dimensional signal to smooth
k: the number of observations to average
Returns: tuple of indexing array, without the first k-1 obs, and smoothed values
"""
n = len(y)
sy = np.zeros((n - k + 1, 1))
for i in range(len(sy)):
sy[i] = np.mean(y[i : (i + k)])
return x[(k - 1) :], sy
def padded_moving_avg(y, k=7):
"""Smooth the y-values using a rolling window with k observations. Pad the first k.
Args:
y: one dimensional signal to smooth.
k: the number of observations to average
Returns: smoothed values, where the first k-1 obs are padded with 0
"""
n = len(y)
sy = np.zeros((n - k + 1, 1))
for i in range(len(sy)):
sy[i] = np.mean(y[i : (i + k)])
# pad first k obs with 0
for i in range(k - 1):
sy = np.insert(sy, i, 0)
return sy.reshape(-1, 1)
def left_gauss(y, h=100):
"""Smooth the y-values using a left Gaussian filter.
Args:
y: one dimensional signal to smooth.
h: smoothing bandwidth (in terms of variance)
Returns: a smoothed 1D signal.
"""
t = np.zeros_like(y)
n = len(t)
indices = np.arange(n)
for i in range(1, n):
wts = np.exp(-(((i - 1) - indices[:i]) ** 2) / h)
t[i] = np.dot(wts, y[:i]) / np.sum(wts)
return t
def left_gauss_linear(s, h=250):
"""Smooth the y-values using a local linear left Gaussian filter.
Args:
s: one dimensional signal to smooth.
h: smoothing bandwidth (in terms of variance)
Returns: a smoothed 1D signal.
"""
n = len(s)
t = np.zeros_like(s)
X = np.vstack([np.ones(n), np.arange(n)]).T
for idx in range(n):
wts = np.exp(-((np.arange(idx + 1) - idx) ** 2) / h)
XwX = np.dot(X[: (idx + 1), :].T * wts, X[: (idx + 1), :])
Xwy = np.dot(X[: (idx + 1), :].T * wts, s[: (idx + 1)].reshape(-1, 1))
try:
beta = np.linalg.solve(XwX, Xwy)
t[idx] = np.dot(X[: (idx + 1), :], beta)[-1]
except np.linalg.LinAlgError:
t[idx] = np.nan
return t
| """
This file contains various filters used to smooth the 1-d signals.
Code is courtesy of <NAME> (minor adjustments by Maria).
Author: <NAME>
Created: 2020-04-16
"""
import numpy as np
def moving_avg(x, y, k=7):
"""Smooth the y-values using a rolling window with k observations.
Args:
x: indexing array of the signal
y: one dimensional signal to smooth
k: the number of observations to average
Returns: tuple of indexing array, without the first k-1 obs, and smoothed values
"""
n = len(y)
sy = np.zeros((n - k + 1, 1))
for i in range(len(sy)):
sy[i] = np.mean(y[i : (i + k)])
return x[(k - 1) :], sy
def padded_moving_avg(y, k=7):
"""Smooth the y-values using a rolling window with k observations. Pad the first k.
Args:
y: one dimensional signal to smooth.
k: the number of observations to average
Returns: smoothed values, where the first k-1 obs are padded with 0
"""
n = len(y)
sy = np.zeros((n - k + 1, 1))
for i in range(len(sy)):
sy[i] = np.mean(y[i : (i + k)])
# pad first k obs with 0
for i in range(k - 1):
sy = np.insert(sy, i, 0)
return sy.reshape(-1, 1)
def left_gauss(y, h=100):
"""Smooth the y-values using a left Gaussian filter.
Args:
y: one dimensional signal to smooth.
h: smoothing bandwidth (in terms of variance)
Returns: a smoothed 1D signal.
"""
t = np.zeros_like(y)
n = len(t)
indices = np.arange(n)
for i in range(1, n):
wts = np.exp(-(((i - 1) - indices[:i]) ** 2) / h)
t[i] = np.dot(wts, y[:i]) / np.sum(wts)
return t
def left_gauss_linear(s, h=250):
"""Smooth the y-values using a local linear left Gaussian filter.
Args:
s: one dimensional signal to smooth.
h: smoothing bandwidth (in terms of variance)
Returns: a smoothed 1D signal.
"""
n = len(s)
t = np.zeros_like(s)
X = np.vstack([np.ones(n), np.arange(n)]).T
for idx in range(n):
wts = np.exp(-((np.arange(idx + 1) - idx) ** 2) / h)
XwX = np.dot(X[: (idx + 1), :].T * wts, X[: (idx + 1), :])
Xwy = np.dot(X[: (idx + 1), :].T * wts, s[: (idx + 1)].reshape(-1, 1))
try:
beta = np.linalg.solve(XwX, Xwy)
t[idx] = np.dot(X[: (idx + 1), :], beta)[-1]
except np.linalg.LinAlgError:
t[idx] = np.nan
return t | en | 0.760162 | This file contains various filters used to smooth the 1-d signals. Code is courtesy of <NAME> (minor adjustments by Maria). Author: <NAME> Created: 2020-04-16 Smooth the y-values using a rolling window with k observations. Args: x: indexing array of the signal y: one dimensional signal to smooth k: the number of observations to average Returns: tuple of indexing array, without the first k-1 obs, and smoothed values Smooth the y-values using a rolling window with k observations. Pad the first k. Args: y: one dimensional signal to smooth. k: the number of observations to average Returns: smoothed values, where the first k-1 obs are padded with 0 # pad first k obs with 0 Smooth the y-values using a left Gaussian filter. Args: y: one dimensional signal to smooth. h: smoothing bandwidth (in terms of variance) Returns: a smoothed 1D signal. Smooth the y-values using a local linear left Gaussian filter. Args: y: one dimensional signal to smooth. h: smoothing bandwidth (in terms of variance) Returns: a smoothed 1D signal. | 3.571478 | 4 |
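A quick numeric check of moving_avg from the row above, assuming numpy arrays as inputs:

import numpy as np

x = np.arange(5)
y = np.array([1., 2., 3., 4., 5.])
xs, ys = moving_avg(x, y, k=3)
print(xs)            # [2 3 4] -- the first k-1 indices are dropped
print(ys.ravel())    # [2. 3. 4.] -- each entry averages a 3-sample window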
tricks/hashedEmbeddingBag/setup.py | yanzhoupan/dlrm_ssm | 3 | 6631924 | from setuptools import setup, Extension
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
setup(
name='hashed_embedding_bag',
ext_modules=[CUDAExtension(
'hashed_embedding_bag',
[#'hashed_embedding_bag1.cpp',
'hashed_embedding_bag_kernel.cu'])],
py_modules=['hashedEmbeddingBag'],
cmdclass={'build_ext': BuildExtension}
) | from setuptools import setup, Extension
from torch.utils.cpp_extension import BuildExtension, CUDAExtension
setup(
name='hashed_embedding_bag',
ext_modules=[CUDAExtension(
'hashed_embedding_bag',
[#'hashed_embedding_bag1.cpp',
'hashed_embedding_bag_kernel.cu'])],
py_modules=['hashedEmbeddingBag'],
cmdclass={'build_ext': BuildExtension}
) | kn | 0.12984 | #'hashed_embedding_bag1.cpp', | 1.496127 | 1 |
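Assuming a working CUDA toolchain, the extension defined above would typically be built and then imported like this (module names taken from the setup() call):

# Build in place first, e.g.:  pip install -e .
import torch                  # load libtorch symbols before the compiled module
import hashed_embedding_bag   # the CUDAExtension target
import hashedEmbeddingBag     # the pure-Python wrapper from py_modules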
test/mock.py | carefree0910/botorch | 0 | 6631925 | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from collections import OrderedDict
from typing import List, Optional
import torch
from botorch.models.model import Model
from botorch.posteriors import Posterior
from torch import Tensor
EMPTY_SIZE = torch.Size()
class MockPosterior(Posterior):
"""Mock object that implements dummy methods and feeds through specified outputs"""
def __init__(self, mean=None, variance=None, samples=None):
self._mean = mean
self._variance = variance
self._samples = samples
@property
def device(self) -> torch.device:
for t in (self._mean, self._variance, self._samples):
if torch.is_tensor(t):
return t.device
return torch.device("cpu")
@property
def dtype(self) -> torch.dtype:
for t in (self._mean, self._variance, self._samples):
if torch.is_tensor(t):
return t.dtype
return torch.float32
@property
def event_shape(self) -> torch.Size:
if self._samples is not None:
return self._samples.shape
if self._mean is not None:
return self._mean.shape
if self._variance is not None:
return self._variance.shape
return torch.Size()
@property
def mean(self):
return self._mean
@property
def variance(self):
return self._variance
def rsample(
self,
sample_shape: Optional[torch.Size] = None,
base_samples: Optional[Tensor] = None,
) -> Tensor:
"""Mock sample by repeating self._samples. If base_samples is provided,
do a shape check but return the same mock samples."""
if sample_shape is None:
sample_shape = torch.Size()
if sample_shape is not None and base_samples is not None:
# check the base_samples shape is consistent with the sample_shape
if base_samples.shape[: len(sample_shape)] != sample_shape:
raise RuntimeError("sample_shape disagrees with base_samples.")
return self._samples.expand(sample_shape + self._samples.shape)
class MockModel(Model):
"""Mock object that implements dummy methods and feeds through specified outputs"""
def __init__(self, posterior: MockPosterior):
super(Model, self).__init__()
self._posterior = posterior
def posterior(
self,
X: Tensor,
output_indices: Optional[List[int]] = None,
observation_noise: bool = False,
) -> MockPosterior:
return self._posterior
@property
def num_outputs(self) -> int:
event_shape = self._posterior.event_shape
return event_shape[-1] if len(event_shape) > 0 else 0
def state_dict(self) -> None:
pass
def load_state_dict(
self, state_dict: Optional[OrderedDict] = None, strict: bool = False
) -> None:
pass
| #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
from collections import OrderedDict
from typing import List, Optional
import torch
from botorch.models.model import Model
from botorch.posteriors import Posterior
from torch import Tensor
EMPTY_SIZE = torch.Size()
class MockPosterior(Posterior):
"""Mock object that implements dummy methods and feeds through specified outputs"""
def __init__(self, mean=None, variance=None, samples=None):
self._mean = mean
self._variance = variance
self._samples = samples
@property
def device(self) -> torch.device:
for t in (self._mean, self._variance, self._samples):
if torch.is_tensor(t):
return t.device
return torch.device("cpu")
@property
def dtype(self) -> torch.dtype:
for t in (self._mean, self._variance, self._samples):
if torch.is_tensor(t):
return t.dtype
return torch.float32
@property
def event_shape(self) -> torch.Size:
if self._samples is not None:
return self._samples.shape
if self._mean is not None:
return self._mean.shape
if self._variance is not None:
return self._variance.shape
return torch.Size()
@property
def mean(self):
return self._mean
@property
def variance(self):
return self._variance
def rsample(
self,
sample_shape: Optional[torch.Size] = None,
base_samples: Optional[Tensor] = None,
) -> Tensor:
"""Mock sample by repeating self._samples. If base_samples is provided,
do a shape check but return the same mock samples."""
if sample_shape is None:
sample_shape = torch.Size()
if sample_shape is not None and base_samples is not None:
# check the base_samples shape is consistent with the sample_shape
if base_samples.shape[: len(sample_shape)] != sample_shape:
raise RuntimeError("sample_shape disagrees with base_samples.")
return self._samples.expand(sample_shape + self._samples.shape)
class MockModel(Model):
"""Mock object that implements dummy methods and feeds through specified outputs"""
def __init__(self, posterior: MockPosterior):
super(Model, self).__init__()
self._posterior = posterior
def posterior(
self,
X: Tensor,
output_indices: Optional[List[int]] = None,
observation_noise: bool = False,
) -> MockPosterior:
return self._posterior
@property
def num_outputs(self) -> int:
event_shape = self._posterior.event_shape
return event_shape[-1] if len(event_shape) > 0 else 0
def state_dict(self) -> None:
pass
def load_state_dict(
self, state_dict: Optional[OrderedDict] = None, strict: bool = False
) -> None:
pass
| en | 0.812001 | #!/usr/bin/env python3 # Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved Mock object that implements dummy methods and feeds through specified outputs Mock sample by repeating self._samples. If base_samples is provided, do a shape check but return the same mock samples. # check the base_samples shape is consistent with the sample_shape Mock object that implements dummy methods and feeds through specified outputs | 2.423484 | 2 |
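A brief sketch of how the mocks above are meant to be wired together in a test; the shapes are illustrative:

import torch

samples = torch.zeros(2, 1)                  # fake q=2, single-output samples
model = MockModel(MockPosterior(samples=samples))
post = model.posterior(torch.rand(3, 2))     # X is ignored by the mock
print(post.event_shape)                      # torch.Size([2, 1])
print(post.rsample(torch.Size([4])).shape)   # torch.Size([4, 2, 1])
print(model.num_outputs)                     # 1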
oslo_privsep/tests/test_comm.py | mail2nsrajesh/oslo.privsep | 0 | 6631926 | # Copyright 2015 Rackspace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from oslotest import base
from oslo_privsep import comm
class BufSock(object):
def __init__(self):
self.readpos = 0
self.buf = six.BytesIO()
def recv(self, bufsize):
if self.buf.closed:
return b''
self.buf.seek(self.readpos, 0)
data = self.buf.read(bufsize)
self.readpos += len(data)
return data
def sendall(self, data):
self.buf.seek(0, 2)
self.buf.write(data)
def shutdown(self, _flag):
self.buf.close()
class TestSerialization(base.BaseTestCase):
def setUp(self):
super(TestSerialization, self).setUp()
sock = BufSock()
self.input = comm.Serializer(sock)
self.output = iter(comm.Deserializer(sock))
def send(self, data):
self.input.send(data)
return next(self.output)
def assertSendable(self, value):
self.assertEqual(value, self.send(value))
def test_none(self):
self.assertSendable(None)
def test_bool(self):
self.assertSendable(True)
self.assertSendable(False)
def test_int(self):
self.assertSendable(42)
self.assertSendable(-84)
def test_bytes(self):
data = b'\x00\x01\x02\xfd\xfe\xff'
self.assertSendable(data)
def test_unicode(self):
data = u'\u4e09\u9df9'
self.assertSendable(data)
def test_tuple(self):
self.assertSendable((1, 'foo'))
def test_list(self):
# NB! currently lists get converted to tuples by serialization.
self.assertEqual((1, 'foo'), self.send([1, 'foo']))
def test_dict(self):
self.assertSendable(
{
'a': 'b',
1: 2,
None: None,
(1, 2): (3, 4),
}
)
def test_badobj(self):
class UnknownClass(object):
pass
obj = UnknownClass()
self.assertRaises(TypeError, self.send, obj)
def test_eof(self):
self.input.close()
self.assertRaises(StopIteration, next, self.output)
| # Copyright 2015 Rackspace Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import six
from oslotest import base
from oslo_privsep import comm
class BufSock(object):
def __init__(self):
self.readpos = 0
self.buf = six.BytesIO()
def recv(self, bufsize):
if self.buf.closed:
return b''
self.buf.seek(self.readpos, 0)
data = self.buf.read(bufsize)
self.readpos += len(data)
return data
def sendall(self, data):
self.buf.seek(0, 2)
self.buf.write(data)
def shutdown(self, _flag):
self.buf.close()
class TestSerialization(base.BaseTestCase):
def setUp(self):
super(TestSerialization, self).setUp()
sock = BufSock()
self.input = comm.Serializer(sock)
self.output = iter(comm.Deserializer(sock))
def send(self, data):
self.input.send(data)
return next(self.output)
def assertSendable(self, value):
self.assertEqual(value, self.send(value))
def test_none(self):
self.assertSendable(None)
def test_bool(self):
self.assertSendable(True)
self.assertSendable(False)
def test_int(self):
self.assertSendable(42)
self.assertSendable(-84)
def test_bytes(self):
data = b'\x00\x01\x02\xfd\xfe\xff'
self.assertSendable(data)
def test_unicode(self):
data = u'\u4e09\u9df9'
self.assertSendable(data)
def test_tuple(self):
self.assertSendable((1, 'foo'))
def test_list(self):
# NB! currently lists get converted to tuples by serialization.
self.assertEqual((1, 'foo'), self.send([1, 'foo']))
def test_dict(self):
self.assertSendable(
{
'a': 'b',
1: 2,
None: None,
(1, 2): (3, 4),
}
)
def test_badobj(self):
class UnknownClass(object):
pass
obj = UnknownClass()
self.assertRaises(TypeError, self.send, obj)
def test_eof(self):
self.input.close()
self.assertRaises(StopIteration, next, self.output)
| en | 0.863705 | # Copyright 2015 Rackspace Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); you may # not use this file except in compliance with the License. You may obtain # a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations # under the License. # NB! currently lists get converted to tuples by serialization. | 2.080408 | 2 |
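Outside the test case, the same in-memory socket stub gives a quick round-trip check (mirroring setUp above):

sock = BufSock()
writer = comm.Serializer(sock)
reader = iter(comm.Deserializer(sock))
writer.send({'answer': 42})
print(next(reader))    # {'answer': 42}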
example.py | ssmid/pewinput | 1 | 6631927 | <filename>example.py
#!/usr/bin/python3
import time
from pewinput import *
# create devices
keyboard = Device([KEY_LEFTSHIFT, KEY_SPACE, KEY_H, KEY_E, KEY_L, KEY_O, KEY_W, KEY_R, KEY_D, KEY_1, KEY_COMMA])
mouse = Mouse()
# type "Hello, World!"
keyboard.click_combination([KEY_LEFTSHIFT, KEY_H])
for key in [KEY_E, KEY_L, KEY_L, KEY_O, KEY_COMMA, KEY_SPACE]:
keyboard.click(key)
keyboard.click_combination([KEY_LEFTSHIFT, KEY_W])
for key in [KEY_O, KEY_R, KEY_L, KEY_D]:
# alternatively you can simulate a key press yourself
keyboard.press(key, flush=False)
keyboard.release(key, flush=False)
keyboard.flush()
keyboard.click_combination([KEY_LEFTSHIFT, KEY_1]) # !
# move mouse to the bottom right
for i in range(20):
mouse.move_relative(5, 5)
time.sleep(0.02)
# move wheel
for i in range(20):
mouse.move_wheel(1)
time.sleep(0.02)
print()
# Optional, but recommended
keyboard.destroy()
mouse.destroy()
| <filename>example.py
#!/usr/bin/python3
import time
from pewinput import *
# create devices
keyboard = Device([KEY_LEFTSHIFT, KEY_SPACE, KEY_H, KEY_E, KEY_L, KEY_O, KEY_W, KEY_R, KEY_D, KEY_1, KEY_COMMA])
mouse = Mouse()
# type "Hello, World!"
keyboard.click_combination([KEY_LEFTSHIFT, KEY_H])
for key in [KEY_E, KEY_L, KEY_L, KEY_O, KEY_COMMA, KEY_SPACE]:
keyboard.click(key)
keyboard.click_combination([KEY_LEFTSHIFT, KEY_W])
for key in [KEY_O, KEY_R, KEY_L, KEY_D]:
# alternatively you can simulate a key press yourself
keyboard.press(key, flush=False)
keyboard.release(key, flush=False)
keyboard.flush()
keyboard.click_combination([KEY_LEFTSHIFT, KEY_1]) # !
# move mouse to the bottom right
for i in range(20):
mouse.move_relative(5, 5)
time.sleep(0.02)
# move wheel
for i in range(20):
mouse.move_wheel(1)
time.sleep(0.02)
print()
# Optional, but recommended
keyboard.destroy()
mouse.destroy()
| en | 0.608465 | #!/usr/bin/python3 # create devices # type "Hello, World!" # alternatively you can simulate a key press yourself # ! # move mouse to the bottom right # move wheel # Optional, but recommended | 3.541053 | 4 |
src/tf/util/nrrdToTfrecordsLabels.py | juanprietob/gan-brain | 1 | 6631928 |
"""Converts NRRD data to TFRecords file format with Example protos."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import sys
import glob
import tensorflow as tf
import nrrd
import numpy as np
from tensorflow.contrib.learn.python.learn.datasets import mnist
FLAGS = None
def _int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def _bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def saveTFRecord(filename, filenamelabel, writer):
print("Reading:", filename)
img, head = nrrd.read(filename)
img = img.astype(np.float32)
print("Reading:", filenamelabel)
imglabel, headlabel = nrrd.read(filenamelabel)
imglabel = imglabel.astype(np.float32)
img_sizes = head["sizes"]
label_sizes = headlabel["sizes"]
if img_sizes[0] != label_sizes[0] or img_sizes[1] != label_sizes[1] or img_sizes[2] != label_sizes[2]:
print("Sizes in files:", filename, filenamelabel, "have different dimensions. Skipping...")
else:
height = img_sizes[0]
width = img_sizes[1]
depth = img_sizes[2]
img_raw = img.tostring()
imglabel_raw = imglabel.tostring()
example = tf.train.Example(features=tf.train.Features(feature={
'height': _int64_feature(height),
'width': _int64_feature(width),
'depth': _int64_feature(depth),
'raw': _bytes_feature(img_raw),
'label': _bytes_feature(imglabel_raw)
}))
writer.write(example.SerializeToString())
def main(unused_argv):
# Get the data.
print("Writing", FLAGS.output)
writer = tf.python_io.TFRecordWriter(FLAGS.output)
#label+=1
images = [ name for name in glob.glob(os.path.join(FLAGS.directory, "*.nrrd")) if "_label.nrrd" not in name ]
train_size = int(len(images)*(1. - FLAGS.validation_size))  # renamed: this is the number of training images
p = np.random.permutation(len(images))
images = np.array(images)[p]
train_images = images[0:train_size]
test_images = images[train_size:]
for img in train_images:
labelimg = os.path.splitext(img)[0] + "_label.nrrd"
saveTFRecord(os.path.join(FLAGS.directory, img), os.path.join(FLAGS.directory, labelimg), writer)
writer.close()
outputtest = os.path.splitext(FLAGS.output)[0] + "_test.tfRecords"
print("Writing", outputtest)
writer = tf.python_io.TFRecordWriter(outputtest)
for img in test_images:
labelimg = os.path.splitext(img)[0] + "_label.nrrd"
saveTFRecord(os.path.join(FLAGS.directory, img), os.path.join(FLAGS.directory, labelimg), writer)
writer.close()
print("Total images:", len(images))
print("Train images:", len(train_images))
print("Test images:", len(test_images))
print("TFRecords:", FLAGS.output)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--directory',
type=str,
required=True,
help='The directory contains nrrd image files. There are pairs <some image filename>.nrrd and label images have suffix <some image filename>_label.nrrd, check the sampleImage executable to create the samples.'
)
parser.add_argument(
'--output',
type=str,
required=True,
help='Output filename for output tfRecords.'
)
parser.add_argument(
'--validation_size',
type=float,
default=0.2,
help="Divide the data for validation using this ratio"
)
FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + unparsed) |
"""Converts NRRD data to TFRecords file format with Example protos."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import sys
import glob
import tensorflow as tf
import nrrd
import numpy as np
from tensorflow.contrib.learn.python.learn.datasets import mnist
FLAGS = None
def _int64_feature(value):
return tf.train.Feature(int64_list=tf.train.Int64List(value=[value]))
def _bytes_feature(value):
return tf.train.Feature(bytes_list=tf.train.BytesList(value=[value]))
def saveTFRecord(filename, filenamelabel, writer):
print("Reading:", filename)
img, head = nrrd.read(filename)
img = img.astype(np.float32)
print("Reading:", filenamelabel)
imglabel, headlabel = nrrd.read(filenamelabel)
imglabel = imglabel.astype(np.float32)
img_sizes = head["sizes"]
label_sizes = headlabel["sizes"]
if img_sizes[0] != label_sizes[0] or img_sizes[1] != label_sizes[1] or img_sizes[2] != label_sizes[2]:
print("Sizes in files:", filename, filenamelabel, "have different dimensions. Skipping...")
else:
height = img_sizes[0]
width = img_sizes[1]
depth = img_sizes[2]
img_raw = img.tostring()
imglabel_raw = imglabel.tostring()
example = tf.train.Example(features=tf.train.Features(feature={
'height': _int64_feature(height),
'width': _int64_feature(width),
'depth': _int64_feature(depth),
'raw': _bytes_feature(img_raw),
'label': _bytes_feature(imglabel_raw)
}))
writer.write(example.SerializeToString())
def main(unused_argv):
# Get the data.
print("Writing", FLAGS.output)
writer = tf.python_io.TFRecordWriter(FLAGS.output)
#label+=1
images = [ name for name in glob.glob(os.path.join(FLAGS.directory, "*.nrrd")) if "_label.nrrd" not in name ]
train_size = int(len(images)*(1. - FLAGS.validation_size))  # renamed: this is the number of training images
p = np.random.permutation(len(images))
images = np.array(images)[p]
train_images = images[0:train_size]
test_images = images[train_size:]
for img in train_images:
labelimg = os.path.splitext(img)[0] + "_label.nrrd"
saveTFRecord(os.path.join(FLAGS.directory, img), os.path.join(FLAGS.directory, labelimg), writer)
writer.close()
outputtest = os.path.splitext(FLAGS.output)[0] + "_test.tfRecords"
print("Writing", outputtest)
writer = tf.python_io.TFRecordWriter(outputtest)
for img in test_images:
labelimg = os.path.splitext(img)[0] + "_label.nrrd"
saveTFRecord(os.path.join(FLAGS.directory, img), os.path.join(FLAGS.directory, labelimg), writer)
writer.close()
print("Total images:", len(images))
print("Train images:", len(train_images))
print("Test images:", len(test_images))
print("TFRecords:", FLAGS.output)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--directory',
type=str,
required=True,
help='The directory contains nrrd image files. There are pairs <some image filename>.nrrd and label images have suffix <some image filename>_label.nrrd, check the sampleImage executable to create the samples.'
)
parser.add_argument(
'--output',
type=str,
required=True,
help='Output filename for output tfRecords.'
)
parser.add_argument(
'--validation_size',
type=float,
default=0.2,
help="Divide the data for validation using this ratio"
)
FLAGS, unparsed = parser.parse_known_args()
tf.app.run(main=main, argv=[sys.argv[0]] + unparsed) | en | 0.400848 | Converts NRRD data to TFRecords file format with Example protos. # Get the data. #label+=1 | 2.522555 | 3 |
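For completeness, records written by saveTFRecord could be decoded with the matching TF1-era parser; the feature names mirror the Example above, and this decoder is a sketch rather than part of the original script:

def decode_record(serialized):
    features = tf.parse_single_example(serialized, features={
        'height': tf.FixedLenFeature([], tf.int64),
        'width': tf.FixedLenFeature([], tf.int64),
        'depth': tf.FixedLenFeature([], tf.int64),
        'raw': tf.FixedLenFeature([], tf.string),
        'label': tf.FixedLenFeature([], tf.string),
    })
    img = tf.decode_raw(features['raw'], tf.float32)
    label = tf.decode_raw(features['label'], tf.float32)
    return img, label, features['height'], features['width'], features['depth']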
authors/apps/authentication/tests/test_login.py | andela/ah-magnificent6 | 0 | 6631929 | """ module to test login. """
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from authors.apps.authentication.models import User
class AuthenticationTests(APITestCase):
def setUp(self):
""" Setup data for the tests """
self.valid_user = {
"username": "user1",
"email": "<EMAIL>",
"password": "<PASSWORD>"
}
self.registration_url = reverse('authentication:register')
self.login_url = reverse('authentication:login')
self.client.post(self.registration_url, self.valid_user, format='json')
user = User.objects.get(username=self.valid_user["username"])
user.is_active = True
user.save()
def test_successful_login_user(self):
""" Test that a user successfully logs in """
response = self.client.post(
self.login_url, self.valid_user, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIn('token', response.data)
self.assertTrue(response.data['token'])
def test_unsuccessful_login_with_wrong_password(self):
""" Test unsuccessful log in with a wrong email """
self.valid_user['password'] = "<PASSWORD>"
response = self.client.post(
self.login_url, self.valid_user, format='json')
self.assertEqual(response.status_code,
status.HTTP_400_BAD_REQUEST)
def test_unsuccessful_not_registered_user_login(self):
""" Test unsuccessful login for unregistered user. """
response = self.client.post(
self.login_url, {
"email": "<EMAIL>",
"password": "<PASSWORD>"
}, format='json')
self.assertEqual(response.status_code,
status.HTTP_400_BAD_REQUEST)
| """ module to test login. """
from django.urls import reverse
from rest_framework import status
from rest_framework.test import APITestCase
from authors.apps.authentication.models import User
class AuthenticationTests(APITestCase):
def setUp(self):
""" Setup data for the tests """
self.valid_user = {
"username": "user1",
"email": "<EMAIL>",
"password": "<PASSWORD>"
}
self.registration_url = reverse('authentication:register')
self.login_url = reverse('authentication:login')
self.client.post(self.registration_url, self.valid_user, format='json')
user = User.objects.get(username=self.valid_user["username"])
user.is_active = True
user.save()
def test_successful_login_user(self):
""" Test that a user successfully logs in """
response = self.client.post(
self.login_url, self.valid_user, format='json')
self.assertEqual(response.status_code, status.HTTP_200_OK)
self.assertIn('token', response.data)
self.assertTrue(response.data['token'])
def test_unsuccessful_login_with_wrong_password(self):
""" Test unsuccessful log in with a wrong email """
self.valid_user['password'] = "<PASSWORD>"
response = self.client.post(
self.login_url, self.valid_user, format='json')
self.assertEqual(response.status_code,
status.HTTP_400_BAD_REQUEST)
def test_unsuccessful_not_registered_user_login(self):
""" Test unsuccessful login for unregistered user. """
response = self.client.post(
self.login_url, {
"email": "<EMAIL>",
"password": "<PASSWORD>"
}, format='json')
self.assertEqual(response.status_code,
status.HTTP_400_BAD_REQUEST)
| en | 0.843727 | module to test login. Setup data for the tests Test that a user successfully logs in Test unsuccessful log in with a wrong email Test unsuccessful login for unregistered user. | 3.036034 | 3 |
projects/continual-lm/learner/static_per_domain_learner.py | germank/CommAI-env | 1 | 6631930 | <filename>projects/continual-lm/learner/static_per_domain_learner.py
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import torch
from torch import nn, optim
import model
from model import repackage_hidden
from .base_learner import BaseLearner
class StaticPerDomainLearner(BaseLearner):
def __init__(self, optimizer_type, lr, model_type, vocsize, emsize, nhid, nlayers, dropout, tied, batch_size, clip, learn_iterations):
criterion = nn.CrossEntropyLoss()
super(StaticPerDomainLearner, self).__init__(criterion, vocsize, learn_iterations)
self.models = {}
self.hiddens = {}
self.optimizers = {}
self.is_cuda = False
def create_new_rnn(self):
m = model.OriginalRNNModel(
model_type, vocsize, emsize, nhid, nlayers, dropout, tied)
if self.is_cuda:  # self.cuda is the bound method and is always truthy; check the flag instead
m = m.cuda()
m.train()
optimizer = getattr(optim, optimizer_type)(m.parameters(), lr=lr)
hidden = m.init_hidden(batch_size)
return m, hidden, optimizer
# hacky way for avoiding saving all arguments :)
StaticPerDomainLearner.create_new_rnn = create_new_rnn
self.vocsize = vocsize
self.clip = clip
def forward(self, data, hidden, domain_id):
output, hidden = self.models[domain_id](data, hidden)
return output, hidden
def get_lr(self):
return self.optimizers[self.last_domain_id].param_groups[0]['lr']
def learn(self, data, targets, domain_id):
self.last_domain_id = domain_id
if domain_id not in self.models:
model, hidden, optimizer = self.create_new_rnn()
self.models[domain_id] = model
self.hiddens[domain_id] = hidden
self.optimizers[domain_id] = optimizer
print('Number of parameters:', self.get_num_parameters())
return super(StaticPerDomainLearner, self).learn(data, targets)
def get_state(self):
domain_id = self.last_domain_id
return repackage_hidden(self.hiddens[domain_id])
def set_state(self, hidden):
domain_id = self.last_domain_id
self.hiddens[domain_id] = hidden
def predict(self, data, hidden):
domain_id = self.last_domain_id
output, hidden = self.forward(
data, hidden, domain_id)
return output, hidden
def train_model(self, loss, prediction, data, targets):
domain_id = self.last_domain_id
self.optimizers[domain_id].zero_grad()
loss = self.criterion(prediction.view(-1, self.vocsize), targets)
loss.backward()
torch.nn.utils.clip_grad_norm_(self.models[domain_id].parameters(), self.clip)
self.optimizers[domain_id].step()
def cuda(self):
for domain_id in self.models:
self.models[domain_id] = self.models[domain_id].cuda()  # the attribute is self.models, not self.model
try:
self.hiddens[domain_id] = self.hiddens[domain_id].cuda()
except AttributeError:
self.hiddens[domain_id] = tuple(h.cuda() for h in self.hiddens[domain_id])
self.is_cuda = True
def generate(self, data, hidden, domain_id):
hidden = repackage_hidden(hidden)
output, hidden = self.forward(data, hidden, domain_id)
return output.view(-1, self.vocsize), hidden
def train_mode(self):
for model in self.models.values():
model.train()
def evaluate_mode(self):
for model in self.models.values():
model.eval()
def create_hidden_states(self, sz):
return next(iter(self.models.values())).init_hidden(sz)
def get_num_parameters(self):
return sum(p.view(-1).size(0) for rnn in self.models.values() for p in rnn.parameters())
| <filename>projects/continual-lm/learner/static_per_domain_learner.py
# Copyright (c) Facebook, Inc. and its affiliates.
# All rights reserved.
#
# This source code is licensed under the license found in the
# LICENSE file in the root directory of this source tree.
import torch
from torch import nn, optim
import model
from model import repackage_hidden
from .base_learner import BaseLearner
class StaticPerDomainLearner(BaseLearner):
def __init__(self, optimizer_type, lr, model_type, vocsize, emsize, nhid, nlayers, dropout, tied, batch_size, clip, learn_iterations):
criterion = nn.CrossEntropyLoss()
super(StaticPerDomainLearner, self).__init__(criterion, vocsize, learn_iterations)
self.models = {}
self.hiddens = {}
self.optimizers = {}
self.is_cuda = False
def create_new_rnn(self):
m = model.OriginalRNNModel(
model_type, vocsize, emsize, nhid, nlayers, dropout, tied)
if self.is_cuda:  # self.cuda is the bound method and is always truthy; check the flag instead
m = m.cuda()
m.train()
optimizer = getattr(optim, optimizer_type)(m.parameters(), lr=lr)
hidden = m.init_hidden(batch_size)
return m, hidden, optimizer
# hacky way for avoiding saving all arguments :)
StaticPerDomainLearner.create_new_rnn = create_new_rnn
self.vocsize = vocsize
self.clip = clip
def forward(self, data, hidden, domain_id):
output, hidden = self.models[domain_id](data, hidden)
return output, hidden
def get_lr(self):
return self.optimizers[self.last_domain_id].param_groups[0]['lr']
def learn(self, data, targets, domain_id):
self.last_domain_id = domain_id
if domain_id not in self.models:
model, hidden, optimizer = self.create_new_rnn()
self.models[domain_id] = model
self.hiddens[domain_id] = hidden
self.optimizers[domain_id] = optimizer
print('Number of parameters:', self.get_num_parameters())
return super(StaticPerDomainLearner, self).learn(data, targets)
def get_state(self):
domain_id = self.last_domain_id
return repackage_hidden(self.hiddens[domain_id])
def set_state(self, hidden):
domain_id = self.last_domain_id
self.hiddens[domain_id] = hidden
def predict(self, data, hidden):
domain_id = self.last_domain_id
output, hidden = self.forward(
data, hidden, domain_id)
return output, hidden
def train_model(self, loss, prediction, data, targets):
domain_id = self.last_domain_id
self.optimizers[domain_id].zero_grad()
loss = self.criterion(prediction.view(-1, self.vocsize), targets)
loss.backward()
torch.nn.utils.clip_grad_norm_(self.models[domain_id].parameters(), self.clip)
self.optimizers[domain_id].step()
def cuda(self):
for domain_id in self.models:
self.models[domain_id] = self.models[domain_id].cuda()  # the attribute is self.models, not self.model
try:
self.hiddens[domain_id] = self.hiddens[domain_id].cuda()
except AttributeError:
self.hiddens[domain_id] = tuple(h.cuda() for h in self.hiddens[domain_id])
self.is_cuda = True
def generate(self, data, hidden, domain_id):
hidden = repackage_hidden(hidden)
output, hidden = self.forward(data, hidden, domain_id)
return output.view(-1, self.vocsize), hidden
def train_mode(self):
for model in self.models.values():
model.train()
def evaluate_mode(self):
for model in self.models.values():
model.eval()
def create_hidden_states(self, sz):
return next(iter(self.models.values())).init_hidden(sz)
def get_num_parameters(self):
return sum(p.view(-1).size(0) for rnn in self.models.values() for p in rnn.parameters())
| en | 0.886316 | # Copyright (c) Facebook, Inc. and its affiliates. # All rights reserved. # # This source code is licensed under the license found in the # LICENSE file in the root directory of this source tree. # hacky way for avoiding saving all arguments :) | 1.952679 | 2 |
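The "hacky" constructor trick in the row above -- defining a method inside __init__ so it closes over the constructor arguments -- can be shown in isolation:

class Factory:
    def __init__(self, size):
        def build(self):
            return [0] * size       # `size` is captured from __init__'s scope
        Factory.build = build       # installed on the class, like create_new_rnn

f = Factory(3)
print(f.build())   # [0, 0, 0]

Note the caveat: each new instance rebinds the class attribute, so the most recent constructor's arguments win for all instances.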
src/grokcore/component/tests/adapter/implementsmany.py | bielbienne/grokcore.component | 0 | 6631931 | """
Subclasses of grok.Adapter and grok.MultiAdapter must implement exactly one
interface:
>>> grok.testing.grok(__name__)
Traceback (most recent call last):
...
GrokError: <class 'grokcore.component.tests.adapter.implementsmany.Home'> is implementing
more than one interface (use grok.provides to specify which one to use).
"""
import grokcore.component as grok
from zope import interface
class Cave(grok.Context):
pass
class IHome(interface.Interface):
pass
class IFireplace(interface.Interface):
pass
class Home(grok.Adapter):
grok.implements(IHome, IFireplace)
| """
Subclasses of grok.Adapter and grok.MultiAdapter must implement exactly one
interface:
>>> grok.testing.grok(__name__)
Traceback (most recent call last):
...
GrokError: <class 'grokcore.component.tests.adapter.implementsmany.Home'> is implementing
more than one interface (use grok.provides to specify which one to use).
"""
import grokcore.component as grok
from zope import interface
class Cave(grok.Context):
pass
class IHome(interface.Interface):
pass
class IFireplace(interface.Interface):
pass
class Home(grok.Adapter):
grok.implements(IHome, IFireplace)
| en | 0.494384 | Subclasses of grok.Adapter and grok.MultiAdapter must implement exactly one interface: >>> grok.testing.grok(__name__) Traceback (most recent call last): ... GrokError: <class 'grokcore.component.tests.adapter.implementsmany.Home'> is implementing more than one interface (use grok.provides to specify which one to use). | 2.091275 | 2 |
basic/count.py | aniketnaikdesai/anik-lib | 0 | 6631932 | def most_frequent_in_list(x):
return max(set(x), key = x.count)
| def most_frequent_in_list(x):
return max(set(x), key = x.count)
| none | 1 | 2.313483 | 2 |
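One-call usage; with ties, the winner depends on set iteration order:

print(most_frequent_in_list([1, 3, 2, 3, 2, 3]))   # 3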
|
discovery-provider/src/monitors/monitoring_queue.py | atticwip/audius-protocol | 4 | 6631933 | import logging
import time
from src.monitors import monitor_names
from src.monitors.monitors import MONITORS, get_monitor_redis_key
from src.tasks.celery_app import celery
logger = logging.getLogger(__name__)
def refresh(redis, db, monitor):
"""
Refreshes the cached value for a monitor
Args:
redis: Singleton redis instance
db: Singleton database instance
monitor: dict The monitor dictionary with name, func, ttl, and type
"""
key = get_monitor_redis_key(monitor)
ttl_key = f"{key}:ttl"
is_fresh = redis.get(ttl_key)
if is_fresh:
return
# Invoke the monitor function with kwargs for db and redis.
# This allows any monitor to access the db and/or redis connection.
value = monitor[monitor_names.func](db=db, redis=redis)
logger.info(
f"monitoring_queue.py | Computed value for {monitor[monitor_names.name]} {value}"
)
redis.set(key, value)
if "ttl" in monitor:
# Set a TTL (in seconds) key to track when this value needs refreshing.
# We store a separate TTL key rather than expiring the value itself
# so that in the case of an error, the current value can still be read
redis.set(ttl_key, 1, monitor["ttl"])
@celery.task(name="monitoring_queue", bind=True)
def monitoring_queue_task(self):
"""
A persistent cron-style queue that periodically monitors various
health metrics and caches values in redis.
The queue runs every minute on cron, but individual monitors establish
their own freshness/refresh rate to operate on.
1. The queue spins up and for each monitor checks to see if it needs a refresh
2. Refreshes the value and stores the update in redis
"""
db = monitoring_queue_task.db
redis = monitoring_queue_task.redis
have_lock = False
update_lock = redis.lock("monitoring_queue_lock", timeout=2000)
try:
have_lock = update_lock.acquire(blocking=False)
if have_lock:
start_time = time.time()
for monitor in MONITORS.values():
try:
refresh(redis, db, monitor)
except Exception as e:
logger.warning(
f"monitoring_queue.py | Error computing {monitor['name']} {e}"
)
end_time = time.time()
logger.info(
f"monitoring_queue.py | Finished monitoring_queue in {end_time - start_time} seconds"
)
else:
logger.info("monitoring_queue.py | Failed to acquire lock")
except Exception as e:
logger.error("monitoring_queue.py | Fatal error in main loop", exc_info=True)
raise e
finally:
if have_lock:
update_lock.release()
| import logging
import time
from src.monitors import monitor_names
from src.monitors.monitors import MONITORS, get_monitor_redis_key
from src.tasks.celery_app import celery
logger = logging.getLogger(__name__)
def refresh(redis, db, monitor):
"""
Refreshes the cached value for a monitor
Args:
redis: Singleton redis instance
db: Singleton database instance
monitor: dict The monitor dictionary with name, func, ttl, and type
"""
key = get_monitor_redis_key(monitor)
ttl_key = f"{key}:ttl"
is_fresh = redis.get(ttl_key)
if is_fresh:
return
# Invoke the monitor function with kwargs for db and redis.
# This allows any monitor to access the db and/or redis connection.
value = monitor[monitor_names.func](db=db, redis=redis)
logger.info(
f"monitoring_queue.py | Computed value for {monitor[monitor_names.name]} {value}"
)
redis.set(key, value)
if "ttl" in monitor:
# Set a TTL (in seconds) key to track when this value needs refreshing.
# We store a separate TTL key rather than expiring the value itself
# so that in the case of an error, the current value can still be read
redis.set(ttl_key, 1, monitor["ttl"])
@celery.task(name="monitoring_queue", bind=True)
def monitoring_queue_task(self):
"""
A persistent cron-style queue that periodically monitors various
health metrics and caches values in redis.
The queue runs every minute on cron, but individual monitors establish
their own freshness/refresh rate to operate on.
1. The queue spins up and for each monitor checks to see if it needs a refresh
2. Refreshes the value and stores the update in redis
"""
db = monitoring_queue_task.db
redis = monitoring_queue_task.redis
have_lock = False
update_lock = redis.lock("monitoring_queue_lock", timeout=2000)
try:
have_lock = update_lock.acquire(blocking=False)
if have_lock:
start_time = time.time()
for monitor in MONITORS.values():
try:
refresh(redis, db, monitor)
except Exception as e:
logger.warning(
f"monitoring_queue.py | Error computing {monitor['name']} {e}"
)
end_time = time.time()
logger.info(
f"monitoring_queue.py | Finished monitoring_queue in {end_time - start_time} seconds"
)
else:
logger.info("monitoring_queue.py | Failed to acquire lock")
except Exception as e:
logger.error("monitoring_queue.py | Fatal error in main loop", exc_info=True)
raise e
finally:
if have_lock:
update_lock.release()
| en | 0.779559 | Refreshes the cached value for a monitor Args: redis: Singleton redis instance db: Singleton database instance monitor: dict The monitor dictionary qwith name, func, ttl, and type # Invoke the monitor function with kwargs for db and redis. # This allows any monitor to access the db and/or redis connection. # Set a TTL (in seconds) key to track when this value needs refreshing. # We store a separate TTL key rather than expiring the value itself # so that in the case of an error, the current value can still be read A persistent cron-style queue that periodically monitors various health metrics and caches values in redis. The queue runs every minute on cron, but individual monitors establish their own freshness/refresh rate to operate on. 1. The queue spins up and for each monitor checks to see if it needs a refresh 2. Refreshes the value and stores the update in redis | 2.63873 | 3 |
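The freshness check in refresh() above is a reusable pattern: cache the value under one key and track staleness with a second, expiring key. A standalone sketch using the same redis call shapes as the file:

def cached_value(redis, key, compute, ttl_seconds):
    if not redis.get(key + ':ttl'):               # TTL key expired => value is stale
        redis.set(key, compute())                 # the value itself never expires,
        redis.set(key + ':ttl', 1, ttl_seconds)   # so stale reads still succeed
    return redis.get(key)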
venv/lib/python3.8/site-packages/tmdbv3api/objs/genre.py | lfbox7/flixster-kivy | 0 | 6631934 | <filename>venv/lib/python3.8/site-packages/tmdbv3api/objs/genre.py
from tmdbv3api.tmdb import TMDb
class Genre(TMDb):
_urls = {"movie_list": "/genre/movie/list", "tv_list": "/genre/tv/list"}
def movie_list(self):
return self._get_obj(self._call(self._urls["movie_list"], ""), key="genres")
def tv_list(self):
return self._get_obj(self._call(self._urls["tv_list"], ""), key="genres")
| <filename>venv/lib/python3.8/site-packages/tmdbv3api/objs/genre.py
from tmdbv3api.tmdb import TMDb
class Genre(TMDb):
_urls = {"movie_list": "/genre/movie/list", "tv_list": "/genre/tv/list"}
def movie_list(self):
return self._get_obj(self._call(self._urls["movie_list"], ""), key="genres")
def tv_list(self):
return self._get_obj(self._call(self._urls["tv_list"], ""), key="genres")
| none | 1 | 2.247025 | 2 |
|
Lecture_04/hw_02_while_loop.py | YouWatanabe/fp | 0 | 6631935 | def gcd(x, y):
while y > 0:
x, y = y, x % y
return x
print(gcd(24, 32))
| def gcd(x, y):
while y > 0:
x, y = y, x % y
return x
print(gcd(24, 32))
| none | 1 | 3.516458 | 4 |
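Tracing print(gcd(24, 32)): (24, 32) -> (32, 24) -> (24, 8) -> (8, 0), so it prints 8. Another check:

print(gcd(270, 192))   # 6: 270,192 -> 192,78 -> 78,36 -> 36,6 -> 6,0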
|
tests/external_plugins/external_plugin.py | vbabiy/Flexget | 0 | 6631936 | <filename>tests/external_plugins/external_plugin.py
from __future__ import unicode_literals, division, absolute_import
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
class ExternalPlugin(object):
schema = {'type': 'boolean'}
def on_task_input(self, task, config):
return [Entry('test entry', 'fake url')]
@event('plugin.register')
def register_plugin():
plugin.register(ExternalPlugin, 'external_plugin', api_ver=2)
| <filename>tests/external_plugins/external_plugin.py
from __future__ import unicode_literals, division, absolute_import
from flexget import plugin
from flexget.entry import Entry
from flexget.event import event
class ExternalPlugin(object):
schema = {'type': 'boolean'}
def on_task_input(self, task, config):
return [Entry('test entry', 'fake url')]
@event('plugin.register')
def register_plugin():
plugin.register(ExternalPlugin, 'external_plugin', api_ver=2)
| none | 1 | 1.769075 | 2 |
|
blog/extensions/auth/forms.py | victorcto/blog | 0 | 6631937 | import wtforms as wtf
from flask_wtf import FlaskForm
class SignInForm(FlaskForm):
username = wtf.StringField('Username', [wtf.validators.DataRequired()])
password = wtf.PasswordField('Password', [wtf.validators.DataRequired()])
remember_me = wtf.BooleanField('Remember me')
submit = wtf.SubmitField('Sign In')
| import wtforms as wtf
from flask_wtf import FlaskForm
class SignInForm(FlaskForm):
username = wtf.StringField('Username', [wtf.validators.DataRequired()])
password = wtf.PasswordField('Password', [wtf.validators.DataRequired()])
remember_me = wtf.BooleanField('Remember me')
submit = wtf.SubmitField('Sign In')
| none | 1 | 2.589349 | 3 |
|
python/wx/foo.py | rha1063/misc | 0 | 6631938 | '''this is a doc string'''
class Bar:
'this is also a doc string'
def b(self):
'this is a third doc string'
def Car():
'this is a fourth doc string'
| '''this is a doc string'''
class Bar:
'this is also a doc string'
def b(self):
'this is a third doc string'
def Car():
'this is a fourth doc string'
| en | 0.762826 | this is a doc string | 2.207494 | 2 |
qrplay.py | foldedpaper/qrocodile | 0 | 6631939 | <gh_stars>0
#!/usr/bin/python3
import logging
import argparse
import json
import os
import pickle
import subprocess
import sys
from time import sleep
import RPi.GPIO as GPIO
import spotipy
import spotipy.util as util
import soco
from soco.data_structures import DidlItem, DidlResource
# Set up logfile
LOG_FORMAT = '%(levelname)s %(asctime)s - %(message)s'
logging.basicConfig(#filename = 'qrplay.log',
#filemode = 'w',
level = logging.INFO,
format = LOG_FORMAT)
logger = logging.getLogger()
# check python version
if sys.version_info[0] < 3:
raise Exception("Python 3 or a more recent version is required.")
# set up GPIO for wired LED
GPIO.setmode(GPIO.BOARD)
GPIO.setup(7, GPIO.OUT)
# make sure it's turned on
GPIO.output(7,True)
# load defaults from my_defaults.txt
current_path = os.getcwd()
with open('my_defaults.txt', 'r') as defaults_file:
    defaults = json.load(defaults_file)
default_room = defaults['default_room']
default_spotify_user = defaults['default_spotify_user']
# set spotify authentication variables
sp_client_id = defaults['SPOTIPY_CLIENT_ID']
sp_client_secret = defaults['SPOTIPY_CLIENT_SECRET']
sp_redirect = defaults['SPOTIPY_REDIRECT_URI']
# set player uuid for use in building album URIs
album_prefix = defaults['album_uuid_prefix']
logger.info('Imported defaults for room: %s' % default_room)  # avoid logging the Spotify client secret
# Parse the command line arguments
arg_parser = argparse.ArgumentParser(description='Translates QR codes detected by a camera into Sonos commands.')
arg_parser.add_argument('--default-device', default=default_room, help='the name of your default device/room')
arg_parser.add_argument('--linein-source', default='Living Room', help='the name of the device/room used as the line-in source')
arg_parser.add_argument('--debug-file', help='read commands from a file instead of launching scanner')
arg_parser.add_argument('--spotify-username', default=default_spotify_user, help='the username used to set up Spotify access (only needed if you want to use cards for Spotify tracks)')
args = arg_parser.parse_args()
# set filename for pickle of hashed library items
hashed_tracks = 'hashed_tracks.dat'
hashed_albums = 'hashed_albums.dat'
# UNUSED until SoCo restores support for spotify
if args.spotify_username:
# Set up Spotify access
scope = 'user-library-read'
token = util.prompt_for_user_token(args.spotify_username,scope,client_id=sp_client_id,client_secret=sp_client_secret,redirect_uri=sp_redirect)
if token:
sp = spotipy.Spotify(auth=token)
logger.info("logged into Spotify")
else:
        logger.info('Can\'t get Spotify token for ' + args.spotify_username)
        raise ValueError('Can\'t get Spotify token for ' + args.spotify_username)
else:
# No Spotify
sp = None
logger.info('Not using a Spotify account')
# Load the most recently used device, if available, otherwise fall back on the `default-device` argument
try:
with open('.last-device', 'r') as device_file:
current_device = device_file.read().replace('\n', '')
logger.info('Defaulting to last used room: ' + current_device)
except Exception:
    current_device = args.default_device
logger.info('Initial room: ' + current_device)
# set soco instance for accessing sonos speaker
spkr = soco.discovery.by_name(current_device).group.coordinator
# Keep track of the last-seen code
last_qrcode = ''
class Mode:
PLAY_SONG_IMMEDIATELY = 1
PLAY_ALBUM_IMMEDIATELY = 2
BUILD_QUEUE = 3
current_mode = Mode.PLAY_ALBUM_IMMEDIATELY
def switch_to_room(room):
global spkr
if spkr.player_name != room:
spkr = soco.discovery.by_name(room)
current_device = spkr.player_name
with open(".last-device", "w") as device_file:
device_file.write(current_device)
# Causes the onboard green LED to blink on and off twice. (This assumes Raspberry Pi 3 Model B; your
# mileage may vary.)
def blink_led():
duration = 0.15
def led_off():
subprocess.call("echo 0 > /sys/class/leds/led0/brightness", shell=True)
GPIO.output(7,False)
def led_on():
subprocess.call("echo 1 > /sys/class/leds/led0/brightness", shell=True)
GPIO.output(7,True)
# Technically we only need to do this once when the script launches
subprocess.call("echo none > /sys/class/leds/led0/trigger", shell=True)
led_on()
sleep(duration)
led_off()
sleep(duration)
led_on()
sleep(duration)
led_off()
# we need the GPIO LED to stay on because it illuminates the cards for the camera
GPIO.output(7,True)
def handle_command(qrcode):
global current_mode
global spkr
logger.info('HANDLING COMMAND: ' + qrcode)
if qrcode == 'cmd:turntable':
spkr.switch_to_line_in(source=args.linein_source)
spkr.play()
elif qrcode.startswith('changezone:'):
newroom = qrcode.split(":")[1]
logger.info('Switching to '+ newroom)
switch_to_room(newroom)
elif qrcode.startswith('cmd:'):
action = qrcode.split(":")[1]
if action == 'play':
spkr.play()
elif action == 'pause':
spkr.pause()
elif action == 'next':
spkr.next()
elif action == 'prev':
spkr.previous()
elif action == 'stop':
spkr.stop()
elif action == 'shuffle/on':
spkr.play_mode = 'SHUFFLE_NOREPEAT'
elif action == 'shuffle/off':
spkr.play_mode = 'NORMAL'
elif qrcode == 'mode:songonly':
current_mode = Mode.PLAY_SONG_IMMEDIATELY
elif qrcode == 'mode:wholealbum':
current_mode = Mode.PLAY_ALBUM_IMMEDIATELY
elif qrcode == 'mode:buildqueue':
current_mode = Mode.BUILD_QUEUE
spkr.pause()
spkr.clear_queue()
else:
logger.info('No recognized command in handle_command.')
def handle_library_item(uri):
global spkr
global album_prefix
logger.info('PLAYING FROM LIBRARY: ' + uri)
# TODO: re-implement queue-building as in chrispcampbell original
############
#
# Playing albums
#
#############
# to playback, construct a dummy DidlMusicAlbum to send to sonos queue
# needed to play album: URI, and album_id
# all albums share URI, which is:
# x-rincon-playlist:RINCON_[uuid of sonos zone]
# album_id can be got from QR code. It looks like:
# alb:A:ALBUM/Bone%20Machine
# albums can also be hashed. they look like:
# alb:hsh:[hashed_id]
if 'alb:' in uri:
# if this is a 'hashed' album, get album id from hashed resource
if 'hsh:' in uri:
with open(hashed_albums, 'rb') as r:
b = pickle.loads(r.read())
album_id = b[uri]
else:
album_id = uri[4:]
album_fullURI = album_prefix + '#' + album_id
        logger.info('sending full uri %s' % (album_fullURI))
# SoCo needs a DidlResource object to play albums
# We can construct a 'dummy' DidlResource with generic metadata,
# and when this is passed to the speaker, SoCo/Sonos will be able to fetch
# the album from the music library.
res = [DidlResource(uri=album_fullURI, protocol_info='dummy')]
didl = soco.data_structures.DidlMusicAlbum(title='dummy',parent_id='dummy',item_id=album_id,resources=res)
spkr.clear_queue()
spkr.add_to_queue(didl)
spkr.play()
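    # Worked example (all values illustrative, not from a real zone):
    #   album_prefix  = 'x-rincon-playlist:RINCON_000E58AABBCC01400'
    #   album_id      = 'A:ALBUM/Bone%20Machine'
    #   album_fullURI = 'x-rincon-playlist:RINCON_000E58AABBCC01400#A:ALBUM/Bone%20Machine'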
########
#
# Playing playlists or tracks
#
#########
# to playback, you need only the URI of the playlist/track,
# which comes in one of two forms.
# Sonos playlist looks like:
# file:///jffs/settings/savedqueues.rsq#0
# Imported itunes playlist looks like:
# x-file-cifs://computer/music/iTunes/iTunes%20Music%20Library.xml#9D1D3FDCFDBB6751
# Track looks like:
# x-file-cifs://computer/music/iTunes/Music/Original%20Soundtrack/Chants%20From%20The%20Thin%20Red%20Line/01%20Jisas%20Yu%20Hand%20Blong%20Mi.mp3
elif 'pl:' in uri:
pluri = uri[3:]
spkr.clear_queue()
spkr.add_uri_to_queue(uri=pluri)
spkr.play()
elif 'trk:' in uri:
# look up hashuri in hashed tracks
with open(hashed_tracks, 'rb') as r:
b = pickle.loads(r.read())
trkuri = b[uri]
spkr.clear_queue()
spkr.add_uri_to_queue(uri=trkuri)
spkr.play()
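    # The pickle maps short QR payloads to full library URIs, e.g. a
    # hypothetical entry:
    #   {'trk:hsh:3f2a9c': 'x-file-cifs://computer/music/.../01%20Track.mp3'}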
# UNUSED until SoCo restores support for spotify
def handle_spotify_item(uri):
logger.info('PLAYING FROM SPOTIFY: ' + uri)
if current_mode == Mode.BUILD_QUEUE:
action = 'queue'
elif current_mode == Mode.PLAY_ALBUM_IMMEDIATELY:
action = 'clearqueueandplayalbum'
else:
action = 'clearqueueandplaysong'
perform_room_request('spotify/{0}/{1}'.format(action, uri))
# UNUSED until SoCo restores support for spotify
def handle_spotify_album(uri):
logger.info('PLAYING ALBUM FROM SPOTIFY: ' + uri)
album_raw = sp.album(uri)
album_name = album_raw['name']
artist_name = album_raw['artists'][0]['name']
# create and update the track list
album_tracks_raw = sp.album_tracks(uri,limit=50,offset=0)
album_tracks = {}
# clear the sonos queue
action = 'clearqueue'
perform_room_request('{0}'.format(action))
# turn off shuffle before starting the new queue
action = 'shuffle/off'
perform_room_request('{0}'.format(action))
    for track in album_tracks_raw['items']:
track_number = track['track_number']
track_name = track['name']
track_uri = track['uri']
album_tracks.update({track_number: {}})
album_tracks[track_number].update({'uri': track_uri})
album_tracks[track_number].update({'name': track_name})
logger.info(track_number)
        if track_number == 1:
# play track 1 immediately
action = 'now'
perform_room_request('spotify/{0}/{1}'.format(action, str(track_uri)))
else:
# add all remaining tracks to queue
action = 'queue'
perform_room_request('spotify/{0}/{1}'.format(action, str(track_uri)))
# UNUSED until SoCo restores support for spotify
def handle_spotify_playlist(uri):
logger.info('PLAYING PLAYLIST FROM SPOTIFY: ' + uri)
sp_user = uri.split(":")[2]
playlist_raw = sp.user_playlist(sp_user,uri)
playlist_name = playlist_raw["name"]
# clear the sonos queue
spkr.clear_queue()
# create and update the track list
playlist_tracks_raw = sp.user_playlist_tracks(sp_user,uri,limit=50,offset=0)
playlist_tracks = {}
# turn off shuffle before starting the new queue
spkr.play_mode = 'NORMAL'
    # when a track cannot be added to the queue, spotipy resets the track
    # number to 1, so the track number is maintained separately with n
n = 0
for track in playlist_tracks_raw['items']:
n = n + 1
#track_number = track['track']['track_number'] # disabled as causing issues with non-playable tracks
track_number = n
track_name = track['track']["name"]
track_uri = track['track']["uri"]
playlist_tracks.update({track_number: {}})
playlist_tracks[track_number].update({"uri" : track_uri})
playlist_tracks[track_number].update({"name" : track_name})
logger.info(track_number)
        if track_number == 1:
# play track 1 immediately
spkr.add_uri_to_queue(uri=track_uri)
spkr.play()
else:
# add all remaining tracks to queue
spkr.add_uri_to_queue(uri=track_uri)
def handle_qrcode(qrcode):
global last_qrcode
store_qr = True
# Ignore redundant codes, except for commands like "whatsong", where you might
# want to perform it multiple times
if qrcode == last_qrcode and not qrcode.startswith('cmd:'):
print('IGNORING REDUNDANT QRCODE: ' + qrcode)
return
print('HANDLING QRCODE: ' + qrcode)
if qrcode.startswith('cmd:'):
handle_command(qrcode)
elif qrcode.startswith('mode:'):
handle_command(qrcode)
elif qrcode.startswith('spotify:album:'):
handle_spotify_album(qrcode)
elif qrcode.startswith('spotify:artist:'):
# TODO
handle_spotify_artist(qrcode)
elif qrcode.startswith('spotify:user:'):
        if ':playlist:' in qrcode:
handle_spotify_playlist(qrcode)
elif qrcode.startswith('spotify:'):
handle_spotify_item(qrcode)
elif qrcode.startswith('changezone:'):
handle_command(qrcode)
elif qrcode.startswith('pl:'):
handle_library_item(qrcode)
elif qrcode.startswith('trk:'):
handle_library_item(qrcode)
elif qrcode.startswith('alb:'):
handle_library_item(qrcode)
else:
# if qr code is not recognized, don't replace valid last_qrcode
print('QR code does not match known card patterns. Will not attempt play.')
store_qr = False
# Blink the onboard LED to give some visual indication that a code was handled
# (especially useful for cases where there's no other auditory feedback, like
# when adding songs to the queue)
if not args.debug_file:
blink_led()
if store_qr:
last_qrcode = qrcode
# Monitor the output of the QR code scanner.
def start_scan():
while True:
data = p.readline()
qrcode = str(data)[8:]
if qrcode:
qrcode = qrcode.rstrip()
handle_qrcode(qrcode)
# Read from the `debug.txt` file and handle one code at a time.
def read_debug_script():
# Read codes from `debug.txt`
with open(args.debug_file) as f:
debug_codes = f.readlines()
# Handle each code followed by a short delay
for code in debug_codes:
# Remove any trailing comments and newline (and ignore any empty or comment-only lines)
code = code.split("#")[0]
code = code.strip()
if code:
handle_qrcode(code)
sleep(4)
if args.debug_file:
# Run through a list of codes from a local file
read_debug_script()
else:
# Start the QR code reader
# --nodisplay required as running pi headless, to avoid invalid argument (22) errors
p = os.popen('/usr/bin/zbarcam --nodisplay --prescale=300x200', 'r')
try:
start_scan()
except KeyboardInterrupt:
print('Stopping scanner...')
finally:
GPIO.cleanup()
p.close()
| #!/usr/bin/python
import logging
import argparse
import json
import os
import pickle
import subprocess
import sys
from time import sleep
import RPi.GPIO as GPIO
import spotipy
import spotipy.util as util
import soco
from soco.data_structures import DidlItem, DidlResource
# Set up logfile
LOG_FORMAT = '%(levelname)s %(asctime)s - %(message)s'
logging.basicConfig(#filename = 'qrplay.log',
#filemode = 'w',
level = logging.INFO,
format = LOG_FORMAT)
logger = logging.getLogger()
# check python version
if sys.version_info[0] < 3:
raise Exception("Python 3 or a more recent version is required.")
# set up GPIO for wired LED
GPIO.setmode(GPIO.BOARD)
GPIO.setup(7, GPIO.OUT)
# make sure it's turned on
GPIO.output(7,True)
# load defaults from my_defaults.txt
current_path = os.getcwd()
with open('my_defaults.txt', 'r') as defaults_file:
    defaults = json.load(defaults_file)
default_room = defaults['default_room']
default_spotify_user = defaults['default_spotify_user']
# set spotify authentication variables
sp_client_id = defaults['SPOTIPY_CLIENT_ID']
sp_client_secret = defaults['SPOTIPY_CLIENT_SECRET']
sp_redirect = defaults['SPOTIPY_REDIRECT_URI']
# set player uuid for use in building album URIs
album_prefix = defaults['album_uuid_prefix']
logger.info('Imported defaults: %s' % (defaults))
# Parse the command line arguments
arg_parser = argparse.ArgumentParser(description='Translates QR codes detected by a camera into Sonos commands.')
arg_parser.add_argument('--default-device', default=default_room, help='the name of your default device/room')
arg_parser.add_argument('--linein-source', default='Living Room', help='the name of the device/room used as the line-in source')
arg_parser.add_argument('--debug-file', help='read commands from a file instead of launching scanner')
arg_parser.add_argument('--spotify-username', default=default_spotify_user, help='the username used to set up Spotify access (only needed if you want to use cards for Spotify tracks)')
args = arg_parser.parse_args()
# set filename for pickle of hashed library items
hashed_tracks = 'hashed_tracks.dat'
hashed_albums = 'hashed_albums.dat'
# UNUSED until SoCo restores support for spotify
if args.spotify_username:
# Set up Spotify access
scope = 'user-library-read'
token = util.prompt_for_user_token(args.spotify_username,scope,client_id=sp_client_id,client_secret=sp_client_secret,redirect_uri=sp_redirect)
if token:
sp = spotipy.Spotify(auth=token)
logger.info("logged into Spotify")
else:
        logger.info('Can\'t get Spotify token for ' + args.spotify_username)
        raise ValueError('Can\'t get Spotify token for ' + args.spotify_username)
else:
# No Spotify
sp = None
logger.info('Not using a Spotify account')
# Load the most recently used device, if available, otherwise fall back on the `default-device` argument
try:
with open('.last-device', 'r') as device_file:
current_device = device_file.read().replace('\n', '')
logger.info('Defaulting to last used room: ' + current_device)
except Exception:
    current_device = args.default_device
logger.info('Initial room: ' + current_device)
# set soco instance for accessing sonos speaker
spkr = soco.discovery.by_name(current_device).group.coordinator
# Keep track of the last-seen code
last_qrcode = ''
class Mode:
PLAY_SONG_IMMEDIATELY = 1
PLAY_ALBUM_IMMEDIATELY = 2
BUILD_QUEUE = 3
current_mode = Mode.PLAY_ALBUM_IMMEDIATELY
def switch_to_room(room):
global spkr
if spkr.player_name != room:
spkr = soco.discovery.by_name(room)
current_device = spkr.player_name
with open(".last-device", "w") as device_file:
device_file.write(current_device)
# Causes the onboard green LED to blink on and off twice. (This assumes Raspberry Pi 3 Model B; your
# mileage may vary.)
def blink_led():
duration = 0.15
def led_off():
subprocess.call("echo 0 > /sys/class/leds/led0/brightness", shell=True)
GPIO.output(7,False)
def led_on():
subprocess.call("echo 1 > /sys/class/leds/led0/brightness", shell=True)
GPIO.output(7,True)
# Technically we only need to do this once when the script launches
subprocess.call("echo none > /sys/class/leds/led0/trigger", shell=True)
led_on()
sleep(duration)
led_off()
sleep(duration)
led_on()
sleep(duration)
led_off()
# we need the GPIO LED to stay on because it illuminates the cards for the camera
GPIO.output(7,True)
def handle_command(qrcode):
global current_mode
global spkr
logger.info('HANDLING COMMAND: ' + qrcode)
if qrcode == 'cmd:turntable':
spkr.switch_to_line_in(source=args.linein_source)
spkr.play()
elif qrcode.startswith('changezone:'):
newroom = qrcode.split(":")[1]
logger.info('Switching to '+ newroom)
switch_to_room(newroom)
elif qrcode.startswith('cmd:'):
action = qrcode.split(":")[1]
if action == 'play':
spkr.play()
elif action == 'pause':
spkr.pause()
elif action == 'next':
spkr.next()
elif action == 'prev':
spkr.previous()
elif action == 'stop':
spkr.stop()
elif action == 'shuffle/on':
spkr.play_mode = 'SHUFFLE_NOREPEAT'
elif action == 'shuffle/off':
spkr.play_mode = 'NORMAL'
elif qrcode == 'mode:songonly':
current_mode = Mode.PLAY_SONG_IMMEDIATELY
elif qrcode == 'mode:wholealbum':
current_mode = Mode.PLAY_ALBUM_IMMEDIATELY
elif qrcode == 'mode:buildqueue':
current_mode = Mode.BUILD_QUEUE
spkr.pause()
spkr.clear_queue()
else:
logger.info('No recognized command in handle_command.')
def handle_library_item(uri):
global spkr
global album_prefix
logger.info('PLAYING FROM LIBRARY: ' + uri)
# TODO: re-implement queue-building as in chrispcampbell original
############
#
# Playing albums
#
#############
# to playback, construct a dummy DidlMusicAlbum to send to sonos queue
# needed to play album: URI, and album_id
# all albums share URI, which is:
# x-rincon-playlist:RINCON_[uuid of sonos zone]
# album_id can be got from QR code. It looks like:
# alb:A:ALBUM/Bone%20Machine
# albums can also be hashed. they look like:
# alb:hsh:[hashed_id]
if 'alb:' in uri:
# if this is a 'hashed' album, get album id from hashed resource
if 'hsh:' in uri:
with open(hashed_albums, 'rb') as r:
b = pickle.loads(r.read())
album_id = b[uri]
else:
album_id = uri[4:]
album_fullURI = album_prefix + '#' + album_id
        logger.info('sending full uri %s' % (album_fullURI))
# SoCo needs a DidlResource object to play albums
# We can construct a 'dummy' DidlResource with generic metadata,
# and when this is passed to the speaker, SoCo/Sonos will be able to fetch
# the album from the music library.
res = [DidlResource(uri=album_fullURI, protocol_info='dummy')]
didl = soco.data_structures.DidlMusicAlbum(title='dummy',parent_id='dummy',item_id=album_id,resources=res)
spkr.clear_queue()
spkr.add_to_queue(didl)
spkr.play()
########
#
# Playing playlists or tracks
#
#########
# to playback, you need only the URI of the playlist/track,
# which comes in one of two forms.
# Sonos playlist looks like:
# file:///jffs/settings/savedqueues.rsq#0
# Imported itunes playlist looks like:
# x-file-cifs://computer/music/iTunes/iTunes%20Music%20Library.xml#9D1D3FDCFDBB6751
# Track looks like:
# x-file-cifs://computer/music/iTunes/Music/Original%20Soundtrack/Chants%20From%20The%20Thin%20Red%20Line/01%20Jisas%20Yu%20Hand%20Blong%20Mi.mp3
elif 'pl:' in uri:
pluri = uri[3:]
spkr.clear_queue()
spkr.add_uri_to_queue(uri=pluri)
spkr.play()
elif 'trk:' in uri:
# look up hashuri in hashed tracks
with open(hashed_tracks, 'rb') as r:
b = pickle.loads(r.read())
trkuri = b[uri]
spkr.clear_queue()
spkr.add_uri_to_queue(uri=trkuri)
spkr.play()
# UNUSED until SoCo restores support for spotify
def handle_spotify_item(uri):
logger.info('PLAYING FROM SPOTIFY: ' + uri)
if current_mode == Mode.BUILD_QUEUE:
action = 'queue'
elif current_mode == Mode.PLAY_ALBUM_IMMEDIATELY:
action = 'clearqueueandplayalbum'
else:
action = 'clearqueueandplaysong'
perform_room_request('spotify/{0}/{1}'.format(action, uri))
# UNUSED until SoCo restores support for spotify
def handle_spotify_album(uri):
logger.info('PLAYING ALBUM FROM SPOTIFY: ' + uri)
album_raw = sp.album(uri)
album_name = album_raw['name']
artist_name = album_raw['artists'][0]['name']
# create and update the track list
album_tracks_raw = sp.album_tracks(uri,limit=50,offset=0)
album_tracks = {}
# clear the sonos queue
action = 'clearqueue'
perform_room_request('{0}'.format(action))
# turn off shuffle before starting the new queue
action = 'shuffle/off'
perform_room_request('{0}'.format(action))
    for track in album_tracks_raw['items']:
track_number = track['track_number']
track_name = track['name']
track_uri = track['uri']
album_tracks.update({track_number: {}})
album_tracks[track_number].update({'uri': track_uri})
album_tracks[track_number].update({'name': track_name})
logger.info(track_number)
        if track_number == 1:
# play track 1 immediately
action = 'now'
perform_room_request('spotify/{0}/{1}'.format(action, str(track_uri)))
else:
# add all remaining tracks to queue
action = 'queue'
perform_room_request('spotify/{0}/{1}'.format(action, str(track_uri)))
# UNUSED until SoCo restores support for spotify
def handle_spotify_playlist(uri):
logger.info('PLAYING PLAYLIST FROM SPOTIFY: ' + uri)
sp_user = uri.split(":")[2]
playlist_raw = sp.user_playlist(sp_user,uri)
playlist_name = playlist_raw["name"]
# clear the sonos queue
spkr.clear_queue()
# create and update the track list
playlist_tracks_raw = sp.user_playlist_tracks(sp_user,uri,limit=50,offset=0)
playlist_tracks = {}
# turn off shuffle before starting the new queue
spkr.play_mode = 'NORMAL'
    # when a track cannot be added to the queue, spotipy resets the track
    # number to 1, so the track number is maintained separately with n
n = 0
for track in playlist_tracks_raw['items']:
n = n + 1
#track_number = track['track']['track_number'] # disabled as causing issues with non-playable tracks
track_number = n
track_name = track['track']["name"]
track_uri = track['track']["uri"]
playlist_tracks.update({track_number: {}})
playlist_tracks[track_number].update({"uri" : track_uri})
playlist_tracks[track_number].update({"name" : track_name})
logger.info(track_number)
        if track_number == 1:
# play track 1 immediately
spkr.add_uri_to_queue(uri=track_uri)
spkr.play()
else:
# add all remaining tracks to queue
spkr.add_uri_to_queue(uri=track_uri)
def handle_qrcode(qrcode):
global last_qrcode
store_qr = True
# Ignore redundant codes, except for commands like "whatsong", where you might
# want to perform it multiple times
if qrcode == last_qrcode and not qrcode.startswith('cmd:'):
print('IGNORING REDUNDANT QRCODE: ' + qrcode)
return
print('HANDLING QRCODE: ' + qrcode)
if qrcode.startswith('cmd:'):
handle_command(qrcode)
elif qrcode.startswith('mode:'):
handle_command(qrcode)
elif qrcode.startswith('spotify:album:'):
handle_spotify_album(qrcode)
elif qrcode.startswith('spotify:artist:'):
# TODO
handle_spotify_artist(qrcode)
elif qrcode.startswith('spotify:user:'):
        if ':playlist:' in qrcode:
handle_spotify_playlist(qrcode)
elif qrcode.startswith('spotify:'):
handle_spotify_item(qrcode)
elif qrcode.startswith('changezone:'):
handle_command(qrcode)
elif qrcode.startswith('pl:'):
handle_library_item(qrcode)
elif qrcode.startswith('trk:'):
handle_library_item(qrcode)
elif qrcode.startswith('alb:'):
handle_library_item(qrcode)
else:
# if qr code is not recognized, don't replace valid last_qrcode
print('QR code does not match known card patterns. Will not attempt play.')
store_qr = False
# Blink the onboard LED to give some visual indication that a code was handled
# (especially useful for cases where there's no other auditory feedback, like
# when adding songs to the queue)
if not args.debug_file:
blink_led()
if store_qr:
last_qrcode = qrcode
# Monitor the output of the QR code scanner.
def start_scan():
while True:
data = p.readline()
qrcode = str(data)[8:]
if qrcode:
qrcode = qrcode.rstrip()
handle_qrcode(qrcode)
# Read from the `debug.txt` file and handle one code at a time.
def read_debug_script():
# Read codes from `debug.txt`
with open(args.debug_file) as f:
debug_codes = f.readlines()
# Handle each code followed by a short delay
for code in debug_codes:
# Remove any trailing comments and newline (and ignore any empty or comment-only lines)
code = code.split("#")[0]
code = code.strip()
if code:
handle_qrcode(code)
sleep(4)
if args.debug_file:
# Run through a list of codes from a local file
read_debug_script()
else:
# Start the QR code reader
# --nodisplay required as running pi headless, to avoid invalid argument (22) errors
p = os.popen('/usr/bin/zbarcam --nodisplay --prescale=300x200', 'r')
try:
start_scan()
except KeyboardInterrupt:
print('Stopping scanner...')
finally:
GPIO.cleanup()
p.close() | en | 0.847989 | #!/usr/bin/python # Set up logfile #filename = 'qrplay.log', #filemode = 'w', # check python version # set up GPIO for wired LED # make sure it's turned on # load defaults from my_defaults.txt # set spotify authentication variables # set player uuid for use in building album URIs # Parse the command line arguments # set filename for pickle of hashed library items # UNUSED until SoCo restores support for spotify # Set up Spotify access # No Spotify # Load the most recently used device, if available, otherwise fall back on the `default-device` argument # set soco instance for accessing sonos speaker # Keep track of the last-seen code # Causes the onboard green LED to blink on and off twice. (This assumes Raspberry Pi 3 Model B; your # mileage may vary.) # Technically we only need to do this once when the script launches # we need the GPIO LED to stay on because it illuminates the cards for the camera # TODO: re-implement queue-building as in chrispcampbell original ############ # # Playing albums # ############# # to playback, construct a dummy DidlMusicAlbum to send to sonos queue # needed to play album: URI, and album_id # all albums share URI, which is: # x-rincon-playlist:RINCON_[uuid of sonos zone] # album_id can be got from QR code. It looks like: # alb:A:ALBUM/Bone%20Machine # albums can also be hashed. they look like: # alb:hsh:[hashed_id] # if this is a 'hashed' album, get album id from hashed resource # SoCo needs a DidlResource object to play albums # We can construct a 'dummy' DidlResource with generic metadata, # and when this is passed to the speaker, SoCo/Sonos will be able to fetch # the album from the music library. ######## # # Playing playlists or tracks # ######### # to playback, you need only the URI of the playlist/track, # which comes in one of two forms. # Sonos playlist looks like: # file:///jffs/settings/savedqueues.rsq#0 # Imported itunes playlist looks like: # x-file-cifs://computer/music/iTunes/iTunes%20Music%20Library.xml#9D1D3FDCFDBB6751 # Track looks like: # x-file-cifs://computer/music/iTunes/Music/Original%20Soundtrack/Chants%20From%20The%20Thin%20Red%20Line/01%20Jisas%20Yu%20Hand%20Blong%20Mi.mp3 # look up hashuri in hashed tracks # UNUSED until SoCo restores support for spotify # UNUSED until SoCo restores support for spotify # create and update the track list # clear the sonos queue # turn off shuffle before starting the new queue # play track 1 immediately # add all remaining tracks to queue # UNUSED until SoCo restores support for spotify # clear the sonos queue # create and update the track list # turn off shuffle before starting the new queue # when not able to add a track to the queue, spotipy resets the track # to 1 # in this case I just handled the track nr separately with n #track_number = track['track']['track_number'] # disabled as causing issues with non-playable tracks # play track 1 immediately # add all remaining tracks to queue # Ignore redundant codes, except for commands like "whatsong", where you might # want to perform it multiple times # TODO # if qr code is not recognized, don't replace valid last_qrcode # Blink the onboard LED to give some visual indication that a code was handled # (especially useful for cases where there's no other auditory feedback, like # when adding songs to the queue) # Monitor the output of the QR code scanner. # Read from the `debug.txt` file and handle one code at a time. 
# Read codes from `debug.txt` # Handle each code followed by a short delay # Remove any trailing comments and newline (and ignore any empty or comment-only lines) # Run through a list of codes from a local file # Start the QR code reader # --nodisplay required as running pi headless, to avoid invalid argument (22) errors | 2.381895 | 2 |
paddle/dummy_provider.py | tensor-tang/DeepSpeech2 | 0 | 6631940 | #import io, os
import numpy as np
from paddle.trainer.PyDataProvider2 import *
def initHook(settings, uttLengths, counts, lblLengths, batch_size, **kwargs):
settings.uttLengths = uttLengths
settings.counts = counts
settings.lblLengths = lblLengths
settings.freqBins = kwargs.get('freqBins', 161)
settings.charNum = kwargs.get('charNum', 29)
settings.scaleNum = kwargs.get('scaleNum', 1280)
assert settings.scaleNum % batch_size == 0 # TODO: in real data should consider more
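    # e.g. with the default scaleNum=1280 and a batch_size of 128, each
    # utterance-length bucket expands to exactly 10 full batches (illustrative)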
for i in xrange(len(counts)):
settings.counts[i] = counts[i] * settings.scaleNum
settings.slots = {
'data': dense_vector_sequence(settings.freqBins), # fixed dim
'label': integer_value_sequence(settings.charNum) # classes range
}
settings.logger.info("freq bin: %d" % (settings.freqBins))
@provider(
init_hook=initHook, pool_size = 128, should_shuffle=False) # min_pool_size=-1, cache=CacheType.CACHE_PASS_IN_MEM)
def process(settings, file_list):
max_utt_len = max(settings.uttLengths)
max_lbl_len = max(settings.lblLengths)
extra_len = 1000 # just for more space of random table
data_table = np.random.rand(max_utt_len + extra_len, settings.freqBins) #(1500+1000, 161)
data_table = data_table.astype('float32')
label_table = np.random.random_integers(0, settings.charNum-1, max_lbl_len + extra_len) # TODO: range (0~max) or (1~max-1)
label_table = label_table.astype('int')
#print(data_table.shape, label_table.shape)
i = 0
for cnts in settings.counts:
for cnt in xrange(cnts):
utt_len = settings.uttLengths[i]
lbl_len = settings.lblLengths[i]
dat_start_idx = np.random.randint(0, max_utt_len + extra_len - utt_len) # [0, len)
lbl_start_idx = np.random.randint(0, max_lbl_len + extra_len - lbl_len)
#print ("data range", dat_start_idx, "~", dat_start_idx+utt_len)
#print ("labl range", lbl_start_idx, "~", lbl_start_idx+lbl_len)
dat = data_table[dat_start_idx : dat_start_idx+utt_len][:]
lbl = label_table[lbl_start_idx : lbl_start_idx+lbl_len]
yield {
'data': dat.tolist(),
'label': lbl.tolist()
}
i += 1 | #import io, os
import numpy as np
from paddle.trainer.PyDataProvider2 import *
def initHook(settings, uttLengths, counts, lblLengths, batch_size, **kwargs):
settings.uttLengths = uttLengths
settings.counts = counts
settings.lblLengths = lblLengths
settings.freqBins = kwargs.get('freqBins', 161)
settings.charNum = kwargs.get('charNum', 29)
settings.scaleNum = kwargs.get('scaleNum', 1280)
assert settings.scaleNum % batch_size == 0 # TODO: in real data should consider more
for i in xrange(len(counts)):
settings.counts[i] = counts[i] * settings.scaleNum
settings.slots = {
'data': dense_vector_sequence(settings.freqBins), # fixed dim
'label': integer_value_sequence(settings.charNum) # classes range
}
settings.logger.info("freq bin: %d" % (settings.freqBins))
@provider(
init_hook=initHook, pool_size = 128, should_shuffle=False) # min_pool_size=-1, cache=CacheType.CACHE_PASS_IN_MEM)
def process(settings, file_list):
max_utt_len = max(settings.uttLengths)
max_lbl_len = max(settings.lblLengths)
extra_len = 1000 # just for more space of random table
data_table = np.random.rand(max_utt_len + extra_len, settings.freqBins) #(1500+1000, 161)
data_table = data_table.astype('float32')
label_table = np.random.random_integers(0, settings.charNum-1, max_lbl_len + extra_len) # TODO: range (0~max) or (1~max-1)
label_table = label_table.astype('int')
#print(data_table.shape, label_table.shape)
i = 0
for cnts in settings.counts:
for cnt in xrange(cnts):
utt_len = settings.uttLengths[i]
lbl_len = settings.lblLengths[i]
dat_start_idx = np.random.randint(0, max_utt_len + extra_len - utt_len) # [0, len)
lbl_start_idx = np.random.randint(0, max_lbl_len + extra_len - lbl_len)
#print ("data range", dat_start_idx, "~", dat_start_idx+utt_len)
#print ("labl range", lbl_start_idx, "~", lbl_start_idx+lbl_len)
dat = data_table[dat_start_idx : dat_start_idx+utt_len][:]
lbl = label_table[lbl_start_idx : lbl_start_idx+lbl_len]
yield {
'data': dat.tolist(),
'label': lbl.tolist()
}
i += 1 | en | 0.238235 | #import io, os # TODO: in real data should consider more # fixed dim # classes range # min_pool_size=-1, cache=CacheType.CACHE_PASS_IN_MEM) # just for more space of random table #(1500+1000, 161) # TODO: range (0~max) or (1~max-1) #print(data_table.shape, label_table.shape) # [0, len) #print ("data range", dat_start_idx, "~", dat_start_idx+utt_len) #print ("labl range", lbl_start_idx, "~", lbl_start_idx+lbl_len) | 2.087668 | 2 |
add_cookie.py | WYEEE/JDMemberCloseAccount | 0 | 6631941 | import re
from utils.config import get_config
from utils.selenium_browser import get_browser
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import WebDriverException
if __name__ == '__main__':
"""
    Obtain the login cookie from the JD mobile site (用于获取手机端cookie)
"""
browser = get_browser(get_config()["selenium"])
browser.get("https://plogin.m.jd.com/login/login")
try:
wait = WebDriverWait(browser, 135)
print("请在网页端通过手机号码登录")
wait.until(EC.presence_of_element_located((By.ID, 'msShortcutMenu')))
browser.get("https://home.m.jd.com/myJd/newhome.action")
username = wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'my_header_name'))).text
cookie = ""
for _ in browser.get_cookies():
if _["name"] == "pt_key" or _["name"] == "pt_pin":
cookie += _["name"] + "=" + _["value"] + ";"
print("获取的cookie是:" + cookie)
new_lines = []
rf = open("config.yaml", 'r', encoding='utf-8')
line = rf.readline()
while line:
if "cookie:" in line:
lineReg = re.compile(r'cookie: \"(.*?)\"')
line = lineReg.sub('cookie: \"%s\"' % cookie, line)
new_lines.append(line)
line = rf.readline()
rf.close()
wf = open("config.yaml", 'w', encoding='utf-8')
for line in new_lines:
wf.write(line)
wf.close()
print("成功添加", username)
except WebDriverException:
print("添加失败")
finally:
browser.close()
| import re
from utils.config import get_config
from utils.selenium_browser import get_browser
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.common.exceptions import WebDriverException
if __name__ == '__main__':
"""
    Obtain the login cookie from the JD mobile site (用于获取手机端cookie)
"""
browser = get_browser(get_config()["selenium"])
browser.get("https://plogin.m.jd.com/login/login")
try:
wait = WebDriverWait(browser, 135)
print("请在网页端通过手机号码登录")
wait.until(EC.presence_of_element_located((By.ID, 'msShortcutMenu')))
browser.get("https://home.m.jd.com/myJd/newhome.action")
username = wait.until(EC.presence_of_element_located((By.CLASS_NAME, 'my_header_name'))).text
cookie = ""
for _ in browser.get_cookies():
if _["name"] == "pt_key" or _["name"] == "pt_pin":
cookie += _["name"] + "=" + _["value"] + ";"
print("获取的cookie是:" + cookie)
new_lines = []
rf = open("config.yaml", 'r', encoding='utf-8')
line = rf.readline()
while line:
if "cookie:" in line:
lineReg = re.compile(r'cookie: \"(.*?)\"')
line = lineReg.sub('cookie: \"%s\"' % cookie, line)
new_lines.append(line)
line = rf.readline()
rf.close()
wf = open("config.yaml", 'w', encoding='utf-8')
for line in new_lines:
wf.write(line)
wf.close()
print("成功添加", username)
except WebDriverException:
print("添加失败")
finally:
browser.close()
| zh | 0.748964 | Obtain the login cookie from the JD mobile site (用于获取手机端cookie) | 2.507674 | 3 |
gir2cpp/typeref.py | sakhnik/git2cpp | 1 | 6631942 | <filename>gir2cpp/typeref.py
from .xml import Xml
from .config import Config
import xml.etree.ElementTree as ET
class TypeRef:
def __init__(self, et: ET, namespace, xml: Xml, config: Config):
self.namespace = namespace
# Pass through the output parameters for now
self.is_out = et.attrib.get("direction", "") == "out"
for x in et:
if x.tag == xml.ns("type"):
self.name = x.get('name')
self.c_type = x.attrib.get(xml.ns('type', 'c'))
# Resolve clash with the namespace
if self.c_type and self.c_type.startswith("GObject"):
self.c_type = self.c_type.replace("GObject", "::GObject")
if config.skip_check(namespace.name, self.name):
self.name = None
elif self.name == "none" or self.name == "utf8" \
or self.name == "Value" or self.name == "filename":
self.name = None
elif self.is_built_in():
self.name = None
elif self.name == "va_list":
self.name = None
                self.c_type = '...'
elif x.tag == xml.ns("varargs"):
self.name = None
self.c_type = '...'
elif x.tag == xml.ns("array"):
self.c_type = x.attrib.get(xml.ns('type', 'c'))
self.name = None
elif x.tag == xml.ns("doc") or x.tag == xml.ns("attribute"):
pass
else:
self.name = None
print("Unknown type", x.tag)
built_in_types = frozenset((
"gchar", "guchar", "gshort", "gushort",
"gint", "guint", "glong", "gulong", "gssize", "gsize", "gintptr",
"guintptr", "gpointer", "gconstpointer", "gboolean", "gint8", "gint16",
"guint8", "guint16", "gint32", "guint32", "gint64", "guint64",
"gfloat", "gdouble", "GType", "utf8", "gunichar"
))
def is_built_in(self):
return not self.name or self.name in TypeRef.built_in_types
def cpp_type(self):
if not self.name or self.is_out:
return self.c_type
repository = self.namespace.get_repository()
typedef = repository.get_typedef(self.name, self.namespace.name)
if typedef:
return typedef.cpp_type(self.c_type)
return self.name.replace(".", "::")
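    # e.g. when no typedef is registered, a GIR name like "Gio.File" is
    # rendered as the C++ type "Gio::File" (illustrative input)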
def c_type_decl(self):
if not self.name:
return self.c_type
repository = self.namespace.get_repository()
typedef = repository.get_typedef(self.name, self.namespace.name)
if typedef:
return typedef.c_type_decl()
return self.name
def cast_from_c(self, varname):
if not self.name:
return varname
repository = self.namespace.get_repository()
typedef = repository.get_typedef(self.name, self.namespace.name)
if not typedef or self.is_out:
return varname
return typedef.cast_from_c(varname)
def cast_to_c(self, varname):
if not self.name:
return varname
repository = self.namespace.get_repository()
typedef = repository.get_typedef(self.name, self.namespace.name)
if not typedef or self.is_out:
return varname
return typedef.cast_to_c(varname, self.c_type)
| <filename>gir2cpp/typeref.py
from .xml import Xml
from .config import Config
import xml.etree.ElementTree as ET
class TypeRef:
def __init__(self, et: ET, namespace, xml: Xml, config: Config):
self.namespace = namespace
# Pass through the output parameters for now
self.is_out = et.attrib.get("direction", "") == "out"
for x in et:
if x.tag == xml.ns("type"):
self.name = x.get('name')
self.c_type = x.attrib.get(xml.ns('type', 'c'))
# Resolve clash with the namespace
if self.c_type and self.c_type.startswith("GObject"):
self.c_type = self.c_type.replace("GObject", "::GObject")
if config.skip_check(namespace.name, self.name):
self.name = None
elif self.name == "none" or self.name == "utf8" \
or self.name == "Value" or self.name == "filename":
self.name = None
elif self.is_built_in():
self.name = None
elif self.name == "va_list":
self.name = None
                self.c_type = '...'
elif x.tag == xml.ns("varargs"):
self.name = None
self.c_type = '...'
elif x.tag == xml.ns("array"):
self.c_type = x.attrib.get(xml.ns('type', 'c'))
self.name = None
elif x.tag == xml.ns("doc") or x.tag == xml.ns("attribute"):
pass
else:
self.name = None
print("Unknown type", x.tag)
built_in_types = frozenset((
"gchar", "guchar", "gshort", "gushort",
"gint", "guint", "glong", "gulong", "gssize", "gsize", "gintptr",
"guintptr", "gpointer", "gconstpointer", "gboolean", "gint8", "gint16",
"guint8", "guint16", "gint32", "guint32", "gint64", "guint64",
"gfloat", "gdouble", "GType", "utf8", "gunichar"
))
def is_built_in(self):
return not self.name or self.name in TypeRef.built_in_types
def cpp_type(self):
if not self.name or self.is_out:
return self.c_type
repository = self.namespace.get_repository()
typedef = repository.get_typedef(self.name, self.namespace.name)
if typedef:
return typedef.cpp_type(self.c_type)
return self.name.replace(".", "::")
def c_type_decl(self):
if not self.name:
return self.c_type
repository = self.namespace.get_repository()
typedef = repository.get_typedef(self.name, self.namespace.name)
if typedef:
return typedef.c_type_decl()
return self.name
def cast_from_c(self, varname):
if not self.name:
return varname
repository = self.namespace.get_repository()
typedef = repository.get_typedef(self.name, self.namespace.name)
if not typedef or self.is_out:
return varname
return typedef.cast_from_c(varname)
def cast_to_c(self, varname):
if not self.name:
return varname
repository = self.namespace.get_repository()
typedef = repository.get_typedef(self.name, self.namespace.name)
if not typedef or self.is_out:
return varname
return typedef.cast_to_c(varname, self.c_type)
| en | 0.575047 | # Pass through the output parameters for now # Resolve clash with the namespace | 2.681972 | 3 |
tritam_customer/models/tritam_country_state.py | kenysmile/test_facebook | 0 | 6631943 | # -*- coding: utf-8 -*-
from odoo import models, fields, api, _
import logging
import datetime
from odoo.exceptions import UserError
class tritam_country(models.Model):
_inherit = 'res.country'
x_country_code = fields.Char('Mã tỉnh VTP')
ems_country_code = fields.Char('Mã tỉnh EMS')
class tritam_country_state(models.Model):
_inherit = 'res.country.state'
x_state_code = fields.Char('Mã tỉnh VTP')
ems_state_code = fields.Char('Mã tỉnh EMS')
| # -*- coding: utf-8 -*-
from odoo import models, fields, api, _
import logging
import datetime
from odoo.exceptions import UserError
class tritam_country(models.Model):
_inherit = 'res.country'
x_country_code = fields.Char('Mã tỉnh VTP')
ems_country_code = fields.Char('Mã tỉnh EMS')
class tritam_country_state(models.Model):
_inherit = 'res.country.state'
x_state_code = fields.Char('Mã tỉnh VTP')
ems_state_code = fields.Char('Mã tỉnh EMS')
| en | 0.769321 | # -*- coding: utf-8 -*- | 1.894623 | 2 |
imaginaire/model_utils/rename_inputs.py | hw07216/imaginaire | 3,308 | 6631944 | <filename>imaginaire/model_utils/rename_inputs.py
# Copyright (C) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# This work is made available under the Nvidia Source Code License-NC.
# To view a copy of this license, check out LICENSE.md
def rename_inputs(cfg, is_inference, data):
assert hasattr(cfg, 'rename_inputs')
attr = getattr(cfg, 'rename_inputs')
for key in attr.keys():
value = attr[key]
data[key] = data[value]
# Delete the old key.
del data[value]
return data
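# Example (hypothetical config): with cfg.rename_inputs = {'images': 'frames'},
# data['frames'] is copied to data['images'] and the old 'frames' key deleted.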
| <filename>imaginaire/model_utils/rename_inputs.py
# Copyright (C) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# This work is made available under the Nvidia Source Code License-NC.
# To view a copy of this license, check out LICENSE.md
def rename_inputs(cfg, is_inference, data):
assert hasattr(cfg, 'rename_inputs')
attr = getattr(cfg, 'rename_inputs')
for key in attr.keys():
value = attr[key]
data[key] = data[value]
# Delete the old key.
del data[value]
return data
| en | 0.825102 | # Copyright (C) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved. # # This work is made available under the Nvidia Source Code License-NC. # To view a copy of this license, check out LICENSE.md # Delete the old key. | 1.990435 | 2 |
scripts/build/check_release.py | exasol/sphinx-github-pages-generator | 0 | 6631945 | <gh_stars>0
import re
from pathlib import Path
from git import Repo
import toml
def get_git_version():
repo = Repo()
assert not repo.bare
tag_strings = [t.name for t in repo.tags]
tag_strings.sort(reverse=True)
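    # NOTE: this is a plain lexicographic sort; tags with multi-digit
    # components (e.g. 0.10.0 vs 0.9.0) would need a semantic-version sort,
    # e.g. (sketch, assuming plain X.Y.Z tags):
    #   sorted(tag_strings, key=lambda t: tuple(map(int, t.split('.'))))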
if len(tag_strings) > 0:
latest_tag = tag_strings[0].strip()
return latest_tag
else:
return None
def get_poetry_version():
parsed_toml = toml.load('pyproject.toml')
return parsed_toml["tool"]["poetry"]["version"].strip()
def get_change_log_version():
# Path overloads __truediv__
path_to_changelog = Path(__file__).parent / ".." / ".." / "doc" / "changes" / "changelog.md"
with open(path_to_changelog) as changelog:
changelog_str = changelog.read()
# Search for the FIRST pattern like: "* [0.5.0](changes_0.5.0.md)" in the changelog file.
    # Note that we wrap the (0.5.0) part in parentheses, which tells re to return the matched string as a group
    version_match = re.search(r"\* \[([0-9]+\.[0-9]+\.[0-9]+)]\(\S+\)", changelog_str)
return version_match.groups()[0]
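# Example: a changelog line '* [0.5.0](changes_0.5.0.md)' is matched by the
# pattern above and the captured group yields the string '0.5.0'.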
if __name__ == '__main__':
poetry_version = get_poetry_version()
latest_tag = get_git_version()
changelog_version = get_change_log_version()
print(f'Changelog version: "{changelog_version}"')
print(f'Current version: "{poetry_version}"')
print(f'Latest git tag: "{latest_tag}"')
if latest_tag is None and poetry_version != "0.1.0":
raise ValueError("You don't have yet a release. Your Poetry version needs to be 0.1.0!")
    # We expect that the current version in pyproject.toml is always greater than the latest tag.
# Thus we avoid creating a release without having the version number updated.
if poetry_version == latest_tag:
raise ValueError("Poetry version needs to be updated!")
if changelog_version != poetry_version:
raise ValueError("Poetry version differs from Changelog version!")
print("Everything looks good")
| import re
from pathlib import Path
from git import Repo
import toml
def get_git_version():
repo = Repo()
assert not repo.bare
tag_strings = [t.name for t in repo.tags]
tag_strings.sort(reverse=True)
if len(tag_strings) > 0:
latest_tag = tag_strings[0].strip()
return latest_tag
else:
return None
def get_poetry_version():
parsed_toml = toml.load('pyproject.toml')
return parsed_toml["tool"]["poetry"]["version"].strip()
def get_change_log_version():
# Path overloads __truediv__
path_to_changelog = Path(__file__).parent / ".." / ".." / "doc" / "changes" / "changelog.md"
with open(path_to_changelog) as changelog:
changelog_str = changelog.read()
# Search for the FIRST pattern like: "* [0.5.0](changes_0.5.0.md)" in the changelog file.
    # Note that we wrap the (0.5.0) part in parentheses, which tells re to return the matched string as a group
    version_match = re.search(r"\* \[([0-9]+\.[0-9]+\.[0-9]+)]\(\S+\)", changelog_str)
return version_match.groups()[0]
if __name__ == '__main__':
poetry_version = get_poetry_version()
latest_tag = get_git_version()
changelog_version = get_change_log_version()
print(f'Changelog version: "{changelog_version}"')
print(f'Current version: "{poetry_version}"')
print(f'Latest git tag: "{latest_tag}"')
if latest_tag is None and poetry_version != "0.1.0":
raise ValueError("You don't have yet a release. Your Poetry version needs to be 0.1.0!")
    # We expect that the current version in pyproject.toml is always greater than the latest tag.
# Thus we avoid creating a release without having the version number updated.
if poetry_version == latest_tag:
raise ValueError("Poetry version needs to be updated!")
if changelog_version != poetry_version:
raise ValueError("Poetry version differs from Changelog version!")
print("Everything looks good") | en | 0.837452 | # Path overloads __truediv__ # Search for the FIRST pattern like: "* [0.5.0](changes_0.5.0.md)" in the changelog file. # Note that we encapsulate the [(0.5.0)] with parenthesis, which tells re to return the matching string as group # We expect that the current version in pyproject.toml is alway greater than the latest tag. # Thus we avoid creating a release without having the version number updated. | 2.721006 | 3 |
tests/test_pta.py | AaronDJohnson/enterprise | 35 | 6631946 | <reponame>AaronDJohnson/enterprise
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pta
----------------------------------
Tests for common signal and PTA class modules.
"""
# import os
# import pickle
import itertools
import unittest
import numpy as np
from enterprise.pulsar import Pulsar
from enterprise.signals import gp_signals, parameter, signal_base, utils, white_signals
from .enterprise_test_data import datadir
# note function is now defined in enterprise.signals.parameter
@signal_base.function
def hd_orf_generic(pos1, pos2, a=1.5, b=0.25, c=0.25):
if np.all(pos1 == pos2):
return 1
else:
xi = 1 - np.dot(pos1, pos2)
omc2 = (1 - np.cos(xi)) / 2
return a * omc2 * np.log(omc2) - b * omc2 + c
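# Note: with (a, b, c) = (1.5, 0.25, 0.5) the expression above reduces to the
# standard Hellings-Downs cross-correlation; the defaults merely give a
# tunable generalization for testing parameterized ORFs.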
@signal_base.function
def hd_powerlaw(f, pos1, pos2, log10_A=-15, gamma=4.3):
return utils.powerlaw(f, log10_A, gamma) * utils.hd_orf(pos1, pos2)
class TestPTASignals(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Setup the Pulsar object."""
cls.psrs = [
Pulsar(datadir + "/B1855+09_NANOGrav_9yv1.gls.par", datadir + "/B1855+09_NANOGrav_9yv1.tim"),
Pulsar(datadir + "/J1909-3744_NANOGrav_9yv1.gls.par", datadir + "/J1909-3744_NANOGrav_9yv1.tim"),
]
def test_parameterized_orf(self):
T1 = 3.16e8
pl = utils.powerlaw(log10_A=parameter.Uniform(-18, -12), gamma=parameter.Uniform(1, 7))
orf = hd_orf_generic(a=parameter.Uniform(0, 5), b=parameter.Uniform(0, 5), c=parameter.Uniform(0, 5))
rn = gp_signals.FourierBasisGP(spectrum=pl, Tspan=T1, components=30)
crn = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=orf, components=30, name="gw", Tspan=T1)
model = rn + crn
pta = model(self.psrs[0]) + model(self.psrs[1])
lA1, gamma1 = -13, 1e-15
lA2, gamma2 = -13.3, 1e-15
lAc, gammac = -13.1, 1e-15
a, b, c = 1.9, 0.4, 0.23
params = {
"gw_log10_A": lAc,
"gw_gamma": gammac,
"gw_a": a,
"gw_b": b,
"gw_c": c,
"B1855+09_red_noise_log10_A": lA1,
"B1855+09_red_noise_gamma": gamma1,
"J1909-3744_red_noise_log10_A": lA2,
"J1909-3744_red_noise_gamma": gamma2,
}
phi = pta.get_phi(params)
phiinv = pta.get_phiinv(params)
F1, f1 = utils.createfourierdesignmatrix_red(self.psrs[0].toas, nmodes=30, Tspan=T1)
F2, f2 = utils.createfourierdesignmatrix_red(self.psrs[1].toas, nmodes=30, Tspan=T1)
msg = "F matrix incorrect"
assert np.allclose(pta.get_basis(params)[0], F1, rtol=1e-10), msg
assert np.allclose(pta.get_basis(params)[1], F2, rtol=1e-10), msg
nftot = 120
phidiag = np.zeros(nftot)
phit = np.zeros((nftot, nftot))
phidiag[:60] = utils.powerlaw(f1, lA1, gamma1)
phidiag[:60] += utils.powerlaw(f1, lAc, gammac)
phidiag[60:] = utils.powerlaw(f2, lA2, gamma2)
phidiag[60:] += utils.powerlaw(f2, lAc, gammac)
phit[np.diag_indices(nftot)] = phidiag
orf = hd_orf_generic(self.psrs[0].pos, self.psrs[1].pos, a=a, b=b, c=c)
spec = utils.powerlaw(f1, log10_A=lAc, gamma=gammac)
phit[:60, 60:] = np.diag(orf * spec)
phit[60:, :60] = phit[:60, 60:]
msg = "{} {}".format(np.diag(phi), np.diag(phit))
assert np.allclose(phi, phit, rtol=1e-15, atol=1e-17), msg
msg = "PTA Phi inverse is incorrect {}.".format(params)
assert np.allclose(phiinv, np.linalg.inv(phit), rtol=1e-15, atol=1e-17), msg
def test_pta_phiinv_methods(self):
ef = white_signals.MeasurementNoise(efac=parameter.Uniform(0.1, 5))
span = np.max(self.psrs[0].toas) - np.min(self.psrs[0].toas)
pl = utils.powerlaw(log10_A=parameter.Uniform(-16, -13), gamma=parameter.Uniform(1, 7))
orf = utils.hd_orf()
vrf = utils.dipole_orf()
rn = gp_signals.FourierBasisGP(spectrum=pl, components=30, Tspan=span)
hdrn = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=orf, components=20, Tspan=span, name="gw")
vrn = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=vrf, components=20, Tspan=span, name="vec")
vrn2 = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=vrf, components=20, Tspan=span * 1.234, name="vec2")
# two common processes, sharing basis partially
model = ef + rn + hdrn # + vrn
pta = signal_base.PTA([model(psr) for psr in self.psrs])
ps = parameter.sample(pta.params)
phi = pta.get_phi(ps)
ldp = np.linalg.slogdet(phi)[1]
inv1, ld1 = pta.get_phiinv(ps, method="cliques", logdet=True)
inv2, ld2 = pta.get_phiinv(ps, method="partition", logdet=True)
inv3, ld3 = pta.get_phiinv(ps, method="sparse", logdet=True)
if not isinstance(inv3, np.ndarray):
inv3 = inv3.toarray()
for ld in [ld1, ld2, ld3]:
msg = "Wrong phi log determinant for two common processes"
assert np.allclose(ldp, ld, rtol=1e-15, atol=1e-6), msg
for inv in [inv1, inv2, inv3]:
msg = "Wrong phi inverse for two common processes"
assert np.allclose(np.dot(phi, inv), np.eye(phi.shape[0]), rtol=1e-15, atol=1e-6), msg
for inva, invb in itertools.combinations([inv1, inv2, inv3], 2):
assert np.allclose(inva, invb)
# two common processes, no sharing basis
model = ef + rn + vrn2
pta = signal_base.PTA([model(psr) for psr in self.psrs])
ps = parameter.sample(pta.params)
phi = pta.get_phi(ps)
ldp = np.linalg.slogdet(phi)[1]
inv1, ld1 = pta.get_phiinv(ps, method="cliques", logdet=True)
inv2, ld2 = pta.get_phiinv(ps, method="partition", logdet=True)
inv3, ld3 = pta.get_phiinv(ps, method="sparse", logdet=True)
if not isinstance(inv3, np.ndarray):
inv3 = inv3.toarray()
for ld in [ld1, ld2, ld3]:
msg = "Wrong phi log determinant for two common processes"
assert np.allclose(ldp, ld, rtol=1e-15, atol=1e-6), msg
for inv in [inv1, inv2, inv3]:
msg = "Wrong phi inverse for two processes"
assert np.allclose(np.dot(phi, inv), np.eye(phi.shape[0]), rtol=1e-15, atol=1e-6), msg
for inva, invb in itertools.combinations([inv1, inv2, inv3], 2):
assert np.allclose(inva, invb)
# three common processes, sharing basis partially
model = ef + rn + hdrn + vrn
pta = signal_base.PTA([model(psr) for psr in self.psrs])
ps = parameter.sample(pta.params)
phi = pta.get_phi(ps)
ldp = np.linalg.slogdet(phi)[1]
inv1, ld1 = pta.get_phiinv(ps, method="cliques", logdet=True)
inv2, ld2 = pta.get_phiinv(ps, method="partition", logdet=True)
inv3, ld3 = pta.get_phiinv(ps, method="sparse", logdet=True)
if not isinstance(inv3, np.ndarray):
inv3 = inv3.toarray()
for ld in [ld1, ld3]:
msg = "Wrong phi log determinant for two common processes"
assert np.allclose(ldp, ld, rtol=1e-15, atol=1e-6), msg
for inv in [inv1, inv3]:
msg = "Wrong phi inverse for three common processes"
assert np.allclose(np.dot(phi, inv), np.eye(phi.shape[0]), rtol=1e-15, atol=1e-6), msg
for inva, invb in itertools.combinations([inv1, inv3], 2):
assert np.allclose(inva, invb)
# four common processes, three sharing basis partially
model = ef + rn + hdrn + vrn + vrn2
pta = signal_base.PTA([model(psr) for psr in self.psrs])
ps = parameter.sample(pta.params)
phi = pta.get_phi(ps)
ldp = np.linalg.slogdet(phi)[1]
inv1, ld1 = pta.get_phiinv(ps, method="cliques", logdet=True)
inv2, ld2 = pta.get_phiinv(ps, method="partition", logdet=True)
inv3, ld3 = pta.get_phiinv(ps, method="sparse", logdet=True)
if not isinstance(inv3, np.ndarray):
inv3 = inv3.toarray()
for ld in [ld1, ld3]:
msg = "Wrong phi log determinant for two common processes"
assert np.allclose(ldp, ld, rtol=1e-15, atol=1e-6), msg
for inv in [inv1, inv3]:
msg = "Wrong phi inverse for four processes"
assert np.allclose(np.dot(phi, inv), np.eye(phi.shape[0]), rtol=1e-15, atol=1e-6), msg
for inva, invb in itertools.combinations([inv1, inv3], 2):
assert np.allclose(inva, invb)
def test_pta_phi(self):
T1, T2, T3 = 3.16e8, 3.16e8, 3.16e8
nf1, nf2, nf3 = 2, 2, 1
pl = utils.powerlaw(log10_A=parameter.Uniform(-18, -12), gamma=parameter.Uniform(1, 7))
orf = utils.hd_orf()
rn = gp_signals.FourierBasisGP(spectrum=pl, components=nf1, Tspan=T1)
crn = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=orf, components=1, name="gw", Tspan=T3)
model = rn + crn
pta = model(self.psrs[0]) + model(self.psrs[1])
lA1, gamma1 = -13, 1e-15
lA2, gamma2 = -13.3, 1e-15
lAc, gammac = -13.1, 1e-15
params = {
"gw_log10_A": lAc,
"gw_gamma": gammac,
"B1855+09_red_noise_log10_A": lA1,
"B1855+09_red_noise_gamma": gamma1,
"J1909-3744_red_noise_log10_A": lA2,
"J1909-3744_red_noise_gamma": gamma2,
}
phi = pta.get_phi(params)
phiinv = pta.get_phiinv(params)
T1, T2, T3 = 3.16e8, 3.16e8, 3.16e8
nf1, nf2, nf3 = 2, 2, 1
F1, f1 = utils.createfourierdesignmatrix_red(self.psrs[0].toas, nf1, Tspan=T1)
F2, f2 = utils.createfourierdesignmatrix_red(self.psrs[1].toas, nf2, Tspan=T2)
F1c, fc = utils.createfourierdesignmatrix_red(self.psrs[0].toas, nf3, Tspan=T3)
F2c, fc = utils.createfourierdesignmatrix_red(self.psrs[1].toas, nf3, Tspan=T3)
nftot = 2 * 2 * nf1
phidiag = np.zeros(nftot)
phit = np.zeros((nftot, nftot))
phidiag[:4] = utils.powerlaw(f1, lA1, gamma1)
phidiag[:2] += utils.powerlaw(fc, lAc, gammac)
phidiag[4:] = utils.powerlaw(f2, lA2, gamma2)
phidiag[4:6] += utils.powerlaw(fc, lAc, gammac)
phit[np.diag_indices(nftot)] = phidiag
phit[:2, 4:6] = np.diag(hd_powerlaw(fc, self.psrs[0].pos, self.psrs[1].pos, lAc, gammac))
phit[4:6, :2] = np.diag(hd_powerlaw(fc, self.psrs[0].pos, self.psrs[1].pos, lAc, gammac))
msg = "{} {}".format(np.diag(phi), np.diag(phit))
assert np.allclose(phi, phit, rtol=1e-15, atol=1e-17), msg
msg = "PTA Phi inverse is incorrect {}.".format(params)
assert np.allclose(phiinv, np.linalg.inv(phit), rtol=1e-15, atol=1e-17), msg
def test_summary(self):
"""Test PTA summary table as well as its str representation and dict-like interface."""
T1, T3 = 3.16e8, 3.16e8
nf1 = 30
pl = utils.powerlaw(log10_A=parameter.Uniform(-18, -12), gamma=parameter.Uniform(1, 7))
orf = utils.hd_orf()
rn = gp_signals.FourierBasisGP(spectrum=pl, components=nf1, Tspan=T1)
crn = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=orf, components=1, name="gw", Tspan=T3)
model = rn + crn
pta = model(self.psrs[0]) + model(self.psrs[1])
pta.summary(to_stdout=True)
# Test also the PTA and SignalCollection dict-like interfaces
msg = "Incorrect PTA str representation"
assert str(pta) == "<Enterprise PTA object: B1855+09, J1909-3744>", msg
msg = "Incorrect PTA dict-like interface"
assert len(pta) == 2, msg
assert pta.keys() == pta.pulsars, msg
assert pta.values() == pta.pulsarmodels, msg
assert pta.items() == list(zip(pta.pulsars, pta.pulsarmodels)), msg
assert pta["B1855+09"] == pta.pulsarmodels[0], msg
msg = "Incorrect SignalCollection str representation"
assert str(pta["B1855+09"]) == "<Enterprise SignalCollection object B1855+09: red_noise, gw>", msg
msg = "Incorrect SignalCollection dict-like interface"
assert len(pta["B1855+09"]) == 2, msg
assert pta["B1855+09"].keys() == [signal.signal_id for signal in pta.pulsarmodels[0].signals], msg
assert pta["B1855+09"].values() == pta.pulsarmodels[0].signals, msg
assert pta["B1855+09"].items() == list(zip(pta["B1855+09"].keys(), pta["B1855+09"].values())), msg
assert pta["B1855+09"]["red_noise"] == pta.pulsarmodels[0].signals[0], msg
class TestPTASignalsPint(TestPTASignals):
@classmethod
def setUpClass(cls):
"""Setup the Pulsar object."""
# initialize Pulsar class
cls.psrs = [
Pulsar(
datadir + "/B1855+09_NANOGrav_9yv1.gls.par",
datadir + "/B1855+09_NANOGrav_9yv1.tim",
ephem="DE430",
timing_package="pint",
),
Pulsar(
datadir + "/J1909-3744_NANOGrav_9yv1.gls.par",
datadir + "/J1909-3744_NANOGrav_9yv1.tim",
ephem="DE430",
timing_package="pint",
),
]
| #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_pta
----------------------------------
Tests for common signal and PTA class modules.
"""
# import os
# import pickle
import itertools
import unittest
import numpy as np
from enterprise.pulsar import Pulsar
from enterprise.signals import gp_signals, parameter, signal_base, utils, white_signals
from .enterprise_test_data import datadir
# note function is now defined in enterprise.signals.parameter
@signal_base.function
def hd_orf_generic(pos1, pos2, a=1.5, b=0.25, c=0.25):
if np.all(pos1 == pos2):
return 1
else:
xi = 1 - np.dot(pos1, pos2)
omc2 = (1 - np.cos(xi)) / 2
return a * omc2 * np.log(omc2) - b * omc2 + c
@signal_base.function
def hd_powerlaw(f, pos1, pos2, log10_A=-15, gamma=4.3):
return utils.powerlaw(f, log10_A, gamma) * utils.hd_orf(pos1, pos2)
class TestPTASignals(unittest.TestCase):
@classmethod
def setUpClass(cls):
"""Setup the Pulsar object."""
cls.psrs = [
Pulsar(datadir + "/B1855+09_NANOGrav_9yv1.gls.par", datadir + "/B1855+09_NANOGrav_9yv1.tim"),
Pulsar(datadir + "/J1909-3744_NANOGrav_9yv1.gls.par", datadir + "/J1909-3744_NANOGrav_9yv1.tim"),
]
def test_parameterized_orf(self):
T1 = 3.16e8
pl = utils.powerlaw(log10_A=parameter.Uniform(-18, -12), gamma=parameter.Uniform(1, 7))
orf = hd_orf_generic(a=parameter.Uniform(0, 5), b=parameter.Uniform(0, 5), c=parameter.Uniform(0, 5))
rn = gp_signals.FourierBasisGP(spectrum=pl, Tspan=T1, components=30)
crn = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=orf, components=30, name="gw", Tspan=T1)
model = rn + crn
pta = model(self.psrs[0]) + model(self.psrs[1])
lA1, gamma1 = -13, 1e-15
lA2, gamma2 = -13.3, 1e-15
lAc, gammac = -13.1, 1e-15
a, b, c = 1.9, 0.4, 0.23
params = {
"gw_log10_A": lAc,
"gw_gamma": gammac,
"gw_a": a,
"gw_b": b,
"gw_c": c,
"B1855+09_red_noise_log10_A": lA1,
"B1855+09_red_noise_gamma": gamma1,
"J1909-3744_red_noise_log10_A": lA2,
"J1909-3744_red_noise_gamma": gamma2,
}
phi = pta.get_phi(params)
phiinv = pta.get_phiinv(params)
F1, f1 = utils.createfourierdesignmatrix_red(self.psrs[0].toas, nmodes=30, Tspan=T1)
F2, f2 = utils.createfourierdesignmatrix_red(self.psrs[1].toas, nmodes=30, Tspan=T1)
msg = "F matrix incorrect"
assert np.allclose(pta.get_basis(params)[0], F1, rtol=1e-10), msg
assert np.allclose(pta.get_basis(params)[1], F2, rtol=1e-10), msg
nftot = 120
phidiag = np.zeros(nftot)
phit = np.zeros((nftot, nftot))
phidiag[:60] = utils.powerlaw(f1, lA1, gamma1)
phidiag[:60] += utils.powerlaw(f1, lAc, gammac)
phidiag[60:] = utils.powerlaw(f2, lA2, gamma2)
phidiag[60:] += utils.powerlaw(f2, lAc, gammac)
phit[np.diag_indices(nftot)] = phidiag
orf = hd_orf_generic(self.psrs[0].pos, self.psrs[1].pos, a=a, b=b, c=c)
spec = utils.powerlaw(f1, log10_A=lAc, gamma=gammac)
phit[:60, 60:] = np.diag(orf * spec)
phit[60:, :60] = phit[:60, 60:]
msg = "{} {}".format(np.diag(phi), np.diag(phit))
assert np.allclose(phi, phit, rtol=1e-15, atol=1e-17), msg
msg = "PTA Phi inverse is incorrect {}.".format(params)
assert np.allclose(phiinv, np.linalg.inv(phit), rtol=1e-15, atol=1e-17), msg
def test_pta_phiinv_methods(self):
ef = white_signals.MeasurementNoise(efac=parameter.Uniform(0.1, 5))
span = np.max(self.psrs[0].toas) - np.min(self.psrs[0].toas)
pl = utils.powerlaw(log10_A=parameter.Uniform(-16, -13), gamma=parameter.Uniform(1, 7))
orf = utils.hd_orf()
vrf = utils.dipole_orf()
rn = gp_signals.FourierBasisGP(spectrum=pl, components=30, Tspan=span)
hdrn = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=orf, components=20, Tspan=span, name="gw")
vrn = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=vrf, components=20, Tspan=span, name="vec")
vrn2 = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=vrf, components=20, Tspan=span * 1.234, name="vec2")
# two common processes, sharing basis partially
model = ef + rn + hdrn # + vrn
pta = signal_base.PTA([model(psr) for psr in self.psrs])
ps = parameter.sample(pta.params)
phi = pta.get_phi(ps)
ldp = np.linalg.slogdet(phi)[1]
inv1, ld1 = pta.get_phiinv(ps, method="cliques", logdet=True)
inv2, ld2 = pta.get_phiinv(ps, method="partition", logdet=True)
inv3, ld3 = pta.get_phiinv(ps, method="sparse", logdet=True)
if not isinstance(inv3, np.ndarray):
inv3 = inv3.toarray()
for ld in [ld1, ld2, ld3]:
msg = "Wrong phi log determinant for two common processes"
assert np.allclose(ldp, ld, rtol=1e-15, atol=1e-6), msg
for inv in [inv1, inv2, inv3]:
msg = "Wrong phi inverse for two common processes"
assert np.allclose(np.dot(phi, inv), np.eye(phi.shape[0]), rtol=1e-15, atol=1e-6), msg
for inva, invb in itertools.combinations([inv1, inv2, inv3], 2):
assert np.allclose(inva, invb)
# two common processes, no sharing basis
model = ef + rn + vrn2
pta = signal_base.PTA([model(psr) for psr in self.psrs])
ps = parameter.sample(pta.params)
phi = pta.get_phi(ps)
ldp = np.linalg.slogdet(phi)[1]
inv1, ld1 = pta.get_phiinv(ps, method="cliques", logdet=True)
inv2, ld2 = pta.get_phiinv(ps, method="partition", logdet=True)
inv3, ld3 = pta.get_phiinv(ps, method="sparse", logdet=True)
if not isinstance(inv3, np.ndarray):
inv3 = inv3.toarray()
for ld in [ld1, ld2, ld3]:
msg = "Wrong phi log determinant for two common processes"
assert np.allclose(ldp, ld, rtol=1e-15, atol=1e-6), msg
for inv in [inv1, inv2, inv3]:
msg = "Wrong phi inverse for two processes"
assert np.allclose(np.dot(phi, inv), np.eye(phi.shape[0]), rtol=1e-15, atol=1e-6), msg
for inva, invb in itertools.combinations([inv1, inv2, inv3], 2):
assert np.allclose(inva, invb)
# three common processes, sharing basis partially
model = ef + rn + hdrn + vrn
pta = signal_base.PTA([model(psr) for psr in self.psrs])
ps = parameter.sample(pta.params)
phi = pta.get_phi(ps)
ldp = np.linalg.slogdet(phi)[1]
inv1, ld1 = pta.get_phiinv(ps, method="cliques", logdet=True)
inv2, ld2 = pta.get_phiinv(ps, method="partition", logdet=True)
inv3, ld3 = pta.get_phiinv(ps, method="sparse", logdet=True)
if not isinstance(inv3, np.ndarray):
inv3 = inv3.toarray()
for ld in [ld1, ld3]:
msg = "Wrong phi log determinant for two common processes"
assert np.allclose(ldp, ld, rtol=1e-15, atol=1e-6), msg
for inv in [inv1, inv3]:
msg = "Wrong phi inverse for three common processes"
assert np.allclose(np.dot(phi, inv), np.eye(phi.shape[0]), rtol=1e-15, atol=1e-6), msg
for inva, invb in itertools.combinations([inv1, inv3], 2):
assert np.allclose(inva, invb)
# four common processes, three sharing basis partially
model = ef + rn + hdrn + vrn + vrn2
pta = signal_base.PTA([model(psr) for psr in self.psrs])
ps = parameter.sample(pta.params)
phi = pta.get_phi(ps)
ldp = np.linalg.slogdet(phi)[1]
inv1, ld1 = pta.get_phiinv(ps, method="cliques", logdet=True)
inv2, ld2 = pta.get_phiinv(ps, method="partition", logdet=True)
inv3, ld3 = pta.get_phiinv(ps, method="sparse", logdet=True)
if not isinstance(inv3, np.ndarray):
inv3 = inv3.toarray()
for ld in [ld1, ld3]:
msg = "Wrong phi log determinant for two common processes"
assert np.allclose(ldp, ld, rtol=1e-15, atol=1e-6), msg
for inv in [inv1, inv3]:
msg = "Wrong phi inverse for four processes"
assert np.allclose(np.dot(phi, inv), np.eye(phi.shape[0]), rtol=1e-15, atol=1e-6), msg
for inva, invb in itertools.combinations([inv1, inv3], 2):
assert np.allclose(inva, invb)
def test_pta_phi(self):
T1, T2, T3 = 3.16e8, 3.16e8, 3.16e8
nf1, nf2, nf3 = 2, 2, 1
pl = utils.powerlaw(log10_A=parameter.Uniform(-18, -12), gamma=parameter.Uniform(1, 7))
orf = utils.hd_orf()
rn = gp_signals.FourierBasisGP(spectrum=pl, components=nf1, Tspan=T1)
crn = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=orf, components=1, name="gw", Tspan=T3)
model = rn + crn
pta = model(self.psrs[0]) + model(self.psrs[1])
lA1, gamma1 = -13, 1e-15
lA2, gamma2 = -13.3, 1e-15
lAc, gammac = -13.1, 1e-15
params = {
"gw_log10_A": lAc,
"gw_gamma": gammac,
"B1855+09_red_noise_log10_A": lA1,
"B1855+09_red_noise_gamma": gamma1,
"J1909-3744_red_noise_log10_A": lA2,
"J1909-3744_red_noise_gamma": gamma2,
}
phi = pta.get_phi(params)
phiinv = pta.get_phiinv(params)
T1, T2, T3 = 3.16e8, 3.16e8, 3.16e8
nf1, nf2, nf3 = 2, 2, 1
F1, f1 = utils.createfourierdesignmatrix_red(self.psrs[0].toas, nf1, Tspan=T1)
F2, f2 = utils.createfourierdesignmatrix_red(self.psrs[1].toas, nf2, Tspan=T2)
F1c, fc = utils.createfourierdesignmatrix_red(self.psrs[0].toas, nf3, Tspan=T3)
F2c, fc = utils.createfourierdesignmatrix_red(self.psrs[1].toas, nf3, Tspan=T3)
nftot = 2 * 2 * nf1
phidiag = np.zeros(nftot)
phit = np.zeros((nftot, nftot))
phidiag[:4] = utils.powerlaw(f1, lA1, gamma1)
phidiag[:2] += utils.powerlaw(fc, lAc, gammac)
phidiag[4:] = utils.powerlaw(f2, lA2, gamma2)
phidiag[4:6] += utils.powerlaw(fc, lAc, gammac)
phit[np.diag_indices(nftot)] = phidiag
phit[:2, 4:6] = np.diag(hd_powerlaw(fc, self.psrs[0].pos, self.psrs[1].pos, lAc, gammac))
phit[4:6, :2] = np.diag(hd_powerlaw(fc, self.psrs[0].pos, self.psrs[1].pos, lAc, gammac))
msg = "{} {}".format(np.diag(phi), np.diag(phit))
assert np.allclose(phi, phit, rtol=1e-15, atol=1e-17), msg
msg = "PTA Phi inverse is incorrect {}.".format(params)
assert np.allclose(phiinv, np.linalg.inv(phit), rtol=1e-15, atol=1e-17), msg
def test_summary(self):
"""Test PTA summary table as well as its str representation and dict-like interface."""
T1, T3 = 3.16e8, 3.16e8
nf1 = 30
pl = utils.powerlaw(log10_A=parameter.Uniform(-18, -12), gamma=parameter.Uniform(1, 7))
orf = utils.hd_orf()
rn = gp_signals.FourierBasisGP(spectrum=pl, components=nf1, Tspan=T1)
crn = gp_signals.FourierBasisCommonGP(spectrum=pl, orf=orf, components=1, name="gw", Tspan=T3)
model = rn + crn
pta = model(self.psrs[0]) + model(self.psrs[1])
pta.summary(to_stdout=True)
# Test also the PTA and SignalCollection dict-like interfaces
msg = "Incorrect PTA str representation"
assert str(pta) == "<Enterprise PTA object: B1855+09, J1909-3744>", msg
msg = "Incorrect PTA dict-like interface"
assert len(pta) == 2, msg
assert pta.keys() == pta.pulsars, msg
assert pta.values() == pta.pulsarmodels, msg
assert pta.items() == list(zip(pta.pulsars, pta.pulsarmodels)), msg
assert pta["B1855+09"] == pta.pulsarmodels[0], msg
msg = "Incorrect SignalCollection str representation"
assert str(pta["B1855+09"]) == "<Enterprise SignalCollection object B1855+09: red_noise, gw>", msg
msg = "Incorrect SignalCollection dict-like interface"
assert len(pta["B1855+09"]) == 2, msg
assert pta["B1855+09"].keys() == [signal.signal_id for signal in pta.pulsarmodels[0].signals], msg
assert pta["B1855+09"].values() == pta.pulsarmodels[0].signals, msg
assert pta["B1855+09"].items() == list(zip(pta["B1855+09"].keys(), pta["B1855+09"].values())), msg
assert pta["B1855+09"]["red_noise"] == pta.pulsarmodels[0].signals[0], msg
class TestPTASignalsPint(TestPTASignals):
@classmethod
def setUpClass(cls):
"""Setup the Pulsar object."""
# initialize Pulsar class
cls.psrs = [
Pulsar(
datadir + "/B1855+09_NANOGrav_9yv1.gls.par",
datadir + "/B1855+09_NANOGrav_9yv1.tim",
ephem="DE430",
timing_package="pint",
),
Pulsar(
datadir + "/J1909-3744_NANOGrav_9yv1.gls.par",
datadir + "/J1909-3744_NANOGrav_9yv1.tim",
ephem="DE430",
timing_package="pint",
),
] | en | 0.854427 | #!/usr/bin/env python # -*- coding: utf-8 -*- test_pta ---------------------------------- Tests for common signal and PTA class modules. # import os # import pickle # note function is now defined in enterprise.signals.parameter Setup the Pulsar object. # two common processes, sharing basis partially # + vrn # two common processes, no sharing basis # three common processes, sharing basis partially # four common processes, three sharing basis partially Test PTA summary table as well as its str representation and dict-like interface. # Test also the PTA and SignalCollection dict-like interfaces Setup the Pulsar object. # initialize Pulsar class | 2.200942 | 2 |
<filename>dependencies/rdflib/plugins/serializers/trig.py
"""
Trig RDF graph serializer for RDFLib.
See <http://www.w3.org/TR/trig/> for syntax specification.
"""
from collections import defaultdict
from rdflib.plugins.serializers.turtle import TurtleSerializer, _GEN_QNAME_FOR_DT, VERB
from rdflib.term import BNode, Literal
__all__ = ['TrigSerializer']
class TrigSerializer(TurtleSerializer):
short_name = "trig"
indentString = 4 * ' '
def __init__(self, store):
if store.context_aware:
self.contexts = list(store.contexts())
            self.default_context = store.default_context.identifier if store.default_context else None
if store.default_context:
self.contexts.append(store.default_context)
else:
self.contexts = [store]
self.default_context = None
super(TrigSerializer, self).__init__(store)
def preprocess(self):
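        # Visit each context once, caching its subject ordering, subject set and
        # reference counts so serialize() can restore them per graph block.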
for context in self.contexts:
self.store = context
self.getQName(context.identifier)
self._references = defaultdict(int)
self._subjects = {}
for triple in context:
self.preprocessTriple(triple)
            self._contexts[context] = (self.orderSubjects(), self._subjects, self._references)
def reset(self):
super(TrigSerializer, self).reset()
self._contexts = {}
def serialize(self, stream, base=None, encoding=None,
spacious=None, **args):
self.reset()
self.stream = stream
self.base = base
if spacious is not None:
self._spacious = spacious
self.preprocess()
self.startDocument()
firstTime = True
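        # Emit one TriG graph block per context: the default graph as a bare
        # "{ ... }", named graphs as "<iri> { ... }" (QName or n3 form of the identifier).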
for store, (ordered_subjects, subjects, ref) in list(self._contexts.items()):
            if not ordered_subjects:
                continue
self._references = ref
self._serialized = {}
self.store = store
self._subjects = subjects
            if self.default_context and store.identifier == self.default_context:
self.write(self.indent() + '\n{')
else:
if isinstance(store.identifier, BNode):
iri = store.identifier.n3()
else:
iri = self.getQName(store.identifier)
if iri is None:
iri = store.identifier.n3()
self.write(self.indent() + '\n%s {' % iri)
self.depth += 1
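            # Write each as-yet-unserialized top-level subject of this graph block.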
for subject in ordered_subjects:
if self.isDone(subject):
continue
if firstTime:
firstTime = False
if self.statement(subject) and not firstTime:
self.write('\n')
self.depth -= 1
self.write('}\n')
self.endDocument()
stream.write("\n".encode('ascii'))
| """
Trig RDF graph serializer for RDFLib.
See <http://www.w3.org/TR/trig/> for syntax specification.
"""
from collections import defaultdict
from rdflib.plugins.serializers.turtle import TurtleSerializer, _GEN_QNAME_FOR_DT, VERB
from rdflib.term import BNode, Literal
__all__ = ['TrigSerializer']
class TrigSerializer(TurtleSerializer):
short_name = "trig"
indentString = 4 * ' '
def __init__(self, store):
if store.context_aware:
self.contexts = list(store.contexts())
self.default_context = store.default_context.identifier
if store.default_context:
self.contexts.append(store.default_context)
else:
self.contexts = [store]
self.default_context = None
super(TrigSerializer, self).__init__(store)
def preprocess(self):
for context in self.contexts:
self.store = context
self.getQName(context.identifier)
self._references = defaultdict(int)
self._subjects = {}
for triple in context:
self.preprocessTriple(triple)
self._contexts[context]=(self.orderSubjects(), self._subjects, self._references)
def reset(self):
super(TrigSerializer, self).reset()
self._contexts = {}
def serialize(self, stream, base=None, encoding=None,
spacious=None, **args):
self.reset()
self.stream = stream
self.base = base
if spacious is not None:
self._spacious = spacious
self.preprocess()
self.startDocument()
firstTime = True
for store, (ordered_subjects, subjects, ref) in list(self._contexts.items()):
if not ordered_subjects: continue
self._references = ref
self._serialized = {}
self.store = store
self._subjects = subjects
if self.default_context and store.identifier==self.default_context:
self.write(self.indent() + '\n{')
else:
if isinstance(store.identifier, BNode):
iri = store.identifier.n3()
else:
iri = self.getQName(store.identifier)
if iri is None:
iri = store.identifier.n3()
self.write(self.indent() + '\n%s {' % iri)
self.depth += 1
for subject in ordered_subjects:
if self.isDone(subject):
continue
if firstTime:
firstTime = False
if self.statement(subject) and not firstTime:
self.write('\n')
self.depth -= 1
self.write('}\n')
self.endDocument()
stream.write("\n".encode('ascii'))
| en | 0.354563 | Trig RDF graph serializer for RDFLib. See <http://www.w3.org/TR/trig/> for syntax specification. | 2.319793 | 2 |
<filename>ss.py
import numpy
import pandas
import Qtable
import re
import carState
import GetState2
# Build the Q-table, save it where the simulator expects it, and spot-check one entry.
table = Qtable.maketable()
table.to_csv("../input_path/Qtable.csv", index=False)
print(table.loc[79][1])
<filename>multiple-languages/python/ros-cdk-fc-1.0.3/src/ros_cdk_fc/__init__.py
'''
## Aliyun ROS FC Construct Library
This module is part of the AliCloud ROS Cloud Development Kit (ROS CDK) project.
```python
import ros_cdk_fc as fc
```
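A minimal usage sketch (illustrative only: it assumes the usual ROS CDK
``App``/``Stack`` entry points from ``ros-cdk-core``, and all service and
function names below are placeholders):

```python
import ros_cdk_core as core
import ros_cdk_fc as fc

app = core.App()
stack = core.Stack(app, 'fc-demo')

# FunctionProps mirrors the ALIYUN::FC::Function resource properties defined
# in this module; the service referenced by service_name must already exist.
fc.Function(stack, 'MyFunction', fc.FunctionProps(
    service_name='my-service',
    function_name='my-function',
    handler='index.handler',
    runtime='python3',
))

app.synth()
```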
'''
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from ._jsii import *
import ros_cdk_core
class Alias(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Alias",
):
'''A ROS resource type: ``ALIYUN::FC::Alias``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "AliasProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Alias``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrAliasName")
def attr_alias_name(self) -> ros_cdk_core.IResolvable:
'''Attribute AliasName: The alias name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrAliasName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: The service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersionId")
def attr_version_id(self) -> ros_cdk_core.IResolvable:
'''Attribute VersionId: The version ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersionId"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.AliasProps",
jsii_struct_bases=[],
name_mapping={
"alias_name": "aliasName",
"service_name": "serviceName",
"additional_version": "additionalVersion",
"additional_weight": "additionalWeight",
"description": "description",
"version_id": "versionId",
},
)
class AliasProps:
def __init__(
self,
*,
alias_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
additional_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
additional_weight: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
version_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Alias``.
:param alias_name: Property aliasName: Alias name.
:param service_name: Property serviceName: Service name.
:param additional_version: Property additionalVersion: Additional version.
:param additional_weight: Property additionalWeight: Traffic weight of additional version. From 0 to 100.
:param description: Property description: Version description.
:param version_id: Property versionId: Version ID.
'''
self._values: typing.Dict[str, typing.Any] = {
"alias_name": alias_name,
"service_name": service_name,
}
if additional_version is not None:
self._values["additional_version"] = additional_version
if additional_weight is not None:
self._values["additional_weight"] = additional_weight
if description is not None:
self._values["description"] = description
if version_id is not None:
self._values["version_id"] = version_id
@builtins.property
def alias_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property aliasName: Alias name.'''
result = self._values.get("alias_name")
assert result is not None, "Required property 'alias_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def additional_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property additionalVersion: Additional version.'''
result = self._values.get("additional_version")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def additional_weight(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property additionalWeight: Traffic weight of additional version.
From 0 to 100.
'''
result = self._values.get("additional_weight")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property description: Version description.'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def version_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property versionId: Version ID.'''
result = self._values.get("version_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AliasProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class CustomDomain(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.CustomDomain",
):
'''A ROS resource type: ``ALIYUN::FC::CustomDomain``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "CustomDomainProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::CustomDomain``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDomain")
def attr_domain(self) -> ros_cdk_core.IResolvable:
'''Attribute Domain: The domain with protocol.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDomain"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDomainName")
def attr_domain_name(self) -> ros_cdk_core.IResolvable:
'''Attribute DomainName: The domain name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDomainName"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.CustomDomainProps",
jsii_struct_bases=[],
name_mapping={
"domain_name": "domainName",
"protocol": "protocol",
"api_version": "apiVersion",
"cert_config": "certConfig",
"route_config": "routeConfig",
},
)
class CustomDomainProps:
def __init__(
self,
*,
domain_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
protocol: typing.Union[builtins.str, ros_cdk_core.IResolvable],
api_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
cert_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]] = None,
route_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::CustomDomain``.
:param domain_name: Property domainName: domain name.
:param protocol: Property protocol: HTTP or HTTP,HTTPS.
:param api_version: Property apiVersion: api version.
:param cert_config: Property certConfig: certificate info.
        :param route_config: Property routeConfig: Routing table that maps paths to functions when a function is called with a custom domain name.
'''
self._values: typing.Dict[str, typing.Any] = {
"domain_name": domain_name,
"protocol": protocol,
}
if api_version is not None:
self._values["api_version"] = api_version
if cert_config is not None:
self._values["cert_config"] = cert_config
if route_config is not None:
self._values["route_config"] = route_config
@builtins.property
def domain_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property domainName: domain name.'''
result = self._values.get("domain_name")
assert result is not None, "Required property 'domain_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def protocol(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property protocol: HTTP or HTTP,HTTPS.'''
result = self._values.get("protocol")
assert result is not None, "Required property 'protocol' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def api_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property apiVersion: api version.'''
result = self._values.get("api_version")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def cert_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]]:
'''Property certConfig: certificate info.'''
result = self._values.get("cert_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]], result)
@builtins.property
def route_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]]:
        '''Property routeConfig: Routing table that maps paths to functions when a function is called with a custom domain name.'''
result = self._values.get("route_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CustomDomainProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class Function(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Function",
):
'''A ROS resource type: ``ALIYUN::FC::Function``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "FunctionProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Function``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> ros_cdk_core.IResolvable:
'''Attribute ARN: The ARN for ALIYUN::ROS::CustomResource.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionId")
def attr_function_id(self) -> ros_cdk_core.IResolvable:
'''Attribute FunctionId: The function ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''Attribute FunctionName: The function name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceId: The service ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: The service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
class FunctionInvoker(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.FunctionInvoker",
):
'''A ROS resource type: ``ALIYUN::FC::FunctionInvoker``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "FunctionInvokerProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::FunctionInvoker``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResult")
def attr_result(self) -> ros_cdk_core.IResolvable:
'''Attribute Result: Depends on result type: NoResult: Async invoke has no result.
        Success: The response of the function. The response should be a utf-8 encoded string, otherwise ROS will report an error. If the response is binary, encode it via base64 before it is returned.
Failure: Error Message.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResult"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResultType")
def attr_result_type(self) -> ros_cdk_core.IResolvable:
'''Attribute ResultType: Result type: NoResult: Async invoke has no result.
Success: Sync invoke succeeds.
Failure: Sync invoke fails.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResultType"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.FunctionInvokerProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"service_name": "serviceName",
"async_": "async",
"check_error": "checkError",
"event": "event",
"execute_version": "executeVersion",
"qualifier": "qualifier",
"service_region_id": "serviceRegionId",
},
)
class FunctionInvokerProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
async_: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
check_error: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
event: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
execute_version: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
qualifier: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
service_region_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::FunctionInvoker``.
:param function_name: Property functionName: Function name.
:param service_name: Property serviceName: Service name.
:param async_: Property async: Invocation type, Sync or Async. Defaults to Sync.
        :param check_error: Property checkError: Whether to check the function invocation result for errors. If set to true and the invocation result contains an error, the resource creation will be regarded as failed. Defaults to false.
        :param event: Property event: This value is passed to the function as a utf-8 encoded string. It is the function's responsibility to interpret the value. If the value needs to be binary, encode it via base64 before passing it to this property.
        :param execute_version: Property executeVersion: If the property is not specified for creation and update, the function will not be invoked. A change of the property triggers an invocation of the function.
        :param qualifier: Property qualifier: service version, can be versionId or aliasName.
        :param service_region_id: Property serviceRegionId: The region to which the service belongs.
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"service_name": service_name,
}
if async_ is not None:
self._values["async_"] = async_
if check_error is not None:
self._values["check_error"] = check_error
if event is not None:
self._values["event"] = event
if execute_version is not None:
self._values["execute_version"] = execute_version
if qualifier is not None:
self._values["qualifier"] = qualifier
if service_region_id is not None:
self._values["service_region_id"] = service_region_id
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property functionName: Function name.'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def async_(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''Property async: Invocation type, Sync or Async.
Defaults to Sync.
'''
result = self._values.get("async_")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def check_error(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
        '''Property checkError: Whether to check the function invocation result for errors.
        If set to true and the invocation result contains an error, the resource creation will be regarded as failed.
        Defaults to false.
        '''
result = self._values.get("check_error")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def event(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property event: This value is passed to the function as a utf-8 encoded string. It is the function's responsibility to interpret the value. If the value needs to be binary, encode it via base64 before passing it to this property.'''
result = self._values.get("event")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def execute_version(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
        '''Property executeVersion: If the property is not specified for creation and update, the function will not be invoked.
        A change of the property triggers an invocation of the function.
        '''
result = self._values.get("execute_version")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property qualifier: service version, can be versionId or aliasName.'''
result = self._values.get("qualifier")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def service_region_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property serviceRegionId: The region to which the service belongs.'''
result = self._values.get("service_region_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "FunctionInvokerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.FunctionProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"handler": "handler",
"runtime": "runtime",
"service_name": "serviceName",
"async_configuration": "asyncConfiguration",
"ca_port": "caPort",
"code": "code",
"custom_container_config": "customContainerConfig",
"description": "description",
"environment_variables": "environmentVariables",
"initialization_timeout": "initializationTimeout",
"initializer": "initializer",
"instance_concurrency": "instanceConcurrency",
"instance_type": "instanceType",
"memory_size": "memorySize",
"timeout": "timeout",
},
)
class FunctionProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
handler: typing.Union[builtins.str, ros_cdk_core.IResolvable],
runtime: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
async_configuration: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]] = None,
ca_port: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
code: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]] = None,
custom_container_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
environment_variables: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]] = None,
initialization_timeout: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
initializer: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
instance_concurrency: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
instance_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
memory_size: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
timeout: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Function``.
:param function_name: Property functionName: Function name.
:param handler: Property handler: The function execution entry point.
        :param runtime: Property runtime: The function runtime environment. Supported runtimes include nodejs6, nodejs8, nodejs10, nodejs12, python2.7, python3, java8, custom, custom-container, and so on.
:param service_name: Property serviceName: Service name.
:param async_configuration: Property asyncConfiguration: Configuration of asynchronous function calls.
        :param ca_port: Property caPort: Field dedicated to the custom runtime and custom container runtime; it is the port that the started custom HTTP server listens on. The default value is 9000.
:param code: Property code: The code that contains the function implementation.
:param custom_container_config: Property customContainerConfig: Custom container runtime related configuration. After configuration, the function can be replaced with a custom container to execute the function
:param description: Property description: Function description.
        :param environment_variables: Property environmentVariables: The environment variables set for the function; you can read their values inside the function.
        :param initialization_timeout: Property initializationTimeout: The max execution time of the initializer, in seconds.
        :param initializer: Property initializer: The entry point of the initializer.
        :param instance_concurrency: Property instanceConcurrency: Function instance concurrency. Value can be between 1 and 100.
        :param instance_type: Property instanceType: Instance type. Valid values: e1 (flexible instance, memory size between 128 and 3072 MB); c1 (performance instance, memory size must be one of 4096, 8192, 16384 or 32768 MB).
        :param memory_size: Property memorySize: The amount of memory used to run the function, in MB. Function Compute uses this value to allocate CPU resources proportionally. Defaults to 128 MB. It must be a multiple of 64 MB, between 128 MB and 3072 MB.
        :param timeout: Property timeout: The maximum duration a function can run, in seconds, after which Function Compute terminates the execution. Defaults to 3 seconds; valid values range from 1 to 600 seconds.
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"handler": handler,
"runtime": runtime,
"service_name": service_name,
}
if async_configuration is not None:
self._values["async_configuration"] = async_configuration
if ca_port is not None:
self._values["ca_port"] = ca_port
if code is not None:
self._values["code"] = code
if custom_container_config is not None:
self._values["custom_container_config"] = custom_container_config
if description is not None:
self._values["description"] = description
if environment_variables is not None:
self._values["environment_variables"] = environment_variables
if initialization_timeout is not None:
self._values["initialization_timeout"] = initialization_timeout
if initializer is not None:
self._values["initializer"] = initializer
if instance_concurrency is not None:
self._values["instance_concurrency"] = instance_concurrency
if instance_type is not None:
self._values["instance_type"] = instance_type
if memory_size is not None:
self._values["memory_size"] = memory_size
if timeout is not None:
self._values["timeout"] = timeout
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property functionName: Function name.'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def handler(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property handler: The function execution entry point.'''
result = self._values.get("handler")
assert result is not None, "Required property 'handler' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def runtime(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
        '''Property runtime: The function runtime environment.
        Supported runtimes include nodejs6, nodejs8, nodejs10, nodejs12, python2.7, python3, java8, custom, custom-container, and so on.
        '''
result = self._values.get("runtime")
assert result is not None, "Required property 'runtime' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def async_configuration(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]]:
'''Property asyncConfiguration: Configuration of asynchronous function calls.'''
result = self._values.get("async_configuration")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]], result)
@builtins.property
def ca_port(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
        '''Property caPort: Field dedicated to the custom runtime and custom container runtime; it is the port that the started custom HTTP server listens on.
        The default value is 9000.
        '''
result = self._values.get("ca_port")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def code(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]]:
'''Property code: The code that contains the function implementation.'''
result = self._values.get("code")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]], result)
@builtins.property
def custom_container_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]]:
'''Property customContainerConfig: Custom container runtime related configuration.
After configuration, the function can be replaced with a custom container to execute the function
'''
result = self._values.get("custom_container_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property description: Function description.'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def environment_variables(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]]:
        '''Property environmentVariables: The environment variables set for the function; you can read their values inside the function.'''
result = self._values.get("environment_variables")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]], result)
@builtins.property
def initialization_timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
        '''Property initializationTimeout: The max execution time of the initializer, in seconds.'''
result = self._values.get("initialization_timeout")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def initializer(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property initializer: The entry point of the initializer.'''
result = self._values.get("initializer")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def instance_concurrency(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
        '''Property instanceConcurrency: Function instance concurrency.
        Value can be between 1 and 100.
        '''
result = self._values.get("instance_concurrency")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def instance_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property instanceType: Instance type.
        Valid values: e1 (flexible instance, memory size between 128 and 3072 MB); c1 (performance instance, memory size must be one of 4096, 8192, 16384 or 32768 MB).
        '''
result = self._values.get("instance_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def memory_size(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
        '''Property memorySize: The amount of memory used to run the function, in MB.
        Function Compute uses this value to allocate CPU resources proportionally. Defaults to 128 MB. It must be a multiple of 64 MB, between 128 MB and 3072 MB.
        '''
result = self._values.get("memory_size")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
        '''Property timeout: The maximum duration a function can run, in seconds,
        after which Function Compute terminates the execution. Defaults to 3 seconds; valid values range from 1 to 600 seconds.
        '''
result = self._values.get("timeout")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "FunctionProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class Layer(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Layer",
):
'''A ROS resource type: ``ALIYUN::FC::Layer``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "LayerProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Layer``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> ros_cdk_core.IResolvable:
'''Attribute Arn: The name of the layer resource.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLayerName")
def attr_layer_name(self) -> ros_cdk_core.IResolvable:
'''Attribute LayerName: The name of layer.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLayerName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersion")
def attr_version(self) -> ros_cdk_core.IResolvable:
'''Attribute Version: The version of the layer resource.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersion"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.LayerProps",
jsii_struct_bases=[],
name_mapping={
"code": "code",
"compatible_runtime": "compatibleRuntime",
"layer_name": "layerName",
"description": "description",
},
)
class LayerProps:
def __init__(
self,
*,
code: typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"],
compatible_runtime: typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[builtins.str, ros_cdk_core.IResolvable]]],
layer_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Layer``.
:param code: Property code: The code of layer.
        :param compatible_runtime: Property compatibleRuntime: The runtime environment supported by the layer. For example: nodejs12, nodejs10, nodejs8, nodejs6, python3, and python2.7.
:param layer_name: Property layerName: The name of layer.
:param description: Property description: The description of the layer.
'''
self._values: typing.Dict[str, typing.Any] = {
"code": code,
"compatible_runtime": compatible_runtime,
"layer_name": layer_name,
}
if description is not None:
self._values["description"] = description
@builtins.property
def code(self) -> typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"]:
'''Property code: The code of layer.'''
result = self._values.get("code")
assert result is not None, "Required property 'code' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"], result)
@builtins.property
def compatible_runtime(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]:
        '''Property compatibleRuntime: The runtime environment supported by the layer.
        For example: nodejs12, nodejs10, nodejs8, nodejs6, python3, and python2.7.
        '''
result = self._values.get("compatible_runtime")
assert result is not None, "Required property 'compatible_runtime' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]], result)
@builtins.property
def layer_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property layerName: The name of layer.'''
result = self._values.get("layer_name")
assert result is not None, "Required property 'layer_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property description: The description of the layer.'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "LayerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class ProvisionConfig(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.ProvisionConfig",
):
'''A ROS resource type: ``ALIYUN::FC::ProvisionConfig``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "ProvisionConfigProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::ProvisionConfig``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''Attribute FunctionName: The function name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrQualifier")
def attr_qualifier(self) -> ros_cdk_core.IResolvable:
'''Attribute Qualifier: The service alias.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrQualifier"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResource")
def attr_resource(self) -> ros_cdk_core.IResolvable:
'''Attribute Resource: The resource.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResource"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: The service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTarget")
def attr_target(self) -> ros_cdk_core.IResolvable:
'''Attribute Target: Number of provision.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTarget"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.ProvisionConfigProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"qualifier": "qualifier",
"service_name": "serviceName",
"target": "target",
},
)
class ProvisionConfigProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
qualifier: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
target: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
) -> None:
'''Properties for defining a ``ALIYUN::FC::ProvisionConfig``.
:param function_name: Property functionName: Function name.
:param qualifier: Property qualifier: Service's alias. Example: "LATEST"
:param service_name: Property serviceName: Service name.
:param target: Property target: Number of provision.
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"qualifier": qualifier,
"service_name": service_name,
"target": target,
}
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property functionName: Function name.'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def qualifier(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property qualifier: Service's alias.
Example: "LATEST"
'''
result = self._values.get("qualifier")
assert result is not None, "Required property 'qualifier' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def target(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''Property target: Number of provision.'''
result = self._values.get("target")
assert result is not None, "Required property 'target' is missing"
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ProvisionConfigProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
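# --- Usage sketch (illustrative, not part of the generated bindings) ---
# A minimal example of reserving provisioned instances for a function,
# assuming an existing ``ros_cdk_core.Stack`` named ``stack``; the service,
# function, and target values below are placeholders.
#
#   provision = ProvisionConfig(
#       stack,
#       "DemoProvisionConfig",
#       ProvisionConfigProps(
#           service_name="my-service",
#           function_name="my-function",
#           qualifier="LATEST",   # service alias, see ProvisionConfigProps.qualifier
#           target=10,            # number of provisioned instances
#       ),
#   )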
class RosAlias(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosAlias",
):
'''A ROS template type: ``ALIYUN::FC::Alias``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosAliasProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Alias``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrAliasName")
def attr_alias_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: AliasName: The alias name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrAliasName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The service name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersionId")
def attr_version_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: VersionId: The version ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersionId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="aliasName")
def alias_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: aliasName: Alias name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "aliasName"))
@alias_name.setter
def alias_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "aliasName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="additionalVersion")
def additional_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: additionalVersion: Additional version
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "additionalVersion"))
@additional_version.setter
def additional_version(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "additionalVersion", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="additionalWeight")
def additional_weight(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: additionalWeight: Traffic weight of the additional version, from 0 to 100.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "additionalWeight"))
@additional_weight.setter
def additional_weight(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "additionalWeight", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="description")
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Version description
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "description"))
@description.setter
def description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "description", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="versionId")
def version_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: versionId: Version ID
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "versionId"))
@version_id.setter
def version_id(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "versionId", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosAliasProps",
jsii_struct_bases=[],
name_mapping={
"alias_name": "aliasName",
"service_name": "serviceName",
"additional_version": "additionalVersion",
"additional_weight": "additionalWeight",
"description": "description",
"version_id": "versionId",
},
)
class RosAliasProps:
def __init__(
self,
*,
alias_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
additional_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
additional_weight: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
version_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Alias``.
:param alias_name:
:param service_name:
:param additional_version:
:param additional_weight:
:param description:
:param version_id:
'''
self._values: typing.Dict[str, typing.Any] = {
"alias_name": alias_name,
"service_name": service_name,
}
if additional_version is not None:
self._values["additional_version"] = additional_version
if additional_weight is not None:
self._values["additional_weight"] = additional_weight
if description is not None:
self._values["description"] = description
if version_id is not None:
self._values["version_id"] = version_id
@builtins.property
def alias_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: aliasName: Alias name
'''
result = self._values.get("alias_name")
assert result is not None, "Required property 'alias_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def additional_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: additionalVersion: Additional version
'''
result = self._values.get("additional_version")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def additional_weight(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: additionalWeight: Traffic weight of the additional version, from 0 to 100.
'''
result = self._values.get("additional_weight")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Version description
'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def version_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: versionId: Version ID
'''
result = self._values.get("version_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosAliasProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
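# --- Usage sketch (illustrative, not part of the generated bindings) ---
# A minimal weighted (canary) alias, assuming an existing
# ``ros_cdk_core.Stack`` named ``stack``; service and version values are
# placeholders.
#
#   alias = RosAlias(
#       stack,
#       "CanaryAlias",
#       RosAliasProps(
#           alias_name="canary",
#           service_name="my-service",
#           version_id="1",            # main version behind the alias
#           additional_version="2",    # canary version
#           additional_weight=10,      # routes 10% of traffic to version "2"
#       ),
#       enable_resource_property_constraint=True,
#   )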
class RosCustomDomain(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosCustomDomain",
):
'''A ROS template type: ``ALIYUN::FC::CustomDomain``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosCustomDomainProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::CustomDomain``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDomain")
def attr_domain(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Domain: The domain with protocol.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDomain"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDomainName")
def attr_domain_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: DomainName: The domain name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDomainName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="domainName")
def domain_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: domainName: domain name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "domainName"))
@domain_name.setter
def domain_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "domainName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="protocol")
def protocol(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: protocol: HTTP or HTTP,HTTPS
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "protocol"))
@protocol.setter
def protocol(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "protocol", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="apiVersion")
def api_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: apiVersion: API version
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "apiVersion"))
@api_version.setter
def api_version(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "apiVersion", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="certConfig")
def cert_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]]:
'''
:Property: certConfig: certificate info
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]], jsii.get(self, "certConfig"))
@cert_config.setter
def cert_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]],
) -> None:
jsii.set(self, "certConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="routeConfig")
def route_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]]:
'''
:Property: routeConfig: Routing table that maps paths to functions when a function is called with a custom domain name
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]], jsii.get(self, "routeConfig"))
@route_config.setter
def route_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]],
) -> None:
jsii.set(self, "routeConfig", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosCustomDomain.CertConfigProperty",
jsii_struct_bases=[],
name_mapping={
"certificate": "certificate",
"cert_name": "certName",
"private_key": "privateKey",
},
)
class CertConfigProperty:
def __init__(
self,
*,
certificate: typing.Union[builtins.str, ros_cdk_core.IResolvable],
cert_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
private_key: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
'''
:param certificate:
:param cert_name:
:param private_key:
'''
self._values: typing.Dict[str, typing.Any] = {
"certificate": certificate,
"cert_name": cert_name,
"private_key": private_key,
}
@builtins.property
def certificate(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: certificate: certificate
'''
result = self._values.get("certificate")
assert result is not None, "Required property 'certificate' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def cert_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: certName: custom certificate name
'''
result = self._values.get("cert_name")
assert result is not None, "Required property 'cert_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def private_key(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: privateKey: private key
'''
result = self._values.get("private_key")
assert result is not None, "Required property 'private_key' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CertConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosCustomDomain.RouteConfigProperty",
jsii_struct_bases=[],
name_mapping={"routes": "routes"},
)
class RouteConfigProperty:
def __init__(
self,
*,
routes: typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RoutesProperty"]]],
) -> None:
'''
:param routes:
'''
self._values: typing.Dict[str, typing.Any] = {
"routes": routes,
}
@builtins.property
def routes(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RoutesProperty"]]]:
'''
:Property: routes: Array of PathConfig entries
'''
result = self._values.get("routes")
assert result is not None, "Required property 'routes' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RoutesProperty"]]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RouteConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosCustomDomain.RoutesProperty",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"path": "path",
"service_name": "serviceName",
"qualifier": "qualifier",
},
)
class RoutesProperty:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
path: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
qualifier: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param function_name:
:param path:
:param service_name:
:param qualifier:
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"path": path,
"service_name": service_name,
}
if qualifier is not None:
self._values["qualifier"] = qualifier
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Path to the function, for example: "login"
'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def path(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: path: HTTP request path when a function is called with a custom domain name, for example: "/login/*"
'''
result = self._values.get("path")
assert result is not None, "Required property 'path' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Path to the service, for example: "blogService"
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: qualifier: Service version or alias
'''
result = self._values.get("qualifier")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RoutesProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosCustomDomainProps",
jsii_struct_bases=[],
name_mapping={
"domain_name": "domainName",
"protocol": "protocol",
"api_version": "apiVersion",
"cert_config": "certConfig",
"route_config": "routeConfig",
},
)
class RosCustomDomainProps:
def __init__(
self,
*,
domain_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
protocol: typing.Union[builtins.str, ros_cdk_core.IResolvable],
api_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
cert_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.CertConfigProperty]] = None,
route_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.RouteConfigProperty]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::CustomDomain``.
:param domain_name:
:param protocol:
:param api_version:
:param cert_config:
:param route_config:
'''
self._values: typing.Dict[str, typing.Any] = {
"domain_name": domain_name,
"protocol": protocol,
}
if api_version is not None:
self._values["api_version"] = api_version
if cert_config is not None:
self._values["cert_config"] = cert_config
if route_config is not None:
self._values["route_config"] = route_config
@builtins.property
def domain_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: domainName: domain name
'''
result = self._values.get("domain_name")
assert result is not None, "Required property 'domain_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def protocol(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: protocol: HTTP or HTTP,HTTPS
'''
result = self._values.get("protocol")
assert result is not None, "Required property 'protocol' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def api_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: apiVersion: API version
'''
result = self._values.get("api_version")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def cert_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.CertConfigProperty]]:
'''
:Property: certConfig: certificate info
'''
result = self._values.get("cert_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.CertConfigProperty]], result)
@builtins.property
def route_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.RouteConfigProperty]]:
'''
:Property: routeConfig: Routing table that maps paths to functions when a function is called with a custom domain name
'''
result = self._values.get("route_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.RouteConfigProperty]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosCustomDomainProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
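# --- Usage sketch (illustrative, not part of the generated bindings) ---
# A minimal custom domain that maps an HTTP path to a function, assuming an
# existing ``ros_cdk_core.Stack`` named ``stack``; domain, service, and
# function names are placeholders.
#
#   domain = RosCustomDomain(
#       stack,
#       "DemoDomain",
#       RosCustomDomainProps(
#           domain_name="fc.example.com",
#           protocol="HTTP",
#           route_config=RosCustomDomain.RouteConfigProperty(
#               routes=[
#                   RosCustomDomain.RoutesProperty(
#                       path="/login/*",
#                       service_name="blogService",
#                       function_name="login",
#                   )
#               ]
#           ),
#       ),
#       enable_resource_property_constraint=True,
#   )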
class RosFunction(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosFunction",
):
'''A ROS template type: ``ALIYUN::FC::Function``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosFunctionProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Function``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ARN: The ARN for ALIYUN::ROS::CustomResource
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionId")
def attr_function_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: FunctionId: The function ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: FunctionName: The function name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceId: The service ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The service name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="functionName")
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "functionName"))
@function_name.setter
def function_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "functionName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="handler")
def handler(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: handler: The function execution entry point.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "handler"))
@handler.setter
def handler(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "handler", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="runtime")
def runtime(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: runtime: The function runtime environment. Supported runtimes include nodejs6, nodejs8, nodejs10, nodejs12, python2.7, python3, java8, custom, custom-container, and so on
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "runtime"))
@runtime.setter
def runtime(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "runtime", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="asyncConfiguration")
def async_configuration(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]]:
'''
:Property: asyncConfiguration: Configuration of asynchronous function calls
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]], jsii.get(self, "asyncConfiguration"))
@async_configuration.setter
def async_configuration(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]],
) -> None:
jsii.set(self, "asyncConfiguration", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="caPort")
def ca_port(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: caPort: A field dedicated to the custom runtime and custom container runtime, specifying the port that the started custom HTTP server listens on. The default value is 9000
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "caPort"))
@ca_port.setter
def ca_port(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "caPort", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="code")
def code(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]]:
'''
:Property: code: The code that contains the function implementation.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]], jsii.get(self, "code"))
@code.setter
def code(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]],
) -> None:
jsii.set(self, "code", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="customContainerConfig")
def custom_container_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]]:
'''
:Property: customContainerConfig: Configuration related to the custom container runtime. Once configured, the function is executed inside the specified custom container
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]], jsii.get(self, "customContainerConfig"))
@custom_container_config.setter
def custom_container_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]],
) -> None:
jsii.set(self, "customContainerConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="description")
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Function description
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "description"))
@description.setter
def description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "description", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="environmentVariables")
def environment_variables(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]]:
'''
:Property: environmentVariables: The environment variables set for the function; their values can be read inside the function.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]], jsii.get(self, "environmentVariables"))
@environment_variables.setter
def environment_variables(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]],
) -> None:
jsii.set(self, "environmentVariables", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="initializationTimeout")
def initialization_timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: initializationTimeout: The max execution time of the initializer, in seconds
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "initializationTimeout"))
@initialization_timeout.setter
def initialization_timeout(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "initializationTimeout", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="initializer")
def initializer(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: initializer: The entry point of the initializer
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "initializer"))
@initializer.setter
def initializer(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "initializer", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="instanceConcurrency")
def instance_concurrency(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: instanceConcurrency: Function instance concurrency. Value can be between 1 and 100.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "instanceConcurrency"))
@instance_concurrency.setter
def instance_concurrency(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "instanceConcurrency", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="instanceType")
def instance_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: instanceType: Instance type. Valid values: e1 (flexible instance, memory size between 128 and 3072 MB); c1 (performance instance, allowed memory sizes are 4096, 8192, 16384 and 32768 MB)
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "instanceType"))
@instance_type.setter
def instance_type(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "instanceType", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="memorySize")
def memory_size(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: memorySize: The amount of memory that is used to run the function, in MB. Function Compute uses this value to allocate CPU resources proportionally. Defaults to 128 MB. It must be a multiple of 64 MB, between 128 MB and 3072 MB.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "memorySize"))
@memory_size.setter
def memory_size(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "memorySize", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="timeout")
def timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: timeout: The maximum time duration a function can run, in seconds, after which Function Compute terminates the execution. Defaults to 3 seconds, and can be between 1 and 600 seconds.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "timeout"))
@timeout.setter
def timeout(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "timeout", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunction.AsyncConfigurationProperty",
jsii_struct_bases=[],
name_mapping={
"destination": "destination",
"max_async_event_age_in_seconds": "maxAsyncEventAgeInSeconds",
"max_async_retry_attempts": "maxAsyncRetryAttempts",
"stateful_invocation": "statefulInvocation",
},
)
class AsyncConfigurationProperty:
def __init__(
self,
*,
destination: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.DestinationProperty"]] = None,
max_async_event_age_in_seconds: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
max_async_retry_attempts: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
stateful_invocation: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param destination:
:param max_async_event_age_in_seconds:
:param max_async_retry_attempts:
:param stateful_invocation:
'''
self._values: typing.Dict[str, typing.Any] = {}
if destination is not None:
self._values["destination"] = destination
if max_async_event_age_in_seconds is not None:
self._values["max_async_event_age_in_seconds"] = max_async_event_age_in_seconds
if max_async_retry_attempts is not None:
self._values["max_async_retry_attempts"] = max_async_retry_attempts
if stateful_invocation is not None:
self._values["stateful_invocation"] = stateful_invocation
@builtins.property
def destination(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.DestinationProperty"]]:
'''
:Property: destination: Set destination of asynchronous function calls
'''
result = self._values.get("destination")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.DestinationProperty"]], result)
@builtins.property
def max_async_event_age_in_seconds(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: maxAsyncEventAgeInSeconds: Configures the maximum lifetime of messages. The duration is measured from the time the asynchronous call is triggered until the message is dequeued for processing. If this period exceeds the configured MaxAsyncEventAgeInSeconds, the message is discarded; discarded messages are counted in the cloud monitoring AsyncEventExpiredDropped metric.
'''
result = self._values.get("max_async_event_age_in_seconds")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def max_async_retry_attempts(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: maxAsyncRetryAttempts: Configures the number of retry attempts
'''
result = self._values.get("max_async_retry_attempts")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def stateful_invocation(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: statefulInvocation: Whether to enable stateful invocation
'''
result = self._values.get("stateful_invocation")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AsyncConfigurationProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
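# --- Usage sketch (illustrative, not part of the generated bindings) ---
# An async-call configuration with success/failure destinations. The
# destination ARNs below are placeholders; ``RosFunction.DestinationProperty``
# is defined further down in this module.
#
#   async_config = RosFunction.AsyncConfigurationProperty(
#       max_async_event_age_in_seconds=3600,   # discard messages older than 1 hour
#       max_async_retry_attempts=2,
#       destination=RosFunction.DestinationProperty(
#           on_success="acs:fc:<region>:<account-id>:services/callback/functions/on-success",
#           on_failure="acs:mns:<region>:<account-id>:/queues/dead-letter",
#       ),
#   )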
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunction.CodeProperty",
jsii_struct_bases=[],
name_mapping={
"oss_bucket_name": "ossBucketName",
"oss_object_name": "ossObjectName",
"source_code": "sourceCode",
"zip_file": "zipFile",
},
)
class CodeProperty:
def __init__(
self,
*,
oss_bucket_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
oss_object_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
source_code: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
zip_file: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param oss_bucket_name:
:param oss_object_name:
:param source_code:
:param zip_file:
'''
self._values: typing.Dict[str, typing.Any] = {}
if oss_bucket_name is not None:
self._values["oss_bucket_name"] = oss_bucket_name
if oss_object_name is not None:
self._values["oss_object_name"] = oss_object_name
if source_code is not None:
self._values["source_code"] = source_code
if zip_file is not None:
self._values["zip_file"] = zip_file
@builtins.property
def oss_bucket_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
ossBucketName: OSS bucket name.
Priority: ZipFile > SourceCode > OssBucketName&OssObjectName.
'''
result = self._values.get("oss_bucket_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def oss_object_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
ossObjectName: OSS object name.
Priority: ZipFile > SourceCode > OssBucketName&OssObjectName.
'''
result = self._values.get("oss_object_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def source_code(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
sourceCode: (Node.js, PHP and Python) The source code for your FC function. If you use this parameter to provide the function source inline, ROS places it in a file called index (UTF-8 encoded) and then compresses it to create a deployment package. For the Handler property, the first part of the handler identifier must be index. For example: index.handler.
Your source code can contain up to 4096 characters. For JSON, you must use backslashes to escape quotes and special characters, such as line breaks.
Priority: ZipFile > SourceCode > OssBucketName&OssObjectName.
'''
result = self._values.get("source_code")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def zip_file(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
zipFile: Base64 encoded zip file content.
Priority: ZipFile > SourceCode > OssBucketName&OssObjectName.
'''
result = self._values.get("zip_file")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CodeProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
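# --- Usage sketch (illustrative, not part of the generated bindings) ---
# Two ways to provide function code; note the documented priority
# ZipFile > SourceCode > OssBucketName&OssObjectName. Bucket and object
# names below are placeholders.
#
#   inline_code = RosFunction.CodeProperty(
#       source_code="def handler(event, context):\n    return 'hello'",
#   )
#   oss_code = RosFunction.CodeProperty(
#       oss_bucket_name="my-code-bucket",
#       oss_object_name="packages/function.zip",
#   )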
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunction.CustomContainerConfigProperty",
jsii_struct_bases=[],
name_mapping={
"image": "image",
"acceleration_type": "accelerationType",
"args": "args",
"command": "command",
},
)
class CustomContainerConfigProperty:
def __init__(
self,
*,
image: typing.Union[builtins.str, ros_cdk_core.IResolvable],
acceleration_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
args: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
command: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param image:
:param acceleration_type:
:param args:
:param command:
'''
self._values: typing.Dict[str, typing.Any] = {
"image": image,
}
if acceleration_type is not None:
self._values["acceleration_type"] = acceleration_type
if args is not None:
self._values["args"] = args
if command is not None:
self._values["command"] = command
@builtins.property
def image(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: image: Container image address. For example: registry-vpc.cn-hangzhou.aliyuncs.com/fc-demo/helloworld:v1beta1
'''
result = self._values.get("image")
assert result is not None, "Required property 'image' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def acceleration_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
accelerationType: Whether to enable image acceleration. Valid Values:
Default: Indicates that image acceleration is enabled.
None: Indicates that image acceleration is disabled.
'''
result = self._values.get("acceleration_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def args(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: args: Container startup parameters. For example: ["-arg1", "value1"]
'''
result = self._values.get("args")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def command(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: command: Container start command. For example: ["/code/myserver"]
'''
result = self._values.get("command")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CustomContainerConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
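# --- Usage sketch (illustrative, not part of the generated bindings) ---
# A custom container configuration; ``command`` and ``args`` are JSON-array
# strings, matching the documented examples. The image address mirrors the
# example in the docstring above.
#
#   container = RosFunction.CustomContainerConfigProperty(
#       image="registry-vpc.cn-hangzhou.aliyuncs.com/fc-demo/helloworld:v1beta1",
#       command='["/code/myserver"]',
#       args='["-arg1", "value1"]',
#       acceleration_type="Default",   # "Default" enables image acceleration
#   )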
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunction.DestinationProperty",
jsii_struct_bases=[],
name_mapping={"on_failure": "onFailure", "on_success": "onSuccess"},
)
class DestinationProperty:
def __init__(
self,
*,
on_failure: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
on_success: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param on_failure:
:param on_success:
'''
self._values: typing.Dict[str, typing.Any] = {}
if on_failure is not None:
self._values["on_failure"] = on_failure
if on_success is not None:
self._values["on_success"] = on_success
@builtins.property
def on_failure(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: onFailure: When the function invocation fails (system error or function internal error), FC calls the target specified by this configuration
'''
result = self._values.get("on_failure")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def on_success(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: onSuccess: When the function is invoked successfully, FC calls the target specified by this configuration
'''
result = self._values.get("on_success")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DestinationProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
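# --- Usage sketch (illustrative, not part of the generated bindings) ---
# Putting the pieces together: a function with inline code, assuming an
# existing ``ros_cdk_core.Stack`` named ``stack``. ``RosFunctionProps`` is
# defined later in this module; the field names below mirror the RosFunction
# properties above and are assumptions, not verified here.
#
#   func = RosFunction(
#       stack,
#       "HelloFunction",
#       RosFunctionProps(
#           service_name="my-service",
#           function_name="hello",
#           handler="index.handler",
#           runtime="python3",
#           code=RosFunction.CodeProperty(
#               source_code="def handler(event, context):\n    return 'hello'",
#           ),
#           memory_size=128,   # MB, multiple of 64
#           timeout=10,        # seconds
#       ),
#       True,   # enable_resource_property_constraint
#   )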
class RosFunctionInvoker(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosFunctionInvoker",
):
'''A ROS template type: ``ALIYUN::FC::FunctionInvoker``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosFunctionInvokerProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::FunctionInvoker``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResult")
def attr_result(self) -> ros_cdk_core.IResolvable:
'''
:Attribute:
Result: Depends on result type:
NoResult: Async invoke has no result.
Success: The response of the function. The response should be a UTF-8 encoded string; otherwise ROS will report an error. If the response is binary, encode it via base64 before it is returned.
Failure: Error Message.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResult"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResultType")
def attr_result_type(self) -> ros_cdk_core.IResolvable:
'''
:Attribute:
ResultType: Result type:
NoResult: Async invoke has no result.
Success: Sync invoke succeeds.
Failure: Sync invoke fails.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResultType"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="functionName")
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "functionName"))
@function_name.setter
def function_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "functionName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="async")
def async_(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: async: Invocation type, Sync or Async. Defaults to Sync.
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "async"))
@async_.setter
def async_(
self,
value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "async", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="checkError")
def check_error(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property:
checkError: Whether to check the function invocation result for errors.
If set to true and the invocation result contains an error, the resource creation is regarded as failed.
Default is false
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "checkError"))
@check_error.setter
def check_error(
self,
value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "checkError", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="event")
def event(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
event: This value is passed to the function as a UTF-8 encoded string. It is the function's responsibility to interpret the value.
If the value needs to be binary, encode it via base64 before passing it to this property.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "event"))
@event.setter
def event(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "event", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="executeVersion")
def execute_version(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: executeVersion: If the property is not specified on creation or update, the function is not invoked. Changing this property triggers an invocation of the function.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "executeVersion"))
@execute_version.setter
def execute_version(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "executeVersion", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="qualifier")
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: qualifier: Service version; can be a versionId or an aliasName.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "qualifier"))
@qualifier.setter
def qualifier(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "qualifier", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceRegionId")
def service_region_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: serviceRegionId: The region that the service belongs to.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "serviceRegionId"))
@service_region_id.setter
def service_region_id(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "serviceRegionId", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunctionInvokerProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"service_name": "serviceName",
"async_": "async",
"check_error": "checkError",
"event": "event",
"execute_version": "executeVersion",
"qualifier": "qualifier",
"service_region_id": "serviceRegionId",
},
)
class RosFunctionInvokerProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
async_: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
check_error: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
event: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
execute_version: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
qualifier: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
service_region_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::FunctionInvoker``.
:param function_name:
:param service_name:
:param async_:
:param check_error:
:param event:
:param execute_version:
:param qualifier:
:param service_region_id:
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"service_name": service_name,
}
if async_ is not None:
self._values["async_"] = async_
if check_error is not None:
self._values["check_error"] = check_error
if event is not None:
self._values["event"] = event
if execute_version is not None:
self._values["execute_version"] = execute_version
if qualifier is not None:
self._values["qualifier"] = qualifier
if service_region_id is not None:
self._values["service_region_id"] = service_region_id
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def async_(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: async: Invocation type, Sync or Async. Defaults to Sync.
'''
result = self._values.get("async_")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def check_error(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property:
checkError: Whether to check for errors in the function invocation result.
If set to true and the invocation result contains an error, the resource creation is regarded as failed.
Default is false.
'''
result = self._values.get("check_error")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def event(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
event: This value is passed to the function as a UTF-8 encoded string. It is the function's responsibility to interpret the value.
If the value needs to be binary, encode it via Base64 before passing it to this property.
'''
result = self._values.get("event")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def execute_version(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: executeVersion: If the property is not specified for creation and update, the function is not invoked. A change to this property triggers an invocation of the function.
'''
result = self._values.get("execute_version")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: qualifier: Service version; can be a versionId or an aliasName.
'''
result = self._values.get("qualifier")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def service_region_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: serviceRegionId: The region that the service belongs to.
'''
result = self._values.get("service_region_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosFunctionInvokerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
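# --- Illustrative usage sketch (editorial addition; not part of the generated bindings) ---
# A minimal example of invoking an existing FC function via RosFunctionInvoker.
# Assumptions: the service/function names are placeholders, and RosFunctionInvoker
# follows the same (scope, id, props, enable_resource_property_constraint)
# constructor signature as the other Ros* resources in this module.
def _example_function_invoker(scope: ros_cdk_core.Construct) -> None:
    props = RosFunctionInvokerProps(
        service_name="my-service",      # placeholder: an existing FC service
        function_name="my-function",    # placeholder: an existing FC function
        event='{"hello": "world"}',     # passed to the function as a UTF-8 string
        execute_version=1,              # change this value to re-trigger the invocation
    )
    RosFunctionInvoker(scope, "MyFunctionInvoker", props, True)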
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunctionProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"handler": "handler",
"runtime": "runtime",
"service_name": "serviceName",
"async_configuration": "asyncConfiguration",
"ca_port": "caPort",
"code": "code",
"custom_container_config": "customContainerConfig",
"description": "description",
"environment_variables": "environmentVariables",
"initialization_timeout": "initializationTimeout",
"initializer": "initializer",
"instance_concurrency": "instanceConcurrency",
"instance_type": "instanceType",
"memory_size": "memorySize",
"timeout": "timeout",
},
)
class RosFunctionProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
handler: typing.Union[builtins.str, ros_cdk_core.IResolvable],
runtime: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
async_configuration: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.AsyncConfigurationProperty]] = None,
ca_port: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
code: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CodeProperty]] = None,
custom_container_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CustomContainerConfigProperty]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
environment_variables: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]] = None,
initialization_timeout: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
initializer: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
instance_concurrency: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
instance_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
memory_size: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
timeout: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Function``.
:param function_name:
:param handler:
:param runtime:
:param service_name:
:param async_configuration:
:param ca_port:
:param code:
:param custom_container_config:
:param description:
:param environment_variables:
:param initialization_timeout:
:param initializer:
:param instance_concurrency:
:param instance_type:
:param memory_size:
:param timeout:
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"handler": handler,
"runtime": runtime,
"service_name": service_name,
}
if async_configuration is not None:
self._values["async_configuration"] = async_configuration
if ca_port is not None:
self._values["ca_port"] = ca_port
if code is not None:
self._values["code"] = code
if custom_container_config is not None:
self._values["custom_container_config"] = custom_container_config
if description is not None:
self._values["description"] = description
if environment_variables is not None:
self._values["environment_variables"] = environment_variables
if initialization_timeout is not None:
self._values["initialization_timeout"] = initialization_timeout
if initializer is not None:
self._values["initializer"] = initializer
if instance_concurrency is not None:
self._values["instance_concurrency"] = instance_concurrency
if instance_type is not None:
self._values["instance_type"] = instance_type
if memory_size is not None:
self._values["memory_size"] = memory_size
if timeout is not None:
self._values["timeout"] = timeout
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def handler(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: handler: The function execution entry point.
'''
result = self._values.get("handler")
assert result is not None, "Required property 'handler' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def runtime(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: runtime: The function runtime environment, for example nodejs6, nodejs8, nodejs10, nodejs12, python2.7, python3, java8, custom, or custom-container.
'''
result = self._values.get("runtime")
assert result is not None, "Required property 'runtime' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def async_configuration(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.AsyncConfigurationProperty]]:
'''
:Property: asyncConfiguration: Configuration of asynchronous function calls
'''
result = self._values.get("async_configuration")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.AsyncConfigurationProperty]], result)
@builtins.property
def ca_port(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: caPort: A field dedicated to the custom runtime and custom container runtime; it specifies the port that the started custom HTTP server listens on. The default value is 9000.
'''
result = self._values.get("ca_port")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def code(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CodeProperty]]:
'''
:Property: code: The code that contains the function implementation.
'''
result = self._values.get("code")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CodeProperty]], result)
@builtins.property
def custom_container_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CustomContainerConfigProperty]]:
'''
:Property: customContainerConfig: Configuration related to the custom container runtime. Once configured, the function is executed inside the specified custom container.
'''
result = self._values.get("custom_container_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CustomContainerConfigProperty]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Function description
'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def environment_variables(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]]:
'''
:Property: environmentVariables: The environment variables set for the function; their values can be read inside the function.
'''
result = self._values.get("environment_variables")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]], result)
@builtins.property
def initialization_timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: initializationTimeout: The maximum execution time of the initializer, in seconds.
'''
result = self._values.get("initialization_timeout")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def initializer(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: initializer: The entry point of the initializer.
'''
result = self._values.get("initializer")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def instance_concurrency(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: instanceConcurrency: Function instance concurrency. The value can be between 1 and 100.
'''
result = self._values.get("instance_concurrency")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def instance_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: instanceType: Instance type. Valid values: e1 (flexible instance, memory size between 128 MB and 3072 MB) and c1 (performance instance, memory size must be 4096, 8192, 16384, or 32768 MB).
'''
result = self._values.get("instance_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def memory_size(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: memorySize: The amount of memory that is used to run the function, in MB. Function Compute uses this value to allocate CPU resources proportionally. Defaults to 128 MB. It must be a multiple of 64 MB, between 128 MB and 3072 MB.
'''
result = self._values.get("memory_size")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: timeout: The maximum duration a function can run, in seconds, after which Function Compute terminates the execution. Defaults to 3 seconds; can be between 1 and 600 seconds.
'''
result = self._values.get("timeout")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosFunctionProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
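# --- Illustrative usage sketch (editorial addition; not part of the generated bindings) ---
# A minimal RosFunctionProps value. All names are placeholders, and the CodeProperty
# fields are an assumption: they mirror RosLayer.CodeProperty below (oss_bucket_name /
# oss_object_name / zip_file), with zip_file holding a Base64-encoded ZIP package.
def _example_function_props() -> RosFunctionProps:
    return RosFunctionProps(
        service_name="my-service",    # placeholder: an existing FC service
        function_name="my-function",
        handler="index.handler",      # entry point: function "handler" in module "index"
        runtime="python3",            # one of the runtimes listed in the docstring above
        code=RosFunction.CodeProperty(
            zip_file="UEsDBAo=",      # placeholder Base64 ZIP content (assumption)
        ),
        memory_size=128,              # a multiple of 64 MB, between 128 and 3072 MB
        timeout=60,                   # seconds, between 1 and 600
    )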
class RosLayer(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosLayer",
):
'''A ROS template type: ``ALIYUN::FC::Layer``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosLayerProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Layer``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Arn: The ARN of the layer resource.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLayerName")
def attr_layer_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: LayerName: The name of the layer
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLayerName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersion")
def attr_version(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Version: The version of the layer resource.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersion"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="code")
def code(self) -> typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"]:
'''
:Property: code: The code of layer.
'''
return typing.cast(typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"], jsii.get(self, "code"))
@code.setter
def code(
self,
value: typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"],
) -> None:
jsii.set(self, "code", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="compatibleRuntime")
def compatible_runtime(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]:
'''
:Property: compatibleRuntime: The runtime environment supported by the layer. For example: nodejs12, nodejs10, nodejs8, nodejs6, python3, and python2.7
'''
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]], jsii.get(self, "compatibleRuntime"))
@compatible_runtime.setter
def compatible_runtime(
self,
value: typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]],
) -> None:
jsii.set(self, "compatibleRuntime", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="layerName")
def layer_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: layerName: The name of the layer
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "layerName"))
@layer_name.setter
def layer_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "layerName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="description")
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: The description of the layer.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "description"))
@description.setter
def description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "description", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosLayer.CodeProperty",
jsii_struct_bases=[],
name_mapping={
"oss_bucket_name": "ossBucketName",
"oss_object_name": "ossObjectName",
"zip_file": "zipFile",
},
)
class CodeProperty:
def __init__(
self,
*,
oss_bucket_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
oss_object_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
zip_file: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param oss_bucket_name:
:param oss_object_name:
:param zip_file:
'''
self._values: typing.Dict[str, typing.Any] = {}
if oss_bucket_name is not None:
self._values["oss_bucket_name"] = oss_bucket_name
if oss_object_name is not None:
self._values["oss_object_name"] = oss_object_name
if zip_file is not None:
self._values["zip_file"] = zip_file
@builtins.property
def oss_bucket_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
ossBucketName: The name of the Object Storage Service (OSS) bucket that
stores the ZIP package of the function code.
Priority: ZipFile > OssBucketName&OssObjectName.
'''
result = self._values.get("oss_bucket_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def oss_object_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
ossObjectName: The name of the OSS object that stores the ZIP package of the function code.
Priority: ZipFile > OssBucketName&OssObjectName.
'''
result = self._values.get("oss_object_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def zip_file(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
zipFile: The function code that is encoded in Base64.
Priority: ZipFile > OssBucketName&OssObjectName.
'''
result = self._values.get("zip_file")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CodeProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosLayerProps",
jsii_struct_bases=[],
name_mapping={
"code": "code",
"compatible_runtime": "compatibleRuntime",
"layer_name": "layerName",
"description": "description",
},
)
class RosLayerProps:
def __init__(
self,
*,
code: typing.Union[ros_cdk_core.IResolvable, RosLayer.CodeProperty],
compatible_runtime: typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[builtins.str, ros_cdk_core.IResolvable]]],
layer_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Layer``.
:param code:
:param compatible_runtime:
:param layer_name:
:param description:
'''
self._values: typing.Dict[str, typing.Any] = {
"code": code,
"compatible_runtime": compatible_runtime,
"layer_name": layer_name,
}
if description is not None:
self._values["description"] = description
@builtins.property
def code(self) -> typing.Union[ros_cdk_core.IResolvable, RosLayer.CodeProperty]:
'''
:Property: code: The code of layer.
'''
result = self._values.get("code")
assert result is not None, "Required property 'code' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, RosLayer.CodeProperty], result)
@builtins.property
def compatible_runtime(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]:
'''
:Property: compatibleRuntime: The runtime environment supported by the layer. For example: nodejs12, nodejs10, nodejs8, nodejs6, python3, and python2.7
'''
result = self._values.get("compatible_runtime")
assert result is not None, "Required property 'compatible_runtime' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]], result)
@builtins.property
def layer_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: layerName: The name of the layer
'''
result = self._values.get("layer_name")
assert result is not None, "Required property 'layer_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: The description of the layer.
'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosLayerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
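# --- Illustrative usage sketch (editorial addition; not part of the generated bindings) ---
# A minimal example of publishing a layer from a ZIP package stored in OSS.
# The bucket/object names are placeholders; per the CodeProperty docstrings above,
# ZipFile takes priority over OssBucketName & OssObjectName when both are given.
def _example_layer(scope: ros_cdk_core.Construct) -> None:
    props = RosLayerProps(
        layer_name="my-shared-libs",
        compatible_runtime=["python3"],         # runtimes allowed to use this layer
        code=RosLayer.CodeProperty(
            oss_bucket_name="my-bucket",        # placeholder: an existing OSS bucket
            oss_object_name="layers/libs.zip",  # placeholder object key
        ),
        description="Shared Python dependencies",
    )
    RosLayer(scope, "MyLayer", props, True)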
class RosProvisionConfig(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosProvisionConfig",
):
'''A ROS template type: ``ALIYUN::FC::ProvisionConfig``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosProvisionConfigProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::ProvisionConfig``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: FunctionName: The function name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrQualifier")
def attr_qualifier(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Qualifier: The service alias
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrQualifier"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResource")
def attr_resource(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Resource: The resource
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResource"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The service name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTarget")
def attr_target(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Target: The number of provisioned instances
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTarget"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="functionName")
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "functionName"))
@function_name.setter
def function_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "functionName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="qualifier")
def qualifier(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
qualifier: The service alias.
Example: "LATEST"
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "qualifier"))
@qualifier.setter
def qualifier(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "qualifier", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="target")
def target(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''
:Property: target: The number of provisioned instances
'''
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], jsii.get(self, "target"))
@target.setter
def target(
self,
value: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "target", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosProvisionConfigProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"qualifier": "qualifier",
"service_name": "serviceName",
"target": "target",
},
)
class RosProvisionConfigProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
qualifier: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
target: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
) -> None:
'''Properties for defining a ``ALIYUN::FC::ProvisionConfig``.
:param function_name:
:param qualifier:
:param service_name:
:param target:
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"qualifier": qualifier,
"service_name": service_name,
"target": target,
}
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def qualifier(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
qualifier: The service alias.
Example: "LATEST"
'''
result = self._values.get("qualifier")
assert result is not None, "Required property 'qualifier' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def target(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''
:Property: target: The number of provisioned instances
'''
result = self._values.get("target")
assert result is not None, "Required property 'target' is missing"
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosProvisionConfigProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
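# --- Illustrative usage sketch (editorial addition; not part of the generated bindings) ---
# A minimal example of keeping warm instances for a function behind a service alias.
# The names and the "prod" alias are placeholders.
def _example_provision_config(scope: ros_cdk_core.Construct) -> None:
    props = RosProvisionConfigProps(
        service_name="my-service",
        function_name="my-function",
        qualifier="prod",    # a service alias, as required by this resource
        target=5,            # number of provisioned instances to maintain
    )
    RosProvisionConfig(scope, "MyProvisionConfig", props, True)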
class RosService(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosService",
):
'''A ROS template type: ``ALIYUN::FC::Service``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosServiceProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Service``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrInternetAccess")
def attr_internet_access(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: InternetAccess: Whether Internet access is enabled
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrInternetAccess"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLogProject")
def attr_log_project(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: LogProject: Log project of service
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLogProject"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLogstore")
def attr_logstore(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Logstore: Log store of service
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLogstore"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrRole")
def attr_role(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Role: Role of service
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrRole"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceId: The service ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The service name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTags")
def attr_tags(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Tags: Tags of service
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTags"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVpcId")
def attr_vpc_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: VpcId: VPC ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVpcId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="deletionForce")
def deletion_force(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: deletionForce: Whether to force-delete the service without waiting for network interfaces to be cleaned up when VpcConfig is specified. Default value is false.
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "deletionForce"))
@deletion_force.setter
def deletion_force(
self,
value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "deletionForce", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="description")
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Service description
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "description"))
@description.setter
def description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "description", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="internetAccess")
def internet_access(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: internetAccess: Set it to true to enable Internet access.
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "internetAccess"))
@internet_access.setter
def internet_access(
self,
value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "internetAccess", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="logConfig")
def log_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.LogConfigProperty"]]:
'''
:Property: logConfig: Log configuration. Function Compute pushes function execution logs to the configured log store.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.LogConfigProperty"]], jsii.get(self, "logConfig"))
@log_config.setter
def log_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.LogConfigProperty"]],
) -> None:
jsii.set(self, "logConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="nasConfig")
def nas_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.NasConfigProperty"]]:
'''
:Property: nasConfig: NAS configuration. Function Compute uses the specified NAS file system configured on the service.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.NasConfigProperty"]], jsii.get(self, "nasConfig"))
@nas_config.setter
def nas_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.NasConfigProperty"]],
) -> None:
jsii.set(self, "nasConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="role")
def role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: role: The role grants Function Compute the permission to access user’s cloud resources, such as pushing logs to user’s log store. The temporary STS token generated from this role can be retrieved from function context and used to access cloud resources.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "role"))
@role.setter
def role(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "role", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="tags")
def tags(self) -> typing.Optional[typing.List["RosService.TagsProperty"]]:
'''
:Property: tags: Tags to attach to the service. A maximum of 20 tags can be added when the service is created. Each tag has two properties, Key and Value; Key is required.
'''
return typing.cast(typing.Optional[typing.List["RosService.TagsProperty"]], jsii.get(self, "tags"))
@tags.setter
def tags(
self,
value: typing.Optional[typing.List["RosService.TagsProperty"]],
) -> None:
jsii.set(self, "tags", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="tracingConfig")
def tracing_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.TracingConfigProperty"]]:
'''
:Property: tracingConfig: The Tracing Analysis configuration. After Function Compute integrates with Tracing Analysis, you can record the time a request stays in Function Compute, view the cold start time of a function, and record the execution time of a function.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.TracingConfigProperty"]], jsii.get(self, "tracingConfig"))
@tracing_config.setter
def tracing_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.TracingConfigProperty"]],
) -> None:
jsii.set(self, "tracingConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="vpcConfig")
def vpc_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.VpcConfigProperty"]]:
'''
:Property: vpcConfig: VPC configuration. Function Compute uses this configuration to set up an ENI in the specified VPC.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.VpcConfigProperty"]], jsii.get(self, "vpcConfig"))
@vpc_config.setter
def vpc_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.VpcConfigProperty"]],
) -> None:
jsii.set(self, "vpcConfig", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.LogConfigProperty",
jsii_struct_bases=[],
name_mapping={
"enable_request_metrics": "enableRequestMetrics",
"logstore": "logstore",
"project": "project",
},
)
class LogConfigProperty:
def __init__(
self,
*,
enable_request_metrics: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
logstore: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
project: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param enable_request_metrics:
:param logstore:
:param project:
'''
self._values: typing.Dict[str, typing.Any] = {}
if enable_request_metrics is not None:
self._values["enable_request_metrics"] = enable_request_metrics
if logstore is not None:
self._values["logstore"] = logstore
if project is not None:
self._values["project"] = project
@builtins.property
def enable_request_metrics(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: enableRequestMetrics: Whether enable request metrics.
'''
result = self._values.get("enable_request_metrics")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def logstore(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: logstore: The Logstore name of the Log Service
'''
result = self._values.get("logstore")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def project(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: project: The project name of the Log Service
'''
result = self._values.get("project")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "LogConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.MountPointsProperty",
jsii_struct_bases=[],
name_mapping={"mount_dir": "mountDir", "server_addr": "serverAddr"},
)
class MountPointsProperty:
def __init__(
self,
*,
mount_dir: typing.Union[builtins.str, ros_cdk_core.IResolvable],
server_addr: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
'''
:param mount_dir:
:param server_addr:
'''
self._values: typing.Dict[str, typing.Any] = {
"mount_dir": mount_dir,
"server_addr": server_addr,
}
@builtins.property
def mount_dir(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: mountDir: A local mount point.
'''
result = self._values.get("mount_dir")
assert result is not None, "Required property 'mount_dir' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def server_addr(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serverAddr: The address of the NAS instance.
'''
result = self._values.get("server_addr")
assert result is not None, "Required property 'server_addr' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "MountPointsProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.NasConfigProperty",
jsii_struct_bases=[],
name_mapping={
"group_id": "groupId",
"mount_points": "mountPoints",
"user_id": "userId",
},
)
class NasConfigProperty:
def __init__(
self,
*,
group_id: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
mount_points: typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[ros_cdk_core.IResolvable, "RosService.MountPointsProperty"]]],
user_id: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
) -> None:
'''
:param group_id:
:param mount_points:
:param user_id:
'''
self._values: typing.Dict[str, typing.Any] = {
"group_id": group_id,
"mount_points": mount_points,
"user_id": user_id,
}
@builtins.property
def group_id(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''
:Property: groupId: Group ID
'''
result = self._values.get("group_id")
assert result is not None, "Required property 'group_id' is missing"
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)
@builtins.property
def mount_points(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosService.MountPointsProperty"]]]:
'''
:Property: mountPoints: Mount points
'''
result = self._values.get("mount_points")
assert result is not None, "Required property 'mount_points' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosService.MountPointsProperty"]]], result)
@builtins.property
def user_id(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''
:Property: userId: User ID
'''
result = self._values.get("user_id")
assert result is not None, "Required property 'user_id' is missing"
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "NasConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.TagsProperty",
jsii_struct_bases=[],
name_mapping={"key": "key", "value": "value"},
)
class TagsProperty:
def __init__(
self,
*,
key: typing.Union[builtins.str, ros_cdk_core.IResolvable],
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param key:
:param value:
'''
self._values: typing.Dict[str, typing.Any] = {
"key": key,
}
if value is not None:
self._values["value"] = value
@builtins.property
def key(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: key: The tag key.
'''
result = self._values.get("key")
assert result is not None, "Required property 'key' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def value(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: value: The tag value.
'''
result = self._values.get("value")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "TagsProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.TracingConfigProperty",
jsii_struct_bases=[],
name_mapping={"params": "params", "type": "type"},
)
class TracingConfigProperty:
def __init__(
self,
*,
params: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]] = None,
type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param params:
:param type:
'''
self._values: typing.Dict[str, typing.Any] = {}
if params is not None:
self._values["params"] = params
if type is not None:
self._values["type"] = type
@builtins.property
def params(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]]:
'''
:Property: params: The tracing analysis parameters.
'''
result = self._values.get("params")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]], result)
@builtins.property
def type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: type: The type of the tracing analysis system.
'''
result = self._values.get("type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "TracingConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.VpcConfigProperty",
jsii_struct_bases=[],
name_mapping={
"security_group_id": "securityGroupId",
"vpc_id": "vpcId",
"v_switch_ids": "vSwitchIds",
},
)
class VpcConfigProperty:
def __init__(
self,
*,
security_group_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
vpc_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
v_switch_ids: typing.Union[typing.Sequence[typing.Any], ros_cdk_core.IResolvable],
) -> None:
'''
:param security_group_id:
:param vpc_id:
:param v_switch_ids:
'''
self._values: typing.Dict[str, typing.Any] = {
"security_group_id": security_group_id,
"vpc_id": vpc_id,
"v_switch_ids": v_switch_ids,
}
@builtins.property
def security_group_id(
self,
) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: securityGroupId: Security group ID
'''
result = self._values.get("security_group_id")
assert result is not None, "Required property 'security_group_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def vpc_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: vpcId: VPC ID
'''
result = self._values.get("vpc_id")
assert result is not None, "Required property 'vpc_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def v_switch_ids(
self,
) -> typing.Union[typing.List[typing.Any], ros_cdk_core.IResolvable]:
'''
:Property: vSwitchIds: List of VSwitch IDs
'''
result = self._values.get("v_switch_ids")
assert result is not None, "Required property 'v_switch_ids' is missing"
return typing.cast(typing.Union[typing.List[typing.Any], ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "VpcConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
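# --- Illustrative usage sketch (editorial addition; not part of the generated bindings) ---
# A minimal example of building the nested RosService configuration objects defined
# above; RosServiceProps (defined next) accepts them through its log_config,
# vpc_config and tags parameters. All IDs and names are placeholders.
def _example_service_configs() -> typing.Tuple[typing.Any, ...]:
    log_config = RosService.LogConfigProperty(
        project="my-sls-project",      # placeholder: an existing Log Service project
        logstore="my-logstore",
        enable_request_metrics=True,
    )
    vpc_config = RosService.VpcConfigProperty(
        vpc_id="vpc-xxxxxxxx",         # placeholder IDs
        v_switch_ids=["vsw-xxxxxxxx"],
        security_group_id="sg-xxxxxxxx",
    )
    tags = [RosService.TagsProperty(key="env", value="prod")]
    return (log_config, vpc_config, tags)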
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosServiceProps",
jsii_struct_bases=[],
name_mapping={
"service_name": "serviceName",
"deletion_force": "deletionForce",
"description": "description",
"internet_access": "internetAccess",
"log_config": "logConfig",
"nas_config": "nasConfig",
"role": "role",
"tags": "tags",
"tracing_config": "tracingConfig",
"vpc_config": "vpcConfig",
},
)
class RosServiceProps:
def __init__(
self,
*,
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
deletion_force: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
internet_access: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
log_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]] = None,
nas_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]] = None,
role: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
tags: typing.Optional[typing.Sequence[RosService.TagsProperty]] = None,
tracing_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]] = None,
vpc_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Service``.
:param service_name:
:param deletion_force:
:param description:
:param internet_access:
:param log_config:
:param nas_config:
:param role:
:param tags:
:param tracing_config:
:param vpc_config:
'''
self._values: typing.Dict[str, typing.Any] = {
"service_name": service_name,
}
if deletion_force is not None:
self._values["deletion_force"] = deletion_force
if description is not None:
self._values["description"] = description
if internet_access is not None:
self._values["internet_access"] = internet_access
if log_config is not None:
self._values["log_config"] = log_config
if nas_config is not None:
self._values["nas_config"] = nas_config
if role is not None:
self._values["role"] = role
if tags is not None:
self._values["tags"] = tags
if tracing_config is not None:
self._values["tracing_config"] = tracing_config
if vpc_config is not None:
self._values["vpc_config"] = vpc_config
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def deletion_force(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: deletionForce: Whether to force delete the service without waiting for network interfaces to be cleaned up if VpcConfig is specified. Default value is false.
'''
result = self._values.get("deletion_force")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Service description
'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def internet_access(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: internetAccess: Set it to true to enable Internet access.
'''
result = self._values.get("internet_access")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def log_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]]:
'''
:Property: logConfig: Log configuration. Function Compute pushes function execution logs to the configured log store.
'''
result = self._values.get("log_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]], result)
@builtins.property
def nas_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]]:
'''
:Property: nasConfig: NAS configuration. Function Compute uses a specified NAS configured on the service.
'''
result = self._values.get("nas_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]], result)
@builtins.property
def role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: role: The role grants Function Compute the permission to access user’s cloud resources, such as pushing logs to user’s log store. The temporary STS token generated from this role can be retrieved from function context and used to access cloud resources.
'''
result = self._values.get("role")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def tags(self) -> typing.Optional[typing.List[RosService.TagsProperty]]:
'''
:Property: tags: Tags to attach to the service. At most 20 tags can be added when creating the service. Each tag has two properties, Key and Value; Key is required.
'''
result = self._values.get("tags")
return typing.cast(typing.Optional[typing.List[RosService.TagsProperty]], result)
@builtins.property
def tracing_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]]:
'''
:Property: tracingConfig: The Tracing Analysis configuration. After Function Compute integrates with Tracing Analysis, you can record how long a request stays in Function Compute, view the cold start time of a function, and record a function's execution time.
'''
result = self._values.get("tracing_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]], result)
@builtins.property
def vpc_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]]:
'''
:Property: vpcConfig: VPC configuration. Function Compute uses the config to set up an ENI in the specified VPC.
'''
result = self._values.get("vpc_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosServiceProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
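# Usage sketch (illustrative only): wiring RosServiceProps into the low-level
# RosService construct. Assumes RosService takes the same
# (scope, id, props, enable_resource_property_constraint) arguments as the
# other Ros* constructs in this module; "stack" and all names are hypothetical.
#
#   props = RosServiceProps(
#       service_name="my-service",
#       description="demo service",
#       internet_access=True,
#   )
#   RosService(stack, "MyService", props, True)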
class RosTrigger(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosTrigger",
):
'''A ROS template type: ``ALIYUN::FC::Trigger``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosTriggerProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Trigger``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: FunctionName: Function name.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: Service name.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTriggerId")
def attr_trigger_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: TriggerId: The trigger ID.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTriggerId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTriggerName")
def attr_trigger_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: TriggerName: Trigger name.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTriggerName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="functionName")
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "functionName"))
@function_name.setter
def function_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "functionName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="triggerConfig")
def trigger_config(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]:
'''
:Property: triggerConfig: Event-source-specific trigger configuration. The value differs according to the trigger type.
'''
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]], jsii.get(self, "triggerConfig"))
@trigger_config.setter
def trigger_config(
self,
value: typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]],
) -> None:
jsii.set(self, "triggerConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="triggerName")
def trigger_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
triggerName: Trigger name.
Example : "image_resize"
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "triggerName"))
@trigger_name.setter
def trigger_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "triggerName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="triggerType")
def trigger_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
triggerType: Trigger type, e.g. oss, timer, logs. This determines how the trigger config is interpreted.
Example : "oss"
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "triggerType"))
@trigger_type.setter
def trigger_type(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "triggerType", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="invocationRole")
def invocation_role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
invocationRole: The role grants the event source the permission to run the function on behalf of the user. This is optional for some triggers.
Example : "acs:ram::1234567890:role/fc-test"
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "invocationRole"))
@invocation_role.setter
def invocation_role(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "invocationRole", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="qualifier")
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
qualifier: Service version or alias.
Example : "LATEST"
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "qualifier"))
@qualifier.setter
def qualifier(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "qualifier", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="sourceArn")
def source_arn(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
sourceArn: The Aliyun Resource Name (ARN) of event source. This is optional for some triggers.
Example : "acs:oss:cn-shanghai:12345:mybucket"
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "sourceArn"))
@source_arn.setter
def source_arn(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "sourceArn", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosTriggerProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"service_name": "serviceName",
"trigger_config": "triggerConfig",
"trigger_name": "triggerName",
"trigger_type": "triggerType",
"invocation_role": "invocationRole",
"qualifier": "qualifier",
"source_arn": "sourceArn",
},
)
class RosTriggerProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
trigger_config: typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]],
trigger_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
trigger_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
invocation_role: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
qualifier: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
source_arn: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Trigger``.
:param function_name:
:param service_name:
:param trigger_config:
:param trigger_name:
:param trigger_type:
:param invocation_role:
:param qualifier:
:param source_arn:
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"service_name": service_name,
"trigger_config": trigger_config,
"trigger_name": trigger_name,
"trigger_type": trigger_type,
}
if invocation_role is not None:
self._values["invocation_role"] = invocation_role
if qualifier is not None:
self._values["qualifier"] = qualifier
if source_arn is not None:
self._values["source_arn"] = source_arn
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name.
'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name.
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def trigger_config(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]:
'''
:Property: triggerConfig: Event-source-specific trigger configuration. The value differs according to the trigger type.
'''
result = self._values.get("trigger_config")
assert result is not None, "Required property 'trigger_config' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]], result)
@builtins.property
def trigger_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
triggerName: Trigger name.
Example : "image_resize"
'''
result = self._values.get("trigger_name")
assert result is not None, "Required property 'trigger_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def trigger_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
triggerType: Trigger type, e.g. oss, timer, logs. This determines how the trigger config is interpreted.
Example : "oss"
'''
result = self._values.get("trigger_type")
assert result is not None, "Required property 'trigger_type' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def invocation_role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
invocationRole: The role grants the event source the permission to run the function on behalf of the user. This is optional for some triggers.
Example : "acs:ram::1234567890:role/fc-test"
'''
result = self._values.get("invocation_role")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
qualifier: Service version or alias.
Example : "LATEST"
'''
result = self._values.get("qualifier")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def source_arn(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
sourceArn: The Aliyun Resource Name (ARN) of event source. This is optional for some triggers.
Example : "acs:oss:cn-shanghai:12345:mybucket"
'''
result = self._values.get("source_arn")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosTriggerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
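# Usage sketch (illustrative only): an OSS trigger declared through
# RosTriggerProps and the RosTrigger construct above. The bucket ARN and role
# ARN reuse the examples from the docstrings; the trigger_config payload is an
# assumed OSS-trigger shape, and "stack" and all names are hypothetical.
#
#   trigger_props = RosTriggerProps(
#       function_name="image_resize_func",
#       service_name="my-service",
#       trigger_name="image_resize",
#       trigger_type="oss",
#       trigger_config={
#           "events": ["oss:ObjectCreated:*"],
#           "filter": {"key": {"prefix": "images/"}},
#       },
#       invocation_role="acs:ram::1234567890:role/fc-test",
#       source_arn="acs:oss:cn-shanghai:12345:mybucket",
#       qualifier="LATEST",
#   )
#   RosTrigger(stack, "MyTrigger", trigger_props, True)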
class RosVersion(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosVersion",
):
'''A ROS template type: ``ALIYUN::FC::Version``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosVersionProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Version``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The service name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersionId")
def attr_version_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: VersionId: The version ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersionId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="description")
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Version description
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "description"))
@description.setter
def description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "description", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosVersionProps",
jsii_struct_bases=[],
name_mapping={"service_name": "serviceName", "description": "description"},
)
class RosVersionProps:
def __init__(
self,
*,
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Version``.
:param service_name:
:param description:
'''
self._values: typing.Dict[str, typing.Any] = {
"service_name": service_name,
}
if description is not None:
self._values["description"] = description
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Version description
'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosVersionProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
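# Usage sketch (illustrative only): publishing a service version with the
# low-level RosVersion construct. "stack" and the names are hypothetical.
#
#   version_props = RosVersionProps(
#       service_name="my-service",
#       description="first published version",
#   )
#   RosVersion(stack, "MyVersion", version_props, True)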
class Service(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Service",
):
'''A ROS resource type: ``ALIYUN::FC::Service``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "ServiceProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Service``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrInternetAccess")
def attr_internet_access(self) -> ros_cdk_core.IResolvable:
'''Attribute InternetAccess: Whether enable Internet access.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrInternetAccess"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLogProject")
def attr_log_project(self) -> ros_cdk_core.IResolvable:
'''Attribute LogProject: Log project of service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLogProject"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLogstore")
def attr_logstore(self) -> ros_cdk_core.IResolvable:
'''Attribute Logstore: Log store of service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLogstore"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrRole")
def attr_role(self) -> ros_cdk_core.IResolvable:
'''Attribute Role: Role of service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrRole"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceId: The service ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: The service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTags")
def attr_tags(self) -> ros_cdk_core.IResolvable:
'''Attribute Tags: Tags of service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTags"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVpcId")
def attr_vpc_id(self) -> ros_cdk_core.IResolvable:
'''Attribute VpcId: VPC ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVpcId"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.ServiceProps",
jsii_struct_bases=[],
name_mapping={
"service_name": "serviceName",
"deletion_force": "deletionForce",
"description": "description",
"internet_access": "internetAccess",
"log_config": "logConfig",
"nas_config": "nasConfig",
"role": "role",
"tags": "tags",
"tracing_config": "tracingConfig",
"vpc_config": "vpcConfig",
},
)
class ServiceProps:
def __init__(
self,
*,
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
deletion_force: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
internet_access: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
log_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]] = None,
nas_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]] = None,
role: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
tags: typing.Optional[typing.Sequence[RosService.TagsProperty]] = None,
tracing_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]] = None,
vpc_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Service``.
:param service_name: Property serviceName: Service name.
:param deletion_force: Property deletionForce: Whether to force delete the service without waiting for network interfaces to be cleaned up if VpcConfig is specified. Default value is false.
:param description: Property description: Service description.
:param internet_access: Property internetAccess: Set it to true to enable Internet access.
:param log_config: Property logConfig: Log configuration. Function Compute pushes function execution logs to the configured log store.
:param nas_config: Property nasConfig: NAS configuration. Function Compute uses a specified NAS configured on the service.
:param role: Property role: The role grants Function Compute the permission to access user’s cloud resources, such as pushing logs to user’s log store. The temporary STS token generated from this role can be retrieved from function context and used to access cloud resources.
:param tags: Property tags: Tags to attach to the service. At most 20 tags can be added when creating the service. Each tag has two properties, Key and Value; Key is required.
:param tracing_config: Property tracingConfig: The Tracing Analysis configuration. After Function Compute integrates with Tracing Analysis, you can record how long a request stays in Function Compute, view the cold start time of a function, and record a function's execution time.
:param vpc_config: Property vpcConfig: VPC configuration. Function Compute uses the config to set up an ENI in the specified VPC.
'''
self._values: typing.Dict[str, typing.Any] = {
"service_name": service_name,
}
if deletion_force is not None:
self._values["deletion_force"] = deletion_force
if description is not None:
self._values["description"] = description
if internet_access is not None:
self._values["internet_access"] = internet_access
if log_config is not None:
self._values["log_config"] = log_config
if nas_config is not None:
self._values["nas_config"] = nas_config
if role is not None:
self._values["role"] = role
if tags is not None:
self._values["tags"] = tags
if tracing_config is not None:
self._values["tracing_config"] = tracing_config
if vpc_config is not None:
self._values["vpc_config"] = vpc_config
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def deletion_force(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''Property deletionForce: Whether to force delete the service without waiting for network interfaces to be cleaned up if VpcConfig is specified.
Default value is false.
'''
result = self._values.get("deletion_force")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property description: Service description.'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def internet_access(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''Property internetAccess: Set it to true to enable Internet access.'''
result = self._values.get("internet_access")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def log_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]]:
'''Property logConfig: Log configuration.
Function Compute pushes function execution logs to the configured log store.
'''
result = self._values.get("log_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]], result)
@builtins.property
def nas_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]]:
'''Property nasConfig: NAS configuration.
Function Compute uses a specified NAS configured on the service.
'''
result = self._values.get("nas_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]], result)
@builtins.property
def role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property role: The role grants Function Compute the permission to access user’s cloud resources, such as pushing logs to user’s log store.
The temporary STS token generated from this role can be retrieved from function context and used to access cloud resources.
'''
result = self._values.get("role")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def tags(self) -> typing.Optional[typing.List[RosService.TagsProperty]]:
'''Property tags: Tags to attach to the service.
At most 20 tags can be added when creating the service. Each tag has two properties, Key and Value; Key is required.
'''
result = self._values.get("tags")
return typing.cast(typing.Optional[typing.List[RosService.TagsProperty]], result)
@builtins.property
def tracing_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]]:
'''Property tracingConfig: The Tracing Analysis configuration.
After Function Compute integrates with Tracing Analysis, you can record how long a request stays in Function Compute, view the cold start time of a function, and record a function's execution time.
'''
result = self._values.get("tracing_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]], result)
@builtins.property
def vpc_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]]:
'''Property vpcConfig: VPC configuration.
Function Compute uses the config to set up an ENI in the specified VPC.
'''
result = self._values.get("vpc_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ServiceProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
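# Usage sketch (illustrative only): the higher-level Service construct with
# ServiceProps, reusing the nested VpcConfigProperty shown earlier. "stack"
# and all identifiers are hypothetical placeholders.
#
#   service = Service(
#       stack,
#       "MyService",
#       ServiceProps(
#           service_name="my-service",
#           internet_access=True,
#           vpc_config=RosService.VpcConfigProperty(
#               security_group_id="sg-xxxxxxxxxxxxxxxx",
#               vpc_id="vpc-xxxxxxxxxxxxxxxx",
#               v_switch_ids=["vsw-xxxxxxxxxxxxxxxx"],
#           ),
#       ),
#   )
#   # service.attr_service_name resolves to the real name at deploy time.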
class Trigger(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Trigger",
):
'''A ROS resource type: ``ALIYUN::FC::Trigger``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "TriggerProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Trigger``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''Attribute FunctionName: Function name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: Service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTriggerId")
def attr_trigger_id(self) -> ros_cdk_core.IResolvable:
'''Attribute TriggerId: The trigger ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTriggerId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTriggerName")
def attr_trigger_name(self) -> ros_cdk_core.IResolvable:
'''Attribute TriggerName: Trigger name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTriggerName"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.TriggerProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"service_name": "serviceName",
"trigger_config": "triggerConfig",
"trigger_name": "triggerName",
"trigger_type": "triggerType",
"invocation_role": "invocationRole",
"qualifier": "qualifier",
"source_arn": "sourceArn",
},
)
class TriggerProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
trigger_config: typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]],
trigger_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
trigger_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
invocation_role: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
qualifier: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
source_arn: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Trigger``.
:param function_name: Property functionName: Function name.
:param service_name: Property serviceName: Service name.
:param trigger_config: Property triggerConfig: Event-source-specific trigger configuration. The value differs according to the trigger type.
:param trigger_name: Property triggerName: Trigger name. Example : "image_resize"
:param trigger_type: Property triggerType: Trigger type, e.g. oss, timer, logs. This determines how the trigger config is interpreted. Example : "oss".
:param invocation_role: Property invocationRole: The role grants the event source the permission to run the function on behalf of the user. This is optional for some triggers. Example : "acs:ram::1234567890:role/fc-test"
:param qualifier: Property qualifier: Service version or alias. Example : "LATEST"
:param source_arn: Property sourceArn: The Aliyun Resource Name (ARN) of event source. This is optional for some triggers. Example : "acs:oss:cn-shanghai:12345:mybucket"
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"service_name": service_name,
"trigger_config": trigger_config,
"trigger_name": trigger_name,
"trigger_type": trigger_type,
}
if invocation_role is not None:
self._values["invocation_role"] = invocation_role
if qualifier is not None:
self._values["qualifier"] = qualifier
if source_arn is not None:
self._values["source_arn"] = source_arn
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property functionName: Function name.'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def trigger_config(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]:
'''Property triggerConfig: Event-source-specific trigger configuration.
The value differs according to the trigger type.
'''
result = self._values.get("trigger_config")
assert result is not None, "Required property 'trigger_config' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]], result)
@builtins.property
def trigger_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property triggerName: Trigger name.
Example : "image_resize"
'''
result = self._values.get("trigger_name")
assert result is not None, "Required property 'trigger_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def trigger_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property triggerType: Trigger type, e.g. oss, timer, logs. This determines how the trigger config is interpreted. Example : "oss".'''
result = self._values.get("trigger_type")
assert result is not None, "Required property 'trigger_type' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def invocation_role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property invocationRole: The role grants the event source the permission to run the function on behalf of the user.
This is optional for some triggers.
Example : "acs:ram::1234567890:role/fc-test"
'''
result = self._values.get("invocation_role")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property qualifier: Service version or alias.
Example : "LATEST"
'''
result = self._values.get("qualifier")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def source_arn(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property sourceArn: The Aliyun Resource Name (ARN) of event source.
This is optional for some triggers.
Example : "acs:oss:cn-shanghai:12345:mybucket"
'''
result = self._values.get("source_arn")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "TriggerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
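# Usage sketch (illustrative only): a timer trigger built with TriggerProps.
# The trigger_config payload is an assumed timer-trigger shape; "stack" and
# all names are hypothetical.
#
#   trigger = Trigger(
#       stack,
#       "EveryFiveMinutes",
#       TriggerProps(
#           function_name="my-function",
#           service_name="my-service",
#           trigger_name="every_five_minutes",
#           trigger_type="timer",
#           trigger_config={"cronExpression": "@every 5m", "enable": True},
#       ),
#   )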
class Version(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Version",
):
'''A ROS resource type: ``ALIYUN::FC::Version``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "VersionProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Version``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: The service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersionId")
def attr_version_id(self) -> ros_cdk_core.IResolvable:
'''Attribute VersionId: The version ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersionId"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.VersionProps",
jsii_struct_bases=[],
name_mapping={"service_name": "serviceName", "description": "description"},
)
class VersionProps:
def __init__(
self,
*,
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Version``.
:param service_name: Property serviceName: Service name.
:param description: Property description: Version description.
'''
self._values: typing.Dict[str, typing.Any] = {
"service_name": service_name,
}
if description is not None:
self._values["description"] = description
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property description: Version description.'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "VersionProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
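# Usage sketch (illustrative only): publishing a version with the higher-level
# Version construct. "stack" and the names are hypothetical.
#
#   version = Version(
#       stack,
#       "MyVersion",
#       VersionProps(service_name="my-service", description="v1"),
#   )
#   # version.attr_version_id resolves to the published version ID.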
__all__ = [
"Alias",
"AliasProps",
"CustomDomain",
"CustomDomainProps",
"Function",
"FunctionInvoker",
"FunctionInvokerProps",
"FunctionProps",
"Layer",
"LayerProps",
"ProvisionConfig",
"ProvisionConfigProps",
"RosAlias",
"RosAliasProps",
"RosCustomDomain",
"RosCustomDomainProps",
"RosFunction",
"RosFunctionInvoker",
"RosFunctionInvokerProps",
"RosFunctionProps",
"RosLayer",
"RosLayerProps",
"RosProvisionConfig",
"RosProvisionConfigProps",
"RosService",
"RosServiceProps",
"RosTrigger",
"RosTriggerProps",
"RosVersion",
"RosVersionProps",
"Service",
"ServiceProps",
"Trigger",
"TriggerProps",
"Version",
"VersionProps",
]
publication.publish()
| '''
## Aliyun ROS FC Construct Library
This module is part of the AliCloud ROS Cloud Development Kit (ROS CDK) project.
```python
# Example automatically generated from non-compiling source. May contain errors.
import * as FC from '@alicloud/ros-cdk-fc';
```
'''
import abc
import builtins
import datetime
import enum
import typing
import jsii
import publication
import typing_extensions
from ._jsii import *
import ros_cdk_core
class Alias(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Alias",
):
'''A ROS resource type: ``ALIYUN::FC::Alias``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "AliasProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Alias``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrAliasName")
def attr_alias_name(self) -> ros_cdk_core.IResolvable:
'''Attribute AliasName: The alias name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrAliasName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: The service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersionId")
def attr_version_id(self) -> ros_cdk_core.IResolvable:
'''Attribute VersionId: The version ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersionId"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.AliasProps",
jsii_struct_bases=[],
name_mapping={
"alias_name": "aliasName",
"service_name": "serviceName",
"additional_version": "additionalVersion",
"additional_weight": "additionalWeight",
"description": "description",
"version_id": "versionId",
},
)
class AliasProps:
def __init__(
self,
*,
alias_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
additional_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
additional_weight: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
version_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Alias``.
:param alias_name: Property aliasName: Alias name.
:param service_name: Property serviceName: Service name.
:param additional_version: Property additionalVersion: Additional version.
:param additional_weight: Property additionalWeight: Traffic weight of additional version. From 0 to 100.
:param description: Property description: Version description.
:param version_id: Property versionId: Version ID.
'''
self._values: typing.Dict[str, typing.Any] = {
"alias_name": alias_name,
"service_name": service_name,
}
if additional_version is not None:
self._values["additional_version"] = additional_version
if additional_weight is not None:
self._values["additional_weight"] = additional_weight
if description is not None:
self._values["description"] = description
if version_id is not None:
self._values["version_id"] = version_id
@builtins.property
def alias_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property aliasName: Alias name.'''
result = self._values.get("alias_name")
assert result is not None, "Required property 'alias_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def additional_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property additionalVersion: Additional version.'''
result = self._values.get("additional_version")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def additional_weight(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property additionalWeight: Traffic weight of additional version.
From 0 to 100.
'''
result = self._values.get("additional_weight")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property description: Version description.'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def version_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property versionId: Version ID.'''
result = self._values.get("version_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AliasProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class CustomDomain(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.CustomDomain",
):
'''A ROS resource type: ``ALIYUN::FC::CustomDomain``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "CustomDomainProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::CustomDomain``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDomain")
def attr_domain(self) -> ros_cdk_core.IResolvable:
'''Attribute Domain: The domain with protocol.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDomain"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDomainName")
def attr_domain_name(self) -> ros_cdk_core.IResolvable:
'''Attribute DomainName: The domain name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDomainName"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.CustomDomainProps",
jsii_struct_bases=[],
name_mapping={
"domain_name": "domainName",
"protocol": "protocol",
"api_version": "apiVersion",
"cert_config": "certConfig",
"route_config": "routeConfig",
},
)
class CustomDomainProps:
def __init__(
self,
*,
domain_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
protocol: typing.Union[builtins.str, ros_cdk_core.IResolvable],
api_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
cert_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]] = None,
route_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::CustomDomain``.
:param domain_name: Property domainName: domain name.
:param protocol: Property protocol: HTTP or HTTP,HTTPS.
:param api_version: Property apiVersion: api version.
:param cert_config: Property certConfig: certificate info.
:param route_config: Property routeConfig: Routing table: path to function mappingwhen a function is called with a custom domain name.
'''
self._values: typing.Dict[str, typing.Any] = {
"domain_name": domain_name,
"protocol": protocol,
}
if api_version is not None:
self._values["api_version"] = api_version
if cert_config is not None:
self._values["cert_config"] = cert_config
if route_config is not None:
self._values["route_config"] = route_config
@builtins.property
def domain_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property domainName: domain name.'''
result = self._values.get("domain_name")
assert result is not None, "Required property 'domain_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def protocol(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property protocol: HTTP or HTTP,HTTPS.'''
result = self._values.get("protocol")
assert result is not None, "Required property 'protocol' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def api_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property apiVersion: api version.'''
result = self._values.get("api_version")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def cert_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]]:
'''Property certConfig: certificate info.'''
result = self._values.get("cert_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]], result)
@builtins.property
def route_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]]:
'''Property routeConfig: Routing table: path to function mappingwhen a function is called with a custom domain name.'''
result = self._values.get("route_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CustomDomainProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class Function(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Function",
):
'''A ROS resource type: ``ALIYUN::FC::Function``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "FunctionProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Function``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> ros_cdk_core.IResolvable:
'''Attribute ARN: The ARN for ALIYUN::ROS::CustomResource.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionId")
def attr_function_id(self) -> ros_cdk_core.IResolvable:
'''Attribute FunctionId: The function ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''Attribute FunctionName: The function name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceId: The service ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: The service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
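# Illustrative usage sketch (assumption: called from inside a ROS CDK stack,
# which supplies the construct scope). FunctionProps is defined later in this
# module; the name is only resolved if the helper is actually called.
def _example_function(scope: ros_cdk_core.Construct) -> "Function":
    props = FunctionProps(
        function_name="my-function",  # hypothetical name
        handler="index.handler",
        runtime="python3",
        service_name="my-service",    # hypothetical service
    )
    return Function(scope, "MyFunction", props)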
class FunctionInvoker(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.FunctionInvoker",
):
'''A ROS resource type: ``ALIYUN::FC::FunctionInvoker``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "FunctionInvokerProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::FunctionInvoker``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResult")
def attr_result(self) -> ros_cdk_core.IResolvable:
'''Attribute Result: Depends on result type: NoResult: Async invoke has no result.
Success: The response of the function. The response should be utf-8 encoded string, otherwise ROS will report an error. If the response is binary, encode it via base64 before it is returned.
Failure: Error Message.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResult"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResultType")
def attr_result_type(self) -> ros_cdk_core.IResolvable:
'''Attribute ResultType: Result type: NoResult: Async invoke has no result.
Success: Sync invoke succeeds.
Failure: Sync invoke fails.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResultType"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.FunctionInvokerProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"service_name": "serviceName",
"async_": "async",
"check_error": "checkError",
"event": "event",
"execute_version": "executeVersion",
"qualifier": "qualifier",
"service_region_id": "serviceRegionId",
},
)
class FunctionInvokerProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
async_: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
check_error: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
event: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
execute_version: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
qualifier: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
service_region_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::FunctionInvoker``.
:param function_name: Property functionName: Function name.
:param service_name: Property serviceName: Service name.
:param async_: Property async: Invocation type, Sync or Async. Defaults to Sync.
:param check_error: Property checkError: Whether to check for errors in the function invocation result. If set to true and the invocation result contains an error, the resource creation will be regarded as failed. Default is false.
:param event: Property event: This value is passed to the function as a utf-8 encoded string. It’s the function’s responsibility to interpret the value. If the value needs to be binary, encode it via base64 before passing it to this property.
:param execute_version: Property executeVersion: If the property is not specified for creation and update, the function will not be invoked. A change of the property triggers an invocation of the function.
:param qualifier: Property qualifier: service version, can be versionId or aliasName.
:param service_region_id: Property serviceRegionId: Which region service belongs to.
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"service_name": service_name,
}
if async_ is not None:
self._values["async_"] = async_
if check_error is not None:
self._values["check_error"] = check_error
if event is not None:
self._values["event"] = event
if execute_version is not None:
self._values["execute_version"] = execute_version
if qualifier is not None:
self._values["qualifier"] = qualifier
if service_region_id is not None:
self._values["service_region_id"] = service_region_id
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property functionName: Function name.'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def async_(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''Property async: Invocation type, Sync or Async.
Defaults to Sync.
'''
result = self._values.get("async_")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def check_error(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''Property checkError: Whether to check for errors in the function invocation result.
If set to true and the invocation result contains an error, the resource creation will be regarded as failed.
Default is false.
'''
result = self._values.get("check_error")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def event(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property event: This value is passed to the function as a utf-8 encoded string. It’s the function’s responsibility to interpret the value. If the value needs to be binary, encode it via base64 before passing it to this property.'''
result = self._values.get("event")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def execute_version(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property executeVersion: If the property is not specified for creation and update, the function will not be invoked.
A change of the property triggers an invocation of the function.
'''
result = self._values.get("execute_version")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property qualifier: service version, can be versionId or aliasName.'''
result = self._values.get("qualifier")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def service_region_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property serviceRegionId: Which region service belongs to.'''
result = self._values.get("service_region_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "FunctionInvokerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
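# Illustrative usage sketch: a synchronous invocation payload for
# FunctionInvokerProps. The event below is a hypothetical utf-8 JSON string;
# binary payloads would need to be base64-encoded first, per the docstrings.
def _example_function_invoker_props() -> "FunctionInvokerProps":
    return FunctionInvokerProps(
        function_name="my-function",  # hypothetical
        service_name="my-service",    # hypothetical
        async_=False,                 # Sync invocation (the default)
        check_error=True,             # fail resource creation on invoke error
        event='{"action": "ping"}',
    )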
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.FunctionProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"handler": "handler",
"runtime": "runtime",
"service_name": "serviceName",
"async_configuration": "asyncConfiguration",
"ca_port": "caPort",
"code": "code",
"custom_container_config": "customContainerConfig",
"description": "description",
"environment_variables": "environmentVariables",
"initialization_timeout": "initializationTimeout",
"initializer": "initializer",
"instance_concurrency": "instanceConcurrency",
"instance_type": "instanceType",
"memory_size": "memorySize",
"timeout": "timeout",
},
)
class FunctionProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
handler: typing.Union[builtins.str, ros_cdk_core.IResolvable],
runtime: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
async_configuration: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]] = None,
ca_port: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
code: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]] = None,
custom_container_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
environment_variables: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]] = None,
initialization_timeout: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
initializer: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
instance_concurrency: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
instance_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
memory_size: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
timeout: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Function``.
:param function_name: Property functionName: Function name.
:param handler: Property handler: The function execution entry point.
:param runtime: Property runtime: The function runtime environment. Supported values include nodejs6, nodejs8, nodejs10, nodejs12, python2.7, python3, java8, custom, custom-container, and so on.
:param service_name: Property serviceName: Service name.
:param async_configuration: Property asyncConfiguration: Configuration of asynchronous function calls.
:param ca_port: Property caPort: Dedicated field for the custom runtime and custom container runtime; it represents the port that the launched custom HTTP server listens on. The default value is 9000.
:param code: Property code: The code that contains the function implementation.
:param custom_container_config: Property customContainerConfig: Configuration related to the custom container runtime. Once configured, the function can run inside a custom container.
:param description: Property description: Function description.
:param environment_variables: Property environmentVariables: The environment variables set for the function; their values can be read inside the function.
:param initialization_timeout: Property initializationTimeout: the max execution time of the initializer, in seconds.
:param initializer: Property initializer: the entry point of the initializer.
:param instance_concurrency: Property instanceConcurrency: Function instance concurrency. Value can be between 1 and 100.
:param instance_type: Property instanceType: Instance type. Value: e1: flexible instance, memory size between 128 and 3072 MB; c1: performance instance, allowed memory sizes are 4096, 8192, 16384 and 32768 MB.
:param memory_size: Property memorySize: The amount of memory that’s used to run the function, in MB. Function Compute uses this value to allocate CPU resources proportionally. Defaults to 128 MB. It can be a multiple of 64 MB, between 128 MB and 3072 MB.
:param timeout: Property timeout: The maximum time duration a function can run, in seconds, after which Function Compute terminates the execution. Defaults to 3 seconds, and can be between 1 and 600 seconds.
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"handler": handler,
"runtime": runtime,
"service_name": service_name,
}
if async_configuration is not None:
self._values["async_configuration"] = async_configuration
if ca_port is not None:
self._values["ca_port"] = ca_port
if code is not None:
self._values["code"] = code
if custom_container_config is not None:
self._values["custom_container_config"] = custom_container_config
if description is not None:
self._values["description"] = description
if environment_variables is not None:
self._values["environment_variables"] = environment_variables
if initialization_timeout is not None:
self._values["initialization_timeout"] = initialization_timeout
if initializer is not None:
self._values["initializer"] = initializer
if instance_concurrency is not None:
self._values["instance_concurrency"] = instance_concurrency
if instance_type is not None:
self._values["instance_type"] = instance_type
if memory_size is not None:
self._values["memory_size"] = memory_size
if timeout is not None:
self._values["timeout"] = timeout
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property functionName: Function name.'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def handler(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property handler: The function execution entry point.'''
result = self._values.get("handler")
assert result is not None, "Required property 'handler' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def runtime(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property runtime: The function runtime environment.
Supported values include nodejs6, nodejs8, nodejs10, nodejs12, python2.7, python3, java8, custom, custom-container, and so on.
'''
result = self._values.get("runtime")
assert result is not None, "Required property 'runtime' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def async_configuration(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]]:
'''Property asyncConfiguration: Configuration of asynchronous function calls.'''
result = self._values.get("async_configuration")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]], result)
@builtins.property
def ca_port(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property caPort: Dedicated field for the custom runtime and custom container runtime; it represents the port that the launched custom HTTP server listens on.
The default value is 9000.
'''
result = self._values.get("ca_port")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def code(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]]:
'''Property code: The code that contains the function implementation.'''
result = self._values.get("code")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]], result)
@builtins.property
def custom_container_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]]:
'''Property customContainerConfig: Configuration related to the custom container runtime.
Once configured, the function can run inside a custom container.
'''
result = self._values.get("custom_container_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property description: Function description.'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def environment_variables(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]]:
'''Property environmentVariables: The environment variables set for the function; their values can be read inside the function.'''
result = self._values.get("environment_variables")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]], result)
@builtins.property
def initialization_timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property initializationTimeout: the max execution time of the initializer, in seconds.'''
result = self._values.get("initialization_timeout")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def initializer(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property initializer: the entry point of the initializer.'''
result = self._values.get("initializer")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def instance_concurrency(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property instanceConcurrency: Function instance concurrency.
Value can be between 1 and 100.
'''
result = self._values.get("instance_concurrency")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def instance_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property instanceType: Instance type.
Value: e1: flexible instance, memory size between 128 and 3072 MB; c1: performance instance, allowed memory sizes are 4096, 8192, 16384 and 32768 MB.
'''
result = self._values.get("instance_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def memory_size(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property memorySize: The amount of memory that’s used to run the function, in MB.
Function Compute uses this value to allocate CPU resources proportionally. Defaults to 128 MB. It can be a multiple of 64 MB, between 128 MB and 3072 MB.
'''
result = self._values.get("memory_size")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''Property timeout: The maximum time duration a function can run, in seconds, after which Function Compute terminates the execution.
Defaults to 3 seconds, and can be between 1 and 600 seconds.
'''
result = self._values.get("timeout")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "FunctionProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
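# Illustrative usage sketch: FunctionProps with the documented numeric
# constraints spelled out. All names below are hypothetical placeholders.
def _example_function_props() -> "FunctionProps":
    return FunctionProps(
        function_name="my-function",
        handler="index.handler",
        runtime="python3",
        service_name="my-service",
        memory_size=256,          # multiple of 64 MB, between 128 and 3072 MB (e1)
        timeout=60,               # seconds, between 1 and 600
        instance_concurrency=10,  # between 1 and 100
    )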
class Layer(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Layer",
):
'''A ROS resource type: ``ALIYUN::FC::Layer``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "LayerProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Layer``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> ros_cdk_core.IResolvable:
'''Attribute Arn: The name of the layer resource.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLayerName")
def attr_layer_name(self) -> ros_cdk_core.IResolvable:
'''Attribute LayerName: The name of layer.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLayerName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersion")
def attr_version(self) -> ros_cdk_core.IResolvable:
'''Attribute Version: The version of the layer resource.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersion"))
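# Illustrative usage sketch: reading resolvable attributes off a Layer
# construct. ``layer`` is assumed to be a Layer created elsewhere in a stack.
def _example_layer_outputs(layer: "Layer") -> typing.List[ros_cdk_core.IResolvable]:
    # Each attribute is an IResolvable token resolved at deploy time.
    return [layer.attr_arn, layer.attr_layer_name, layer.attr_version]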
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.LayerProps",
jsii_struct_bases=[],
name_mapping={
"code": "code",
"compatible_runtime": "compatibleRuntime",
"layer_name": "layerName",
"description": "description",
},
)
class LayerProps:
def __init__(
self,
*,
code: typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"],
compatible_runtime: typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[builtins.str, ros_cdk_core.IResolvable]]],
layer_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Layer``.
:param code: Property code: The code of layer.
:param compatible_runtime: Property compatibleRuntime: The runtime environment supported by the layer. For example: nodejs12, nodejs10, nodejs8, nodejs6, python3, and python2.7.
:param layer_name: Property layerName: The name of layer.
:param description: Property description: The description of the layer.
'''
self._values: typing.Dict[str, typing.Any] = {
"code": code,
"compatible_runtime": compatible_runtime,
"layer_name": layer_name,
}
if description is not None:
self._values["description"] = description
@builtins.property
def code(self) -> typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"]:
'''Property code: The code of layer.'''
result = self._values.get("code")
assert result is not None, "Required property 'code' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"], result)
@builtins.property
def compatible_runtime(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]:
'''Property compatibleRuntime: The runtime environment supported by the layer.
For example: nodejs12, nodejs10, nodejs8, nodejs6, python3, and python2.7.
'''
result = self._values.get("compatible_runtime")
assert result is not None, "Required property 'compatible_runtime' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]], result)
@builtins.property
def layer_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property layerName: The name of layer.'''
result = self._values.get("layer_name")
assert result is not None, "Required property 'layer_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property description: The description of the layer.'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "LayerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
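# Illustrative usage sketch: LayerProps wiring. The ``code`` argument is
# assumed to be a RosLayer.CodeProperty built elsewhere; its exact fields are
# not shown in this excerpt, so it is taken as an opaque parameter here.
def _example_layer_props(code: typing.Any) -> "LayerProps":
    return LayerProps(
        code=code,
        compatible_runtime=["python3"],  # runtimes the layer supports
        layer_name="shared-deps",        # hypothetical name
        description="Shared dependency layer",
    )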
class ProvisionConfig(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.ProvisionConfig",
):
'''A ROS resource type: ``ALIYUN::FC::ProvisionConfig``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "ProvisionConfigProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::ProvisionConfig``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''Attribute FunctionName: The function name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrQualifier")
def attr_qualifier(self) -> ros_cdk_core.IResolvable:
'''Attribute Qualifier: The service alias.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrQualifier"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResource")
def attr_resource(self) -> ros_cdk_core.IResolvable:
'''Attribute Resource: The resource.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResource"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: The service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTarget")
def attr_target(self) -> ros_cdk_core.IResolvable:
'''Attribute Target: Number of provisioned instances.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTarget"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.ProvisionConfigProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"qualifier": "qualifier",
"service_name": "serviceName",
"target": "target",
},
)
class ProvisionConfigProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
qualifier: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
target: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
) -> None:
'''Properties for defining a ``ALIYUN::FC::ProvisionConfig``.
:param function_name: Property functionName: Function name.
:param qualifier: Property qualifier: Service's alias. Example: "LATEST"
:param service_name: Property serviceName: Service name.
:param target: Property target: Number of provisioned instances.
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"qualifier": qualifier,
"service_name": service_name,
"target": target,
}
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property functionName: Function name.'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def qualifier(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property qualifier: Service's alias.
Example: "LATEST"
'''
result = self._values.get("qualifier")
assert result is not None, "Required property 'qualifier' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def target(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''Property target: Number of provisioned instances.'''
result = self._values.get("target")
assert result is not None, "Required property 'target' is missing"
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ProvisionConfigProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
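# Illustrative usage sketch: all four ProvisionConfig properties are
# required. Names are hypothetical; "LATEST" follows the qualifier example
# given in the docstrings.
def _example_provision_config_props() -> "ProvisionConfigProps":
    return ProvisionConfigProps(
        function_name="my-function",
        qualifier="LATEST",
        service_name="my-service",
        target=5,  # number of provisioned instances
    )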
class RosAlias(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosAlias",
):
'''A ROS template type: ``ALIYUN::FC::Alias``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosAliasProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Alias``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrAliasName")
def attr_alias_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: AliasName: The alias name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrAliasName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The service name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersionId")
def attr_version_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: VersionId: The version ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersionId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="aliasName")
def alias_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: aliasName: Alias name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "aliasName"))
@alias_name.setter
def alias_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "aliasName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="additionalVersion")
def additional_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: additionalVersion: Additional version
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "additionalVersion"))
@additional_version.setter
def additional_version(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "additionalVersion", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="additionalWeight")
def additional_weight(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: additionalWeight: Traffic weight of additional version. From 0 to 100.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "additionalWeight"))
@additional_weight.setter
def additional_weight(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "additionalWeight", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="description")
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Version description
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "description"))
@description.setter
def description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "description", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="versionId")
def version_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: versionId: Version ID
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "versionId"))
@version_id.setter
def version_id(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "versionId", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosAliasProps",
jsii_struct_bases=[],
name_mapping={
"alias_name": "aliasName",
"service_name": "serviceName",
"additional_version": "additionalVersion",
"additional_weight": "additionalWeight",
"description": "description",
"version_id": "versionId",
},
)
class RosAliasProps:
def __init__(
self,
*,
alias_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
additional_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
additional_weight: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
version_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Alias``.
:param alias_name:
:param service_name:
:param additional_version:
:param additional_weight:
:param description:
:param version_id:
'''
self._values: typing.Dict[str, typing.Any] = {
"alias_name": alias_name,
"service_name": service_name,
}
if additional_version is not None:
self._values["additional_version"] = additional_version
if additional_weight is not None:
self._values["additional_weight"] = additional_weight
if description is not None:
self._values["description"] = description
if version_id is not None:
self._values["version_id"] = version_id
@builtins.property
def alias_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: aliasName: Alias name
'''
result = self._values.get("alias_name")
assert result is not None, "Required property 'alias_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def additional_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: additionalVersion: Additional version
'''
result = self._values.get("additional_version")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def additional_weight(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: additionalWeight: Traffic weight of additional version. From 0 to 100.
'''
result = self._values.get("additional_weight")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Version description
'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def version_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: versionId: Version ID
'''
result = self._values.get("version_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosAliasProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
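# Illustrative usage sketch: a canary-style alias that keeps most traffic on
# version "1" and routes 10% to version "2". All values are hypothetical.
def _example_ros_alias_props() -> "RosAliasProps":
    return RosAliasProps(
        alias_name="prod",
        service_name="my-service",
        version_id="1",
        additional_version="2",
        additional_weight=10,  # traffic weight of the additional version, 0-100
    )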
class RosCustomDomain(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosCustomDomain",
):
'''A ROS template type: ``ALIYUN::FC::CustomDomain``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosCustomDomainProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::CustomDomain``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDomain")
def attr_domain(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Domain: The domain with protocol.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDomain"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrDomainName")
def attr_domain_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: DomainName: The domain name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrDomainName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="domainName")
def domain_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: domainName: domain name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "domainName"))
@domain_name.setter
def domain_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "domainName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="protocol")
def protocol(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: protocol: HTTP or HTTP,HTTPS
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "protocol"))
@protocol.setter
def protocol(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "protocol", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="apiVersion")
def api_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: apiVersion: api version
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "apiVersion"))
@api_version.setter
def api_version(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "apiVersion", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="certConfig")
def cert_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]]:
'''
:Property: certConfig: certificate info
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]], jsii.get(self, "certConfig"))
@cert_config.setter
def cert_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.CertConfigProperty"]],
) -> None:
jsii.set(self, "certConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="routeConfig")
def route_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]]:
'''
:Property: routeConfig: Routing table: path to function mapping when a function is called with a custom domain name
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]], jsii.get(self, "routeConfig"))
@route_config.setter
def route_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RouteConfigProperty"]],
) -> None:
jsii.set(self, "routeConfig", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosCustomDomain.CertConfigProperty",
jsii_struct_bases=[],
name_mapping={
"certificate": "certificate",
"cert_name": "certName",
"private_key": "privateKey",
},
)
class CertConfigProperty:
def __init__(
self,
*,
certificate: typing.Union[builtins.str, ros_cdk_core.IResolvable],
cert_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
private_key: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
'''
:param certificate:
:param cert_name:
:param private_key:
'''
self._values: typing.Dict[str, typing.Any] = {
"certificate": certificate,
"cert_name": cert_name,
"private_key": private_key,
}
@builtins.property
def certificate(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: certificate: certificate
'''
result = self._values.get("certificate")
assert result is not None, "Required property 'certificate' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def cert_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: certName: custom certificate name
'''
result = self._values.get("cert_name")
assert result is not None, "Required property 'cert_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def private_key(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: privateKey: private key
'''
result = self._values.get("private_key")
assert result is not None, "Required property 'private_key' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CertConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosCustomDomain.RouteConfigProperty",
jsii_struct_bases=[],
name_mapping={"routes": "routes"},
)
class RouteConfigProperty:
def __init__(
self,
*,
routes: typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RoutesProperty"]]],
) -> None:
'''
:param routes:
'''
self._values: typing.Dict[str, typing.Any] = {
"routes": routes,
}
@builtins.property
def routes(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RoutesProperty"]]]:
'''
:Property: routes: Array of PathConfig entries
'''
result = self._values.get("routes")
assert result is not None, "Required property 'routes' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosCustomDomain.RoutesProperty"]]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RouteConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosCustomDomain.RoutesProperty",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"path": "path",
"service_name": "serviceName",
"qualifier": "qualifier",
},
)
class RoutesProperty:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
path: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
qualifier: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param function_name:
:param path:
:param service_name:
:param qualifier:
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"path": path,
"service_name": service_name,
}
if qualifier is not None:
self._values["qualifier"] = qualifier
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: The function that the path maps to, for example: "login"
'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def path(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: path: HTTP request path when a function is called with a custom domain name, for example: "/login/*"
'''
result = self._values.get("path")
assert result is not None, "Required property 'path' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: The service that the path maps to, for example: "blogService"
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: qualifier: Service version or alias
'''
result = self._values.get("qualifier")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RoutesProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
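# Illustrative usage sketch: nesting the RosCustomDomain property structs.
# Each route maps an HTTP path to a service/function pair; the PEM strings
# below are hypothetical placeholders, not real key material.
def _example_route_and_cert_config() -> typing.Tuple["RosCustomDomain.RouteConfigProperty", "RosCustomDomain.CertConfigProperty"]:
    route_config = RosCustomDomain.RouteConfigProperty(
        routes=[
            RosCustomDomain.RoutesProperty(
                function_name="login",   # function the path maps to
                path="/login/*",         # request path pattern
                service_name="blogService",
            )
        ]
    )
    cert_config = RosCustomDomain.CertConfigProperty(
        certificate="-----BEGIN CERTIFICATE-----\n...placeholder...",
        cert_name="my-cert",
        private_key="-----BEGIN PRIVATE KEY-----\n...placeholder...",
    )
    return route_config, cert_config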
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosCustomDomainProps",
jsii_struct_bases=[],
name_mapping={
"domain_name": "domainName",
"protocol": "protocol",
"api_version": "apiVersion",
"cert_config": "certConfig",
"route_config": "routeConfig",
},
)
class RosCustomDomainProps:
def __init__(
self,
*,
domain_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
protocol: typing.Union[builtins.str, ros_cdk_core.IResolvable],
api_version: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
cert_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.CertConfigProperty]] = None,
route_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.RouteConfigProperty]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::CustomDomain``.
:param domain_name:
:param protocol:
:param api_version:
:param cert_config:
:param route_config:
'''
self._values: typing.Dict[str, typing.Any] = {
"domain_name": domain_name,
"protocol": protocol,
}
if api_version is not None:
self._values["api_version"] = api_version
if cert_config is not None:
self._values["cert_config"] = cert_config
if route_config is not None:
self._values["route_config"] = route_config
@builtins.property
def domain_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: domainName: domain name
'''
result = self._values.get("domain_name")
assert result is not None, "Required property 'domain_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def protocol(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: protocol: HTTP or HTTP,HTTPS
'''
result = self._values.get("protocol")
assert result is not None, "Required property 'protocol' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def api_version(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: apiVersion: api version
'''
result = self._values.get("api_version")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def cert_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.CertConfigProperty]]:
'''
:Property: certConfig: certificate info
'''
result = self._values.get("cert_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.CertConfigProperty]], result)
@builtins.property
def route_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.RouteConfigProperty]]:
'''
:Property: routeConfig: Routing table: path to function mapping when a function is called with a custom domain name
'''
result = self._values.get("route_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosCustomDomain.RouteConfigProperty]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosCustomDomainProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
class RosFunction(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosFunction",
):
'''A ROS template type: ``ALIYUN::FC::Function``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosFunctionProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Function``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ARN: The ARN for ALIYUN::ROS::CustomResource
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionId")
def attr_function_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: FunctionId: The function ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: FunctionName: The function name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceId: The service ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The service name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="functionName")
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "functionName"))
@function_name.setter
def function_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "functionName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="handler")
def handler(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: handler: The function execution entry point.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "handler"))
@handler.setter
def handler(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "handler", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="runtime")
def runtime(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: runtime: The function runtime environment. Supported runtimes include nodejs6, nodejs8, nodejs10, nodejs12, python2.7, python3, java8, custom, and custom-container, among others.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "runtime"))
@runtime.setter
def runtime(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "runtime", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="asyncConfiguration")
def async_configuration(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]]:
'''
:Property: asyncConfiguration: Configuration of asynchronous function calls
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]], jsii.get(self, "asyncConfiguration"))
@async_configuration.setter
def async_configuration(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.AsyncConfigurationProperty"]],
) -> None:
jsii.set(self, "asyncConfiguration", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="caPort")
def ca_port(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: caPort: A field dedicated to the custom runtime and custom container runtime; it specifies the port that the started custom HTTP server listens on. The default value is 9000.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "caPort"))
@ca_port.setter
def ca_port(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "caPort", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="code")
def code(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]]:
'''
:Property: code: The code that contains the function implementation.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]], jsii.get(self, "code"))
@code.setter
def code(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CodeProperty"]],
) -> None:
jsii.set(self, "code", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="customContainerConfig")
def custom_container_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]]:
'''
:Property: customContainerConfig: Configuration related to the custom container runtime. Once configured, the function is executed inside the specified custom container.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]], jsii.get(self, "customContainerConfig"))
@custom_container_config.setter
def custom_container_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.CustomContainerConfigProperty"]],
) -> None:
jsii.set(self, "customContainerConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="description")
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Function description
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "description"))
@description.setter
def description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "description", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="environmentVariables")
def environment_variables(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]]:
'''
:Property: environmentVariables: The environment variables set for the function; their values can be read from within the function.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]], jsii.get(self, "environmentVariables"))
@environment_variables.setter
def environment_variables(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]],
) -> None:
jsii.set(self, "environmentVariables", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="initializationTimeout")
def initialization_timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: initializationTimeout: The maximum execution time of the initializer, in seconds.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "initializationTimeout"))
@initialization_timeout.setter
def initialization_timeout(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "initializationTimeout", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="initializer")
def initializer(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: initializer: the entry point of the initializer
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "initializer"))
@initializer.setter
def initializer(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "initializer", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="instanceConcurrency")
def instance_concurrency(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: instanceConcurrency: Function instance concurrency. The value can be between 1 and 100.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "instanceConcurrency"))
@instance_concurrency.setter
def instance_concurrency(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "instanceConcurrency", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="instanceType")
def instance_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: instanceType: Instance type. Valid values: e1 (flexible instance, memory size between 128 MB and 3072 MB); c1 (performance instance, allowed memory sizes are 4096 MB, 8192 MB, 16384 MB and 32768 MB).
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "instanceType"))
@instance_type.setter
def instance_type(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "instanceType", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="memorySize")
def memory_size(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: memorySize: The amount of memory that is used to run the function, in MB. Function Compute uses this value to allocate CPU resources proportionally. Defaults to 128 MB. It must be a multiple of 64 MB, between 128 MB and 3072 MB.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "memorySize"))
@memory_size.setter
def memory_size(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "memorySize", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="timeout")
def timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: timeout: The maximum duration a function can run, in seconds, after which Function Compute terminates the execution. Defaults to 3 seconds, and can be between 1 and 600 seconds.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "timeout"))
@timeout.setter
def timeout(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "timeout", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunction.AsyncConfigurationProperty",
jsii_struct_bases=[],
name_mapping={
"destination": "destination",
"max_async_event_age_in_seconds": "maxAsyncEventAgeInSeconds",
"max_async_retry_attempts": "maxAsyncRetryAttempts",
"stateful_invocation": "statefulInvocation",
},
)
class AsyncConfigurationProperty:
def __init__(
self,
*,
destination: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.DestinationProperty"]] = None,
max_async_event_age_in_seconds: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
max_async_retry_attempts: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
stateful_invocation: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param destination:
:param max_async_event_age_in_seconds:
:param max_async_retry_attempts:
:param stateful_invocation:
'''
self._values: typing.Dict[str, typing.Any] = {}
if destination is not None:
self._values["destination"] = destination
if max_async_event_age_in_seconds is not None:
self._values["max_async_event_age_in_seconds"] = max_async_event_age_in_seconds
if max_async_retry_attempts is not None:
self._values["max_async_retry_attempts"] = max_async_retry_attempts
if stateful_invocation is not None:
self._values["stateful_invocation"] = stateful_invocation
@builtins.property
def destination(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.DestinationProperty"]]:
'''
:Property: destination: Set destination of asynchronous function calls
'''
result = self._values.get("destination")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosFunction.DestinationProperty"]], result)
@builtins.property
def max_async_event_age_in_seconds(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: maxAsyncEventAgeInSeconds: Configures the maximum lifetime of messages. The duration is measured from the time the asynchronous call is triggered until the message is dequeued for processing. If this period exceeds the value of MaxAsyncEventAgeInSeconds, the message is discarded. Discarded messages are counted in the cloud monitoring AsyncEventExpiredDropped indicator.
'''
result = self._values.get("max_async_event_age_in_seconds")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def max_async_retry_attempts(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: maxAsyncRetryAttempts: Configure the number of retries
'''
result = self._values.get("max_async_retry_attempts")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def stateful_invocation(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: statefulInvocation: Whether to enable stateful invocation
'''
result = self._values.get("stateful_invocation")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "AsyncConfigurationProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
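# Illustrative sketch (comments only, not part of the generated API): wiring an
# async configuration with success/failure destinations. The destination ARNs
# are placeholders; DestinationProperty is defined later in this class.
#
#   async_cfg = RosFunction.AsyncConfigurationProperty(
#       destination=RosFunction.DestinationProperty(
#           on_failure="acs:fc:<region>:<account>:services/dlq-svc/functions/on-fail",
#           on_success="acs:fc:<region>:<account>:services/log-svc/functions/on-ok",
#       ),
#       max_async_retry_attempts=2,
#       max_async_event_age_in_seconds=3600,
#   )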
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunction.CodeProperty",
jsii_struct_bases=[],
name_mapping={
"oss_bucket_name": "ossBucketName",
"oss_object_name": "ossObjectName",
"source_code": "sourceCode",
"zip_file": "zipFile",
},
)
class CodeProperty:
def __init__(
self,
*,
oss_bucket_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
oss_object_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
source_code: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
zip_file: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param oss_bucket_name:
:param oss_object_name:
:param source_code:
:param zip_file:
'''
self._values: typing.Dict[str, typing.Any] = {}
if oss_bucket_name is not None:
self._values["oss_bucket_name"] = oss_bucket_name
if oss_object_name is not None:
self._values["oss_object_name"] = oss_object_name
if source_code is not None:
self._values["source_code"] = source_code
if zip_file is not None:
self._values["zip_file"] = zip_file
@builtins.property
def oss_bucket_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
ossBucketName: OSS bucket name.
Priority: ZipFile > SourceCode > OssBucketName&OssObjectName.
'''
result = self._values.get("oss_bucket_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def oss_object_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
ossObjectName: OSS object name.
Priority: ZipFile > SourceCode > OssBucketName&OssObjectName.
'''
result = self._values.get("oss_object_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def source_code(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
sourceCode: (Node.js, PHP and Python) The source code for your FC function, provided inline. If you include this parameter, ROS places the source in a file called index (utf-8 encoded) and then compresses it to create a deployment package. For the Handler property, the first part of the handler identifier must be index. For example: index.handler.
Your source code can contain up to 4096 characters. For JSON, you must use backslashes to escape quotes and special characters, such as line breaks.
Priority: ZipFile > SourceCode > OssBucketName&OssObjectName.
'''
result = self._values.get("source_code")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def zip_file(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
zipFile: Base64 encoded zip file content.
Priority: ZipFile > SourceCode > OssBucketName&OssObjectName.
'''
result = self._values.get("zip_file")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CodeProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
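# Illustrative sketch: the three ways to supply function code, with the
# documented priority ZipFile > SourceCode > OssBucketName&OssObjectName.
# Bucket and object names below are placeholders.
#
#   inline = RosFunction.CodeProperty(
#       source_code="def handler(event, context):\n    return 'ok'\n",
#   )
#   from_oss = RosFunction.CodeProperty(
#       oss_bucket_name="my-bucket",
#       oss_object_name="pkg/function.zip",
#   )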
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunction.CustomContainerConfigProperty",
jsii_struct_bases=[],
name_mapping={
"image": "image",
"acceleration_type": "accelerationType",
"args": "args",
"command": "command",
},
)
class CustomContainerConfigProperty:
def __init__(
self,
*,
image: typing.Union[builtins.str, ros_cdk_core.IResolvable],
acceleration_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
args: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
command: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param image:
:param acceleration_type:
:param args:
:param command:
'''
self._values: typing.Dict[str, typing.Any] = {
"image": image,
}
if acceleration_type is not None:
self._values["acceleration_type"] = acceleration_type
if args is not None:
self._values["args"] = args
if command is not None:
self._values["command"] = command
@builtins.property
def image(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: image: Container image address. For example: registry-vpc.cn-hangzhou.aliyuncs.com/fc-demo/helloworld:v1beta1
'''
result = self._values.get("image")
assert result is not None, "Required property 'image' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def acceleration_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
accelerationType: Whether to enable image acceleration. Valid Values:
Default: Indicates that image acceleration is enabled.
None: Indicates that image acceleration is disabled.
'''
result = self._values.get("acceleration_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def args(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: args: Container startup parameters. For example: ["-arg1", "value1"]
'''
result = self._values.get("args")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def command(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: command: Container start command. For example: ["/code/myserver"]
'''
result = self._values.get("command")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CustomContainerConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
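# Illustrative sketch of a custom container configuration. Note that `command`
# and `args` are JSON-encoded strings, not Python lists, per the docs above.
#
#   container = RosFunction.CustomContainerConfigProperty(
#       image="registry-vpc.cn-hangzhou.aliyuncs.com/fc-demo/helloworld:v1beta1",
#       command='["/code/myserver"]',
#       args='["-arg1", "value1"]',
#   )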
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunction.DestinationProperty",
jsii_struct_bases=[],
name_mapping={"on_failure": "onFailure", "on_success": "onSuccess"},
)
class DestinationProperty:
def __init__(
self,
*,
on_failure: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
on_success: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param on_failure:
:param on_success:
'''
self._values: typing.Dict[str, typing.Any] = {}
if on_failure is not None:
self._values["on_failure"] = on_failure
if on_success is not None:
self._values["on_success"] = on_success
@builtins.property
def on_failure(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: onFailure: When the function invocation fails (system error or function internal error), FC calls the target specified by this configuration
'''
result = self._values.get("on_failure")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def on_success(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: onSuccess: When the function invocation succeeds, FC calls the target specified by this configuration
'''
result = self._values.get("on_success")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "DestinationProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
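# A minimal usage sketch for RosFunction, assuming a hypothetical existing
# scope (e.g. a ros_cdk_core stack) and a service named "my-service". Kept as
# an unexported helper purely for illustration; it is never called here.
def _example_ros_function(scope: ros_cdk_core.Construct) -> RosFunction:
    # Inline source code: per CodeProperty, the handler module must be `index`.
    code = RosFunction.CodeProperty(
        source_code="def handler(event, context):\n    return 'ok'\n",
    )
    return RosFunction(
        scope,
        "MyFunction",
        RosFunctionProps(
            function_name="my-function",
            handler="index.handler",
            runtime="python3",
            service_name="my-service",
            code=code,
            memory_size=128,  # a multiple of 64 MB, within 128-3072 MB
            timeout=60,       # seconds, within 1-600
        ),
        True,  # enable_resource_property_constraint
    )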
class RosFunctionInvoker(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosFunctionInvoker",
):
'''A ROS template type: ``ALIYUN::FC::FunctionInvoker``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosFunctionInvokerProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::FunctionInvoker``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResult")
def attr_result(self) -> ros_cdk_core.IResolvable:
'''
:Attribute:
Result: Depends on result type:
NoResult: Async invoke has no result.
Success: The response of the function. The response should be a utf-8 encoded string; otherwise ROS will report an error. If the response is binary, encode it via base64 before it is returned.
Failure: Error Message.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResult"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResultType")
def attr_result_type(self) -> ros_cdk_core.IResolvable:
'''
:Attribute:
ResultType: Result type:
NoResult: Async invoke has no result.
Success: Sync invoke succeeds.
Failure: Sync invoke fails.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResultType"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="functionName")
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "functionName"))
@function_name.setter
def function_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "functionName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="async")
def async_(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: async: Invocation type, Sync or Async. Defaults to Sync.
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "async"))
@async_.setter
def async_(
self,
value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "async", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="checkError")
def check_error(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property:
checkError: Whether to check the function invocation result for errors.
If set to true and the invocation result contains an error, the resource creation is regarded as failed.
Default is false
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "checkError"))
@check_error.setter
def check_error(
self,
value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "checkError", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="event")
def event(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
event: This value is passed to the function as a utf-8 encoded string. It is the function's responsibility to interpret the value.
If the value needs to be binary, encode it via base64 before passing it to this property.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "event"))
@event.setter
def event(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "event", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="executeVersion")
def execute_version(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: executeVersion: If the property is not specified on creation or update, the function will not be invoked. A change to the property triggers an invocation of the function.
'''
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], jsii.get(self, "executeVersion"))
@execute_version.setter
def execute_version(
self,
value: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "executeVersion", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="qualifier")
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: qualifier: Service version; can be a versionId or aliasName
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "qualifier"))
@qualifier.setter
def qualifier(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "qualifier", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceRegionId")
def service_region_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: serviceRegionId: The region to which the service belongs.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "serviceRegionId"))
@service_region_id.setter
def service_region_id(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "serviceRegionId", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunctionInvokerProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"service_name": "serviceName",
"async_": "async",
"check_error": "checkError",
"event": "event",
"execute_version": "executeVersion",
"qualifier": "qualifier",
"service_region_id": "serviceRegionId",
},
)
class RosFunctionInvokerProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
async_: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
check_error: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
event: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
execute_version: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
qualifier: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
service_region_id: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::FunctionInvoker``.
:param function_name:
:param service_name:
:param async_:
:param check_error:
:param event:
:param execute_version:
:param qualifier:
:param service_region_id:
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"service_name": service_name,
}
if async_ is not None:
self._values["async_"] = async_
if check_error is not None:
self._values["check_error"] = check_error
if event is not None:
self._values["event"] = event
if execute_version is not None:
self._values["execute_version"] = execute_version
if qualifier is not None:
self._values["qualifier"] = qualifier
if service_region_id is not None:
self._values["service_region_id"] = service_region_id
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def async_(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: async: Invocation type, Sync or Async. Defaults to Sync.
'''
result = self._values.get("async_")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def check_error(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property:
checkError: Whether to check the function invocation result for errors.
If set to true and the invocation result contains an error, the resource creation is regarded as failed.
Default is false
'''
result = self._values.get("check_error")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def event(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
event: This value is passed to the function as a utf-8 encoded string. It is the function's responsibility to interpret the value.
If the value needs to be binary, encode it via base64 before passing it to this property.
'''
result = self._values.get("event")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def execute_version(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: executeVersion: If the property is not specified on creation or update, the function will not be invoked. A change to the property triggers an invocation of the function.
'''
result = self._values.get("execute_version")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: qualifier: Service version; can be a versionId or aliasName
'''
result = self._values.get("qualifier")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def service_region_id(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: serviceRegionId: The region to which the service belongs.
'''
result = self._values.get("service_region_id")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosFunctionInvokerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
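# A minimal usage sketch for RosFunctionInvoker, using the same hypothetical
# scope and names as the RosFunction sketch above. Never called here.
def _example_invoke(scope: ros_cdk_core.Construct) -> RosFunctionInvoker:
    import json

    # Event payloads reach the function as utf-8 text; binary data should be
    # base64-encoded first (see the `event` property documentation above).
    payload = json.dumps({"action": "warmup"})
    return RosFunctionInvoker(
        scope,
        "WarmupInvoke",
        RosFunctionInvokerProps(
            function_name="my-function",
            service_name="my-service",
            event=payload,
            check_error=True,   # treat an invocation error as a failed resource
            execute_version=1,  # bump this value to re-trigger the invocation
        ),
        True,  # enable_resource_property_constraint
    )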
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosFunctionProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"handler": "handler",
"runtime": "runtime",
"service_name": "serviceName",
"async_configuration": "asyncConfiguration",
"ca_port": "caPort",
"code": "code",
"custom_container_config": "customContainerConfig",
"description": "description",
"environment_variables": "environmentVariables",
"initialization_timeout": "initializationTimeout",
"initializer": "initializer",
"instance_concurrency": "instanceConcurrency",
"instance_type": "instanceType",
"memory_size": "memorySize",
"timeout": "timeout",
},
)
class RosFunctionProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
handler: typing.Union[builtins.str, ros_cdk_core.IResolvable],
runtime: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
async_configuration: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.AsyncConfigurationProperty]] = None,
ca_port: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
code: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CodeProperty]] = None,
custom_container_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CustomContainerConfigProperty]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
environment_variables: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]] = None,
initialization_timeout: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
initializer: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
instance_concurrency: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
instance_type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
memory_size: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
timeout: typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Function``.
:param function_name:
:param handler:
:param runtime:
:param service_name:
:param async_configuration:
:param ca_port:
:param code:
:param custom_container_config:
:param description:
:param environment_variables:
:param initialization_timeout:
:param initializer:
:param instance_concurrency:
:param instance_type:
:param memory_size:
:param timeout:
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"handler": handler,
"runtime": runtime,
"service_name": service_name,
}
if async_configuration is not None:
self._values["async_configuration"] = async_configuration
if ca_port is not None:
self._values["ca_port"] = ca_port
if code is not None:
self._values["code"] = code
if custom_container_config is not None:
self._values["custom_container_config"] = custom_container_config
if description is not None:
self._values["description"] = description
if environment_variables is not None:
self._values["environment_variables"] = environment_variables
if initialization_timeout is not None:
self._values["initialization_timeout"] = initialization_timeout
if initializer is not None:
self._values["initializer"] = initializer
if instance_concurrency is not None:
self._values["instance_concurrency"] = instance_concurrency
if instance_type is not None:
self._values["instance_type"] = instance_type
if memory_size is not None:
self._values["memory_size"] = memory_size
if timeout is not None:
self._values["timeout"] = timeout
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def handler(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: handler: The function execution entry point.
'''
result = self._values.get("handler")
assert result is not None, "Required property 'handler' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def runtime(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: runtime: The function runtime environment. Supported runtimes include nodejs6, nodejs8, nodejs10, nodejs12, python2.7, python3, java8, custom, and custom-container, among others.
'''
result = self._values.get("runtime")
assert result is not None, "Required property 'runtime' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def async_configuration(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.AsyncConfigurationProperty]]:
'''
:Property: asyncConfiguration: Configuration of asynchronous function calls
'''
result = self._values.get("async_configuration")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.AsyncConfigurationProperty]], result)
@builtins.property
def ca_port(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: caPort: A field dedicated to the custom runtime and custom container runtime; it specifies the port that the started custom HTTP server listens on. The default value is 9000.
'''
result = self._values.get("ca_port")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def code(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CodeProperty]]:
'''
:Property: code: The code that contains the function implementation.
'''
result = self._values.get("code")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CodeProperty]], result)
@builtins.property
def custom_container_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CustomContainerConfigProperty]]:
'''
:Property: customContainerConfig: Configuration related to the custom container runtime. Once configured, the function is executed inside the specified custom container.
'''
result = self._values.get("custom_container_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosFunction.CustomContainerConfigProperty]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Function description
'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def environment_variables(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]]:
'''
:Property: environmentVariables: The environment variables set for the function; their values can be read from within the function.
'''
result = self._values.get("environment_variables")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]], result)
@builtins.property
def initialization_timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: initializationTimeout: The maximum execution time of the initializer, in seconds.
'''
result = self._values.get("initialization_timeout")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def initializer(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: initializer: the entry point of the initializer
'''
result = self._values.get("initializer")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def instance_concurrency(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: instanceConcurrency: Function instance concurrency. The value can be between 1 and 100.
'''
result = self._values.get("instance_concurrency")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def instance_type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: instanceType: Instance type. Valid values: e1 (flexible instance, memory size between 128 MB and 3072 MB); c1 (performance instance, allowed memory sizes are 4096 MB, 8192 MB, 16384 MB and 32768 MB).
'''
result = self._values.get("instance_type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def memory_size(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: memorySize: The amount of memory that is used to run the function, in MB. Function Compute uses this value to allocate CPU resources proportionally. Defaults to 128 MB. It must be a multiple of 64 MB, between 128 MB and 3072 MB.
'''
result = self._values.get("memory_size")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
@builtins.property
def timeout(
self,
) -> typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]]:
'''
:Property: timeout: The maximum duration a function can run, in seconds, after which Function Compute terminates the execution. Defaults to 3 seconds, and can be between 1 and 600 seconds.
'''
result = self._values.get("timeout")
return typing.cast(typing.Optional[typing.Union[jsii.Number, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosFunctionProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
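# Sketch of a more fully tuned RosFunctionProps, tying together the value
# constraints documented on the individual properties (all values illustrative):
def _example_tuned_props() -> RosFunctionProps:
    return RosFunctionProps(
        function_name="tuned-function",
        handler="index.handler",
        runtime="python3",
        service_name="my-service",
        environment_variables={"LOG_LEVEL": "info"},
        instance_type="e1",         # flexible instance: 128-3072 MB memory
        instance_concurrency=10,    # between 1 and 100
        initializer="index.init",
        initialization_timeout=30,  # seconds
        memory_size=512,            # a multiple of 64 MB
        timeout=120,                # between 1 and 600 seconds
    )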
class RosLayer(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosLayer",
):
'''A ROS template type: ``ALIYUN::FC::Layer``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosLayerProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Layer``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrArn")
def attr_arn(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Arn: The ARN of the layer resource.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrArn"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLayerName")
def attr_layer_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: LayerName: The name of the layer
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLayerName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersion")
def attr_version(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Version: The version of the layer resource.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersion"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="code")
def code(self) -> typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"]:
'''
:Property: code: The code of the layer.
'''
return typing.cast(typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"], jsii.get(self, "code"))
@code.setter
def code(
self,
value: typing.Union[ros_cdk_core.IResolvable, "RosLayer.CodeProperty"],
) -> None:
jsii.set(self, "code", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="compatibleRuntime")
def compatible_runtime(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]:
'''
:Property: compatibleRuntime: The runtime environments supported by the layer. For example: nodejs12, nodejs10, nodejs8, nodejs6, python3, and python2.7
'''
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]], jsii.get(self, "compatibleRuntime"))
@compatible_runtime.setter
def compatible_runtime(
self,
value: typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]],
) -> None:
jsii.set(self, "compatibleRuntime", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="layerName")
def layer_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: layerName: The name of the layer
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "layerName"))
@layer_name.setter
def layer_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "layerName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="description")
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: The description of the layer.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "description"))
@description.setter
def description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "description", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosLayer.CodeProperty",
jsii_struct_bases=[],
name_mapping={
"oss_bucket_name": "ossBucketName",
"oss_object_name": "ossObjectName",
"zip_file": "zipFile",
},
)
class CodeProperty:
def __init__(
self,
*,
oss_bucket_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
oss_object_name: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
zip_file: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param oss_bucket_name:
:param oss_object_name:
:param zip_file:
'''
self._values: typing.Dict[str, typing.Any] = {}
if oss_bucket_name is not None:
self._values["oss_bucket_name"] = oss_bucket_name
if oss_object_name is not None:
self._values["oss_object_name"] = oss_object_name
if zip_file is not None:
self._values["zip_file"] = zip_file
@builtins.property
def oss_bucket_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
ossBucketName: The name of the Object Storage Service (OSS) bucket that
stores the ZIP package of the function code.
Priority: ZipFile > OssBucketName&OssObjectName.
'''
result = self._values.get("oss_bucket_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def oss_object_name(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
ossObjectName: The name of the OSS object that stores the ZIP package of the function code.
Priority: ZipFile > OssBucketName&OssObjectName.
'''
result = self._values.get("oss_object_name")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def zip_file(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
zipFile: The function code that is encoded in Base64.
Priority: ZipFile > OssBucketName&OssObjectName.
'''
result = self._values.get("zip_file")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "CodeProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosLayerProps",
jsii_struct_bases=[],
name_mapping={
"code": "code",
"compatible_runtime": "compatibleRuntime",
"layer_name": "layerName",
"description": "description",
},
)
class RosLayerProps:
def __init__(
self,
*,
code: typing.Union[ros_cdk_core.IResolvable, RosLayer.CodeProperty],
compatible_runtime: typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[builtins.str, ros_cdk_core.IResolvable]]],
layer_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Layer``.
:param code:
:param compatible_runtime:
:param layer_name:
:param description:
'''
self._values: typing.Dict[str, typing.Any] = {
"code": code,
"compatible_runtime": compatible_runtime,
"layer_name": layer_name,
}
if description is not None:
self._values["description"] = description
@builtins.property
def code(self) -> typing.Union[ros_cdk_core.IResolvable, RosLayer.CodeProperty]:
'''
:Property: code: The code of the layer.
'''
result = self._values.get("code")
assert result is not None, "Required property 'code' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, RosLayer.CodeProperty], result)
@builtins.property
def compatible_runtime(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]]:
'''
:Property: compatibleRuntime: The runtime environments supported by the layer. For example: nodejs12, nodejs10, nodejs8, nodejs6, python3, and python2.7
'''
result = self._values.get("compatible_runtime")
assert result is not None, "Required property 'compatible_runtime' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[builtins.str, ros_cdk_core.IResolvable]]], result)
@builtins.property
def layer_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: layerName: The name of the layer
'''
result = self._values.get("layer_name")
assert result is not None, "Required property 'layer_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: The description of the layer.
'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosLayerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
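# A minimal usage sketch for RosLayer (scope and OSS names are hypothetical):
def _example_layer(scope: ros_cdk_core.Construct) -> RosLayer:
    return RosLayer(
        scope,
        "SharedDeps",
        RosLayerProps(
            layer_name="shared-deps",
            compatible_runtime=["python3", "nodejs12"],
            code=RosLayer.CodeProperty(
                oss_bucket_name="my-bucket",        # placeholder bucket
                oss_object_name="layers/deps.zip",  # placeholder object
            ),
            description="Common third-party dependencies",
        ),
        True,  # enable_resource_property_constraint
    )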
class RosProvisionConfig(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosProvisionConfig",
):
'''A ROS template type: ``ALIYUN::FC::ProvisionConfig``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosProvisionConfigProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::ProvisionConfig``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: FunctionName: The function name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrQualifier")
def attr_qualifier(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Qualifier: The service alias
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrQualifier"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrResource")
def attr_resource(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Resource: The resource
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrResource"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The service name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTarget")
def attr_target(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Target: The number of provisioned instances.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTarget"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="functionName")
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "functionName"))
@function_name.setter
def function_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "functionName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="qualifier")
def qualifier(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
qualifier: Service's alias.
Example: "LATEST"
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "qualifier"))
@qualifier.setter
def qualifier(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "qualifier", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="target")
def target(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''
:Property: target: The number of provisioned instances.
'''
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], jsii.get(self, "target"))
@target.setter
def target(
self,
value: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "target", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosProvisionConfigProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"qualifier": "qualifier",
"service_name": "serviceName",
"target": "target",
},
)
class RosProvisionConfigProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
qualifier: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
target: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
) -> None:
'''Properties for defining a ``ALIYUN::FC::ProvisionConfig``.
:param function_name:
:param qualifier:
:param service_name:
:param target:
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"qualifier": qualifier,
"service_name": service_name,
"target": target,
}
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name
'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def qualifier(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
qualifier: Service's alias.
Example: "LATEST"
'''
result = self._values.get("qualifier")
assert result is not None, "Required property 'qualifier' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def target(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''
:Property: target: The number of provisioned instances.
'''
result = self._values.get("target")
assert result is not None, "Required property 'target' is missing"
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosProvisionConfigProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
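# Illustrative sketch: building props for ``ALIYUN::FC::ProvisionConfig``. All
# literal values ("demo-function", "demo-service", "LATEST", 2) are hypothetical
# placeholders, not defaults of the resource.
def _example_provision_config_props() -> RosProvisionConfigProps:
    return RosProvisionConfigProps(
        function_name="demo-function",  # name of an existing FC function (placeholder)
        qualifier="LATEST",             # service alias, per the qualifier docs above
        service_name="demo-service",    # name of an existing FC service (placeholder)
        target=2,                       # number of provisioned instances
    )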
class RosService(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosService",
):
'''A ROS template type: ``ALIYUN::FC::Service``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosServiceProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Service``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrInternetAccess")
def attr_internet_access(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: InternetAccess: Whether Internet access is enabled
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrInternetAccess"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLogProject")
def attr_log_project(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: LogProject: Log project of service
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLogProject"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLogstore")
def attr_logstore(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Logstore: Log store of service
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLogstore"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrRole")
def attr_role(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Role: Role of service
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrRole"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceId: The service ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The service name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTags")
def attr_tags(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: Tags: Tags of service
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTags"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVpcId")
def attr_vpc_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: VpcId: VPC ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVpcId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="deletionForce")
def deletion_force(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: deletionForce: Whether to force delete the service without waiting for network interfaces to be cleaned up when VpcConfig is specified. Default value is false.
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "deletionForce"))
@deletion_force.setter
def deletion_force(
self,
value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "deletionForce", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="description")
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Service description
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "description"))
@description.setter
def description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "description", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="internetAccess")
def internet_access(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: internetAccess: Set it to true to enable Internet access.
'''
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], jsii.get(self, "internetAccess"))
@internet_access.setter
def internet_access(
self,
value: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "internetAccess", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="logConfig")
def log_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.LogConfigProperty"]]:
'''
:Property: logConfig: Log configuration. Function Compute pushes function execution logs to the configured log store.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.LogConfigProperty"]], jsii.get(self, "logConfig"))
@log_config.setter
def log_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.LogConfigProperty"]],
) -> None:
jsii.set(self, "logConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="nasConfig")
def nas_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.NasConfigProperty"]]:
'''
:Property: nasConfig: NAS configuration. Function Compute uses the NAS file system configured on the service.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.NasConfigProperty"]], jsii.get(self, "nasConfig"))
@nas_config.setter
def nas_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.NasConfigProperty"]],
) -> None:
jsii.set(self, "nasConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="role")
def role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: role: The role that grants Function Compute permission to access the user's cloud resources, such as pushing logs to the user's log store. The temporary STS token generated from this role can be retrieved from the function context and used to access cloud resources.
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "role"))
@role.setter
def role(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "role", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="tags")
def tags(self) -> typing.Optional[typing.List["RosService.TagsProperty"]]:
'''
:Property: tags: Tags to attach to the service. At most 20 tags can be added when creating the service. Each tag has two properties, Key and Value; Key is required.
'''
return typing.cast(typing.Optional[typing.List["RosService.TagsProperty"]], jsii.get(self, "tags"))
@tags.setter
def tags(
self,
value: typing.Optional[typing.List["RosService.TagsProperty"]],
) -> None:
jsii.set(self, "tags", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="tracingConfig")
def tracing_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.TracingConfigProperty"]]:
'''
:Property: tracingConfig: The Tracing Analysis configuration. After Function Compute integrates with Tracing Analysis, you can record the time a request spends in Function Compute, view the cold start time of a function, and record the execution time of a function.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.TracingConfigProperty"]], jsii.get(self, "tracingConfig"))
@tracing_config.setter
def tracing_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.TracingConfigProperty"]],
) -> None:
jsii.set(self, "tracingConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="vpcConfig")
def vpc_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.VpcConfigProperty"]]:
'''
:Property: vpcConfig: VPC configuration. Function Compute uses the configuration to set up an ENI in the specified VPC.
'''
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.VpcConfigProperty"]], jsii.get(self, "vpcConfig"))
@vpc_config.setter
def vpc_config(
self,
value: typing.Optional[typing.Union[ros_cdk_core.IResolvable, "RosService.VpcConfigProperty"]],
) -> None:
jsii.set(self, "vpcConfig", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.LogConfigProperty",
jsii_struct_bases=[],
name_mapping={
"enable_request_metrics": "enableRequestMetrics",
"logstore": "logstore",
"project": "project",
},
)
class LogConfigProperty:
def __init__(
self,
*,
enable_request_metrics: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
logstore: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
project: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param enable_request_metrics:
:param logstore:
:param project:
'''
self._values: typing.Dict[str, typing.Any] = {}
if enable_request_metrics is not None:
self._values["enable_request_metrics"] = enable_request_metrics
if logstore is not None:
self._values["logstore"] = logstore
if project is not None:
self._values["project"] = project
@builtins.property
def enable_request_metrics(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: enableRequestMetrics: Whether to enable request metrics.
'''
result = self._values.get("enable_request_metrics")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def logstore(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: logstore: The name of the Logstore in Log Service
'''
result = self._values.get("logstore")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def project(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: project: The name of the project in Log Service
'''
result = self._values.get("project")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "LogConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.MountPointsProperty",
jsii_struct_bases=[],
name_mapping={"mount_dir": "mountDir", "server_addr": "serverAddr"},
)
class MountPointsProperty:
def __init__(
self,
*,
mount_dir: typing.Union[builtins.str, ros_cdk_core.IResolvable],
server_addr: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
'''
:param mount_dir:
:param server_addr:
'''
self._values: typing.Dict[str, typing.Any] = {
"mount_dir": mount_dir,
"server_addr": server_addr,
}
@builtins.property
def mount_dir(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: mountDir: A local mount point.
'''
result = self._values.get("mount_dir")
assert result is not None, "Required property 'mount_dir' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def server_addr(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serverAddr: The address of the NAS instance.
'''
result = self._values.get("server_addr")
assert result is not None, "Required property 'server_addr' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "MountPointsProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.NasConfigProperty",
jsii_struct_bases=[],
name_mapping={
"group_id": "groupId",
"mount_points": "mountPoints",
"user_id": "userId",
},
)
class NasConfigProperty:
def __init__(
self,
*,
group_id: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
mount_points: typing.Union[ros_cdk_core.IResolvable, typing.Sequence[typing.Union[ros_cdk_core.IResolvable, "RosService.MountPointsProperty"]]],
user_id: typing.Union[jsii.Number, ros_cdk_core.IResolvable],
) -> None:
'''
:param group_id:
:param mount_points:
:param user_id:
'''
self._values: typing.Dict[str, typing.Any] = {
"group_id": group_id,
"mount_points": mount_points,
"user_id": user_id,
}
@builtins.property
def group_id(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''
:Property: groupId: Group ID
'''
result = self._values.get("group_id")
assert result is not None, "Required property 'group_id' is missing"
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)
@builtins.property
def mount_points(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosService.MountPointsProperty"]]]:
'''
:Property: mountPoints: Mount points
'''
result = self._values.get("mount_points")
assert result is not None, "Required property 'mount_points' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.List[typing.Union[ros_cdk_core.IResolvable, "RosService.MountPointsProperty"]]], result)
@builtins.property
def user_id(self) -> typing.Union[jsii.Number, ros_cdk_core.IResolvable]:
'''
:Property: userId: User ID
'''
result = self._values.get("user_id")
assert result is not None, "Required property 'user_id' is missing"
return typing.cast(typing.Union[jsii.Number, ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "NasConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.TagsProperty",
jsii_struct_bases=[],
name_mapping={"key": "key", "value": "value"},
)
class TagsProperty:
def __init__(
self,
*,
key: typing.Union[builtins.str, ros_cdk_core.IResolvable],
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param key:
:param value:
'''
self._values: typing.Dict[str, typing.Any] = {
"key": key,
}
if value is not None:
self._values["value"] = value
@builtins.property
def key(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: key: The tag key.
'''
result = self._values.get("key")
assert result is not None, "Required property 'key' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def value(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: value: The tag value.
'''
result = self._values.get("value")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "TagsProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.TracingConfigProperty",
jsii_struct_bases=[],
name_mapping={"params": "params", "type": "type"},
)
class TracingConfigProperty:
def __init__(
self,
*,
params: typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]] = None,
type: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''
:param params:
:param type:
'''
self._values: typing.Dict[str, typing.Any] = {}
if params is not None:
self._values["params"] = params
if type is not None:
self._values["type"] = type
@builtins.property
def params(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]]:
'''
:Property: params: The tracing analysis parameters.
'''
result = self._values.get("params")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]], result)
@builtins.property
def type(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: type: The type of the tracing analysis system.
'''
result = self._values.get("type")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "TracingConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosService.VpcConfigProperty",
jsii_struct_bases=[],
name_mapping={
"security_group_id": "securityGroupId",
"vpc_id": "vpcId",
"v_switch_ids": "vSwitchIds",
},
)
class VpcConfigProperty:
def __init__(
self,
*,
security_group_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
vpc_id: typing.Union[builtins.str, ros_cdk_core.IResolvable],
v_switch_ids: typing.Union[typing.Sequence[typing.Any], ros_cdk_core.IResolvable],
) -> None:
'''
:param security_group_id:
:param vpc_id:
:param v_switch_ids:
'''
self._values: typing.Dict[str, typing.Any] = {
"security_group_id": security_group_id,
"vpc_id": vpc_id,
"v_switch_ids": v_switch_ids,
}
@builtins.property
def security_group_id(
self,
) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: securityGroupId: Security group ID
'''
result = self._values.get("security_group_id")
assert result is not None, "Required property 'security_group_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def vpc_id(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: vpcId: VPC ID
'''
result = self._values.get("vpc_id")
assert result is not None, "Required property 'vpc_id' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def v_switch_ids(
self,
) -> typing.Union[typing.List[typing.Any], ros_cdk_core.IResolvable]:
'''
:Property: vSwitchIds: List of VSwitch IDs
'''
result = self._values.get("v_switch_ids")
assert result is not None, "Required property 'v_switch_ids' is missing"
return typing.cast(typing.Union[typing.List[typing.Any], ros_cdk_core.IResolvable], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "VpcConfigProperty(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
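# Illustrative sketch: composing the nested NAS configuration used by
# ``RosService``. The NAS address, mount directory, and numeric IDs are all
# hypothetical placeholders.
def _example_nas_config() -> "RosService.NasConfigProperty":
    mount_point = RosService.MountPointsProperty(
        mount_dir="/mnt/nas",                                   # local mount point (placeholder)
        server_addr="demo.cn-hangzhou.nas.aliyuncs.com:/share", # NAS instance address (placeholder)
    )
    return RosService.NasConfigProperty(
        group_id=100,                # group ID (placeholder)
        mount_points=[mount_point],  # one or more mount points
        user_id=100,                 # user ID (placeholder)
    )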
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosServiceProps",
jsii_struct_bases=[],
name_mapping={
"service_name": "serviceName",
"deletion_force": "deletionForce",
"description": "description",
"internet_access": "internetAccess",
"log_config": "logConfig",
"nas_config": "nasConfig",
"role": "role",
"tags": "tags",
"tracing_config": "tracingConfig",
"vpc_config": "vpcConfig",
},
)
class RosServiceProps:
def __init__(
self,
*,
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
deletion_force: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
internet_access: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
log_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]] = None,
nas_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]] = None,
role: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
tags: typing.Optional[typing.Sequence[RosService.TagsProperty]] = None,
tracing_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]] = None,
vpc_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Service``.
:param service_name:
:param deletion_force:
:param description:
:param internet_access:
:param log_config:
:param nas_config:
:param role:
:param tags:
:param tracing_config:
:param vpc_config:
'''
self._values: typing.Dict[str, typing.Any] = {
"service_name": service_name,
}
if deletion_force is not None:
self._values["deletion_force"] = deletion_force
if description is not None:
self._values["description"] = description
if internet_access is not None:
self._values["internet_access"] = internet_access
if log_config is not None:
self._values["log_config"] = log_config
if nas_config is not None:
self._values["nas_config"] = nas_config
if role is not None:
self._values["role"] = role
if tags is not None:
self._values["tags"] = tags
if tracing_config is not None:
self._values["tracing_config"] = tracing_config
if vpc_config is not None:
self._values["vpc_config"] = vpc_config
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def deletion_force(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: deletionForce: Whether to force delete the service without waiting for network interfaces to be cleaned up when VpcConfig is specified. Default value is false.
'''
result = self._values.get("deletion_force")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Service description
'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def internet_access(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''
:Property: internetAccess: Set it to true to enable Internet access.
'''
result = self._values.get("internet_access")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def log_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]]:
'''
:Property: logConfig: Log configuration. Function Compute pushes function execution logs to the configured log store.
'''
result = self._values.get("log_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]], result)
@builtins.property
def nas_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]]:
'''
:Property: nasConfig: NAS configuration. Function Compute uses the NAS file system configured on the service.
'''
result = self._values.get("nas_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]], result)
@builtins.property
def role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: role: The role that grants Function Compute permission to access the user's cloud resources, such as pushing logs to the user's log store. The temporary STS token generated from this role can be retrieved from the function context and used to access cloud resources.
'''
result = self._values.get("role")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def tags(self) -> typing.Optional[typing.List[RosService.TagsProperty]]:
'''
:Property: tags: Tags to attach to the service. At most 20 tags can be added when creating the service. Each tag has two properties, Key and Value; Key is required.
'''
result = self._values.get("tags")
return typing.cast(typing.Optional[typing.List[RosService.TagsProperty]], result)
@builtins.property
def tracing_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]]:
'''
:Property: tracingConfig: The Tracing Analysis configuration. After Function Compute integrates with Tracing Analysis, you can record the time a request spends in Function Compute, view the cold start time of a function, and record the execution time of a function.
'''
result = self._values.get("tracing_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]], result)
@builtins.property
def vpc_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]]:
'''
:Property: vpcConfig: VPC configuration. Function Compute uses the configuration to set up an ENI in the specified VPC.
'''
result = self._values.get("vpc_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosServiceProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
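# Illustrative sketch: assembling ``RosServiceProps`` with an optional log
# configuration and a tag. The service, project, and Logstore names are
# hypothetical placeholders.
def _example_service_props() -> RosServiceProps:
    return RosServiceProps(
        service_name="demo-service",  # the only required property
        description="Demo service defined in a ROS template",
        log_config=RosService.LogConfigProperty(
            enable_request_metrics=True,
            logstore="demo-logstore",  # Logstore name in Log Service (placeholder)
            project="demo-project",    # project name in Log Service (placeholder)
        ),
        tags=[RosService.TagsProperty(key="env", value="dev")],
    )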
class RosTrigger(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosTrigger",
):
'''A ROS template type: ``ALIYUN::FC::Trigger``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosTriggerProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Trigger``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: FunctionName: Function name.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: Service name.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTriggerId")
def attr_trigger_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: TriggerId: The trigger ID.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTriggerId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTriggerName")
def attr_trigger_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: TriggerName: Trigger name.
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTriggerName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="functionName")
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "functionName"))
@function_name.setter
def function_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "functionName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name.
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="triggerConfig")
def trigger_config(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]:
'''
:Property: triggerConfig: Event-source-specific trigger configuration. The value differs according to the trigger type.
'''
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]], jsii.get(self, "triggerConfig"))
@trigger_config.setter
def trigger_config(
self,
value: typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]],
) -> None:
jsii.set(self, "triggerConfig", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="triggerName")
def trigger_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
triggerName: Trigger name.
Example: "image_resize"
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "triggerName"))
@trigger_name.setter
def trigger_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "triggerName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="triggerType")
def trigger_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
triggerType: Trigger type, e.g. oss, timer, logs. This determines how the trigger config is interpreted.
Example: "oss"
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "triggerType"))
@trigger_type.setter
def trigger_type(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "triggerType", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="invocationRole")
def invocation_role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
invocationRole: The role that grants the event source permission to invoke the function on behalf of the user. This is optional for some triggers.
Example: "acs:ram::1234567890:role/fc-test"
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "invocationRole"))
@invocation_role.setter
def invocation_role(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "invocationRole", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="qualifier")
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
qualifier: Service version or alias.
Example: "LATEST"
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "qualifier"))
@qualifier.setter
def qualifier(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "qualifier", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="sourceArn")
def source_arn(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
sourceArn: The Aliyun Resource Name (ARN) of the event source. This is optional for some triggers.
Example: "acs:oss:cn-shanghai:12345:mybucket"
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "sourceArn"))
@source_arn.setter
def source_arn(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "sourceArn", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosTriggerProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"service_name": "serviceName",
"trigger_config": "triggerConfig",
"trigger_name": "triggerName",
"trigger_type": "triggerType",
"invocation_role": "invocationRole",
"qualifier": "qualifier",
"source_arn": "sourceArn",
},
)
class RosTriggerProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
trigger_config: typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]],
trigger_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
trigger_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
invocation_role: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
qualifier: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
source_arn: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Trigger``.
:param function_name:
:param service_name:
:param trigger_config:
:param trigger_name:
:param trigger_type:
:param invocation_role:
:param qualifier:
:param source_arn:
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"service_name": service_name,
"trigger_config": trigger_config,
"trigger_name": trigger_name,
"trigger_type": trigger_type,
}
if invocation_role is not None:
self._values["invocation_role"] = invocation_role
if qualifier is not None:
self._values["qualifier"] = qualifier
if source_arn is not None:
self._values["source_arn"] = source_arn
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: functionName: Function name.
'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name.
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def trigger_config(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]:
'''
:Property: triggerConfig: Event-source-specific trigger configuration. The value differs according to the trigger type.
'''
result = self._values.get("trigger_config")
assert result is not None, "Required property 'trigger_config' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]], result)
@builtins.property
def trigger_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
triggerName: Trigger name.
Example: "image_resize"
'''
result = self._values.get("trigger_name")
assert result is not None, "Required property 'trigger_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def trigger_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property:
triggerType: Trigger type, e.g. oss, timer, logs. This determines how the trigger config is interpreted.
Example: "oss"
'''
result = self._values.get("trigger_type")
assert result is not None, "Required property 'trigger_type' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def invocation_role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
invocationRole: The role that grants the event source permission to invoke the function on behalf of the user. This is optional for some triggers.
Example: "acs:ram::1234567890:role/fc-test"
'''
result = self._values.get("invocation_role")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
qualifier: Service version or alias.
Example: "LATEST"
'''
result = self._values.get("qualifier")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def source_arn(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property:
sourceArn: The Aliyun Resource Name (ARN) of the event source. This is optional for some triggers.
Example: "acs:oss:cn-shanghai:12345:mybucket"
'''
result = self._values.get("source_arn")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosTriggerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
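# Illustrative sketch: props for a timer trigger. The ``trigger_config`` schema
# depends on ``trigger_type``; the keys below follow the common Function Compute
# timer-trigger format and are an assumption, not a documented contract.
def _example_timer_trigger_props() -> RosTriggerProps:
    return RosTriggerProps(
        function_name="demo-function",  # target function (placeholder)
        service_name="demo-service",    # owning service (placeholder)
        trigger_name="nightly_job",     # trigger name (placeholder)
        trigger_type="timer",           # determines how trigger_config is interpreted
        trigger_config={
            "cronExpression": "0 0 2 * * *",  # assumed timer-config key
            "enable": True,                   # assumed timer-config key
        },
        qualifier="LATEST",             # service version or alias
    )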
class RosVersion(
ros_cdk_core.RosResource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.RosVersion",
):
'''A ROS template type: ``ALIYUN::FC::Version``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "RosVersionProps",
enable_resource_property_constraint: builtins.bool,
) -> None:
'''Create a new ``ALIYUN::FC::Version``.
:param scope: - scope in which this resource is defined.
:param id: - scoped id of the resource.
:param props: - resource properties.
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@jsii.member(jsii_name="renderProperties")
def _render_properties(
self,
props: typing.Mapping[builtins.str, typing.Any],
) -> typing.Mapping[builtins.str, typing.Any]:
'''
:param props: -
'''
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.invoke(self, "renderProperties", [props]))
@jsii.python.classproperty # type: ignore[misc]
@jsii.member(jsii_name="ROS_RESOURCE_TYPE_NAME")
def ROS_RESOURCE_TYPE_NAME(cls) -> builtins.str:
'''The resource type name for this resource class.'''
return typing.cast(builtins.str, jsii.sget(cls, "ROS_RESOURCE_TYPE_NAME"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: ServiceName: The service name
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersionId")
def attr_version_id(self) -> ros_cdk_core.IResolvable:
'''
:Attribute: VersionId: The version ID
'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersionId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="rosProperties")
def _ros_properties(self) -> typing.Mapping[builtins.str, typing.Any]:
return typing.cast(typing.Mapping[builtins.str, typing.Any], jsii.get(self, "rosProperties"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="enableResourcePropertyConstraint")
def enable_resource_property_constraint(self) -> builtins.bool:
return typing.cast(builtins.bool, jsii.get(self, "enableResourcePropertyConstraint"))
@enable_resource_property_constraint.setter
def enable_resource_property_constraint(self, value: builtins.bool) -> None:
jsii.set(self, "enableResourcePropertyConstraint", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="serviceName")
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], jsii.get(self, "serviceName"))
@service_name.setter
def service_name(
self,
value: typing.Union[builtins.str, ros_cdk_core.IResolvable],
) -> None:
jsii.set(self, "serviceName", value)
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="description")
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Version description
'''
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], jsii.get(self, "description"))
@description.setter
def description(
self,
value: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]],
) -> None:
jsii.set(self, "description", value)
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.RosVersionProps",
jsii_struct_bases=[],
name_mapping={"service_name": "serviceName", "description": "description"},
)
class RosVersionProps:
def __init__(
self,
*,
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Version``.
:param service_name:
:param description:
'''
self._values: typing.Dict[str, typing.Any] = {
"service_name": service_name,
}
if description is not None:
self._values["description"] = description
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''
:Property: serviceName: Service name
'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''
:Property: description: Version description
'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "RosVersionProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
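# Illustrative sketch: props for publishing a service version; the service
# name is a hypothetical placeholder.
def _example_version_props() -> RosVersionProps:
    return RosVersionProps(
        service_name="demo-service",
        description="Initial published version",
    )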
class Service(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Service",
):
'''A ROS resource type: ``ALIYUN::FC::Service``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "ServiceProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Service``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrInternetAccess")
def attr_internet_access(self) -> ros_cdk_core.IResolvable:
'''Attribute InternetAccess: Whether Internet access is enabled.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrInternetAccess"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLogProject")
def attr_log_project(self) -> ros_cdk_core.IResolvable:
'''Attribute LogProject: Log project of service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLogProject"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrLogstore")
def attr_logstore(self) -> ros_cdk_core.IResolvable:
'''Attribute Logstore: Log store of service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrLogstore"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrRole")
def attr_role(self) -> ros_cdk_core.IResolvable:
'''Attribute Role: Role of service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrRole"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceId")
def attr_service_id(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceId: The service ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: The service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTags")
def attr_tags(self) -> ros_cdk_core.IResolvable:
'''Attribute Tags: Tags of service.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTags"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVpcId")
def attr_vpc_id(self) -> ros_cdk_core.IResolvable:
'''Attribute VpcId: VPC ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVpcId"))
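# Illustrative sketch: using the high-level ``Service`` construct inside a
# stack. The app/stack scaffolding and all names are hypothetical, and
# ``ros_cdk_core.App``/``Stack`` usage here is an assumption about the core
# package; in real code ``Service`` and ``ServiceProps`` are imported from
# this package rather than referenced from within the module.
def _example_service_stack() -> ros_cdk_core.App:
    app = ros_cdk_core.App()
    stack = ros_cdk_core.Stack(app, "demo-stack")
    Service(stack, "DemoService", ServiceProps(service_name="demo-service"))
    return app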
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.ServiceProps",
jsii_struct_bases=[],
name_mapping={
"service_name": "serviceName",
"deletion_force": "deletionForce",
"description": "description",
"internet_access": "internetAccess",
"log_config": "logConfig",
"nas_config": "nasConfig",
"role": "role",
"tags": "tags",
"tracing_config": "tracingConfig",
"vpc_config": "vpcConfig",
},
)
class ServiceProps:
def __init__(
self,
*,
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
deletion_force: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
internet_access: typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]] = None,
log_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]] = None,
nas_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]] = None,
role: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
tags: typing.Optional[typing.Sequence[RosService.TagsProperty]] = None,
tracing_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]] = None,
vpc_config: typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Service``.
:param service_name: Property serviceName: Service name.
:param deletion_force: Property deletionForce: Whether to force delete the service without waiting for network interfaces to be cleaned up when VpcConfig is specified. Default value is false.
:param description: Property description: Service description.
:param internet_access: Property internetAccess: Set it to true to enable Internet access.
:param log_config: Property logConfig: Log configuration. Function Compute pushes function execution logs to the configured log store.
:param nas_config: Property nasConfig: NAS configuration. Function Compute uses the NAS file system configured on the service.
:param role: Property role: The role that grants Function Compute permission to access the user's cloud resources, such as pushing logs to the user's log store. The temporary STS token generated from this role can be retrieved from the function context and used to access cloud resources.
:param tags: Property tags: Tags to attach to the service. At most 20 tags can be added when creating the service. Each tag has two properties, Key and Value; Key is required.
:param tracing_config: Property tracingConfig: The Tracing Analysis configuration. After Function Compute integrates with Tracing Analysis, you can record the time a request spends in Function Compute, view the cold start time of a function, and record the execution time of a function.
:param vpc_config: Property vpcConfig: VPC configuration. Function Compute uses the configuration to set up an ENI in the specified VPC.
'''
self._values: typing.Dict[str, typing.Any] = {
"service_name": service_name,
}
if deletion_force is not None:
self._values["deletion_force"] = deletion_force
if description is not None:
self._values["description"] = description
if internet_access is not None:
self._values["internet_access"] = internet_access
if log_config is not None:
self._values["log_config"] = log_config
if nas_config is not None:
self._values["nas_config"] = nas_config
if role is not None:
self._values["role"] = role
if tags is not None:
self._values["tags"] = tags
if tracing_config is not None:
self._values["tracing_config"] = tracing_config
if vpc_config is not None:
self._values["vpc_config"] = vpc_config
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def deletion_force(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
        '''Property deletionForce: Whether to force delete the service without waiting for network interfaces to be cleaned up if VpcConfig is specified.
        Default value is false.
'''
result = self._values.get("deletion_force")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property description: Service description.'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def internet_access(
self,
) -> typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]]:
'''Property internetAccess: Set it to true to enable Internet access.'''
result = self._values.get("internet_access")
return typing.cast(typing.Optional[typing.Union[builtins.bool, ros_cdk_core.IResolvable]], result)
@builtins.property
def log_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]]:
'''Property logConfig: Log configuration.
Function Compute pushes function execution logs to the configured log store.
'''
result = self._values.get("log_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.LogConfigProperty]], result)
@builtins.property
def nas_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]]:
'''Property nasConfig: NAS configuration.
Function Compute uses a specified NAS configured on the service.
'''
result = self._values.get("nas_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.NasConfigProperty]], result)
@builtins.property
def role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property role: The role grants Function Compute the permission to access the user’s cloud resources, such as pushing logs to the user’s log store.
        The temporary STS token generated from this role can be retrieved from the function context and used to access cloud resources.
'''
result = self._values.get("role")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def tags(self) -> typing.Optional[typing.List[RosService.TagsProperty]]:
        '''Property tags: Tags to attach to the service.
        A maximum of 20 tags can be added when creating the service. Each tag has two properties, Key and Value; Key is required.
'''
result = self._values.get("tags")
return typing.cast(typing.Optional[typing.List[RosService.TagsProperty]], result)
@builtins.property
def tracing_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]]:
'''Property tracingConfig: The Tracing Analysis configuration.
        After Function Compute integrates with Tracing Analysis, you can record how long a request stays in Function Compute, view the cold start time for a function, and record the execution time of a function.
'''
result = self._values.get("tracing_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.TracingConfigProperty]], result)
@builtins.property
def vpc_config(
self,
) -> typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]]:
'''Property vpcConfig: VPC configuration.
        Function Compute uses this configuration to set up an ENI in the specified VPC.
'''
result = self._values.get("vpc_config")
return typing.cast(typing.Optional[typing.Union[ros_cdk_core.IResolvable, RosService.VpcConfigProperty]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "ServiceProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
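
# --- Illustrative usage sketch; not part of the generated bindings. ---
# A minimal example, under assumed values, of wiring ``ServiceProps`` into the
# ``Service`` resource type defined in this module. The construct passed as
# ``scope`` and every literal below (resource id, service name, description)
# are hypothetical.
def _example_service(scope: ros_cdk_core.Construct) -> Service:
    '''Sketch: declare an ``ALIYUN::FC::Service`` with a few optional props.'''
    return Service(
        scope,
        "DemoService",
        ServiceProps(
            service_name="demo-service",      # required
            description="Demo FC service",    # optional
            internet_access=True,             # optional
        ),
    )
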
class Trigger(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Trigger",
):
'''A ROS resource type: ``ALIYUN::FC::Trigger``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "TriggerProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Trigger``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrFunctionName")
def attr_function_name(self) -> ros_cdk_core.IResolvable:
'''Attribute FunctionName: Function name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrFunctionName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: Service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTriggerId")
def attr_trigger_id(self) -> ros_cdk_core.IResolvable:
'''Attribute TriggerId: The trigger ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTriggerId"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrTriggerName")
def attr_trigger_name(self) -> ros_cdk_core.IResolvable:
'''Attribute TriggerName: Trigger name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrTriggerName"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.TriggerProps",
jsii_struct_bases=[],
name_mapping={
"function_name": "functionName",
"service_name": "serviceName",
"trigger_config": "triggerConfig",
"trigger_name": "triggerName",
"trigger_type": "triggerType",
"invocation_role": "invocationRole",
"qualifier": "qualifier",
"source_arn": "sourceArn",
},
)
class TriggerProps:
def __init__(
self,
*,
function_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
trigger_config: typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]],
trigger_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
trigger_type: typing.Union[builtins.str, ros_cdk_core.IResolvable],
invocation_role: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
qualifier: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
source_arn: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Trigger``.
:param function_name: Property functionName: Function name.
:param service_name: Property serviceName: Service name.
        :param trigger_config: Property triggerConfig: Event source specific trigger configuration. The value differs according to the trigger type.
:param trigger_name: Property triggerName: Trigger name. Example : "image_resize"
:param trigger_type: Property triggerType: Trigger type, e.g. oss, timer, logs. This determines how the trigger config is interpreted. Example : "oss".
        :param invocation_role: Property invocationRole: The role grants the event source permission to run the function on behalf of the user. This is optional for some triggers. Example : "acs:ram::1234567890:role/fc-test"
        :param qualifier: Property qualifier: Service version or alias. Example : "LATEST"
:param source_arn: Property sourceArn: The Aliyun Resource Name (ARN) of event source. This is optional for some triggers. Example : "acs:oss:cn-shanghai:12345:mybucket"
'''
self._values: typing.Dict[str, typing.Any] = {
"function_name": function_name,
"service_name": service_name,
"trigger_config": trigger_config,
"trigger_name": trigger_name,
"trigger_type": trigger_type,
}
if invocation_role is not None:
self._values["invocation_role"] = invocation_role
if qualifier is not None:
self._values["qualifier"] = qualifier
if source_arn is not None:
self._values["source_arn"] = source_arn
@builtins.property
def function_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property functionName: Function name.'''
result = self._values.get("function_name")
assert result is not None, "Required property 'function_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def trigger_config(
self,
) -> typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]]:
        '''Property triggerConfig: Event source specific trigger configuration.
        The value differs according to the trigger type.
'''
result = self._values.get("trigger_config")
assert result is not None, "Required property 'trigger_config' is missing"
return typing.cast(typing.Union[ros_cdk_core.IResolvable, typing.Mapping[builtins.str, typing.Any]], result)
@builtins.property
def trigger_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property triggerName: Trigger name.
Example : "image_resize"
'''
result = self._values.get("trigger_name")
assert result is not None, "Required property 'trigger_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def trigger_type(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property triggerType: Trigger type, e.g. oss, timer, logs. This determines how the trigger config is interpreted. Example : "oss".'''
result = self._values.get("trigger_type")
assert result is not None, "Required property 'trigger_type' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def invocation_role(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property invocationRole: The role grants the event source permission to run the function on behalf of the user.
This is optional for some triggers.
Example : "acs:ram::1234567890:role/fc-test"
'''
result = self._values.get("invocation_role")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def qualifier(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
        '''Property qualifier: Service version or alias.
Example : "LATEST"
'''
result = self._values.get("qualifier")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
@builtins.property
def source_arn(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property sourceArn: The Aliyun Resource Name (ARN) of event source.
This is optional for some triggers.
Example : "acs:oss:cn-shanghai:12345:mybucket"
'''
result = self._values.get("source_arn")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "TriggerProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
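
# --- Illustrative usage sketch; not part of the generated bindings. ---
# ``trigger_config`` is an event-source-specific mapping whose keys depend on
# the trigger type. The timer keys shown here, the scope, and all literal
# values are assumptions for illustration only; consult the Function Compute
# documentation for the authoritative schema of each trigger type.
def _example_timer_trigger(scope: ros_cdk_core.Construct) -> Trigger:
    '''Sketch: declare an ``ALIYUN::FC::Trigger`` of type ``timer``.'''
    return Trigger(
        scope,
        "DemoTrigger",
        TriggerProps(
            function_name="demo-function",
            service_name="demo-service",
            trigger_name="demo_timer",
            trigger_type="timer",
            # Hypothetical timer payload; keys vary per trigger type.
            trigger_config={"cronExpression": "@every 5m", "enable": True},
        ),
    )
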
class Version(
ros_cdk_core.Resource,
metaclass=jsii.JSIIMeta,
jsii_type="@alicloud/ros-cdk-fc.Version",
):
'''A ROS resource type: ``ALIYUN::FC::Version``.'''
def __init__(
self,
scope: ros_cdk_core.Construct,
id: builtins.str,
props: "VersionProps",
enable_resource_property_constraint: typing.Optional[builtins.bool] = None,
) -> None:
'''Create a new ``ALIYUN::FC::Version``.
Param scope - scope in which this resource is defined
Param id - scoped id of the resource
Param props - resource properties
:param scope: -
:param id: -
:param props: -
:param enable_resource_property_constraint: -
'''
jsii.create(self.__class__, self, [scope, id, props, enable_resource_property_constraint])
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrServiceName")
def attr_service_name(self) -> ros_cdk_core.IResolvable:
'''Attribute ServiceName: The service name.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrServiceName"))
@builtins.property # type: ignore[misc]
@jsii.member(jsii_name="attrVersionId")
def attr_version_id(self) -> ros_cdk_core.IResolvable:
'''Attribute VersionId: The version ID.'''
return typing.cast(ros_cdk_core.IResolvable, jsii.get(self, "attrVersionId"))
@jsii.data_type(
jsii_type="@alicloud/ros-cdk-fc.VersionProps",
jsii_struct_bases=[],
name_mapping={"service_name": "serviceName", "description": "description"},
)
class VersionProps:
def __init__(
self,
*,
service_name: typing.Union[builtins.str, ros_cdk_core.IResolvable],
description: typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]] = None,
) -> None:
'''Properties for defining a ``ALIYUN::FC::Version``.
:param service_name: Property serviceName: Service name.
:param description: Property description: Version description.
'''
self._values: typing.Dict[str, typing.Any] = {
"service_name": service_name,
}
if description is not None:
self._values["description"] = description
@builtins.property
def service_name(self) -> typing.Union[builtins.str, ros_cdk_core.IResolvable]:
'''Property serviceName: Service name.'''
result = self._values.get("service_name")
assert result is not None, "Required property 'service_name' is missing"
return typing.cast(typing.Union[builtins.str, ros_cdk_core.IResolvable], result)
@builtins.property
def description(
self,
) -> typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]]:
'''Property description: Version description.'''
result = self._values.get("description")
return typing.cast(typing.Optional[typing.Union[builtins.str, ros_cdk_core.IResolvable]], result)
def __eq__(self, rhs: typing.Any) -> builtins.bool:
return isinstance(rhs, self.__class__) and rhs._values == self._values
def __ne__(self, rhs: typing.Any) -> builtins.bool:
return not (rhs == self)
def __repr__(self) -> str:
return "VersionProps(%s)" % ", ".join(
k + "=" + repr(v) for k, v in self._values.items()
)
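
# --- Illustrative usage sketch; not part of the generated bindings. ---
# Publishing a service version only requires the service name; the scope and
# the literal values below are hypothetical.
def _example_version(scope: ros_cdk_core.Construct) -> Version:
    '''Sketch: declare an ``ALIYUN::FC::Version`` for an existing service.'''
    return Version(
        scope,
        "DemoVersion",
        VersionProps(
            service_name="demo-service",
            description="Initial published version",
        ),
    )
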
__all__ = [
"Alias",
"AliasProps",
"CustomDomain",
"CustomDomainProps",
"Function",
"FunctionInvoker",
"FunctionInvokerProps",
"FunctionProps",
"Layer",
"LayerProps",
"ProvisionConfig",
"ProvisionConfigProps",
"RosAlias",
"RosAliasProps",
"RosCustomDomain",
"RosCustomDomainProps",
"RosFunction",
"RosFunctionInvoker",
"RosFunctionInvokerProps",
"RosFunctionProps",
"RosLayer",
"RosLayerProps",
"RosProvisionConfig",
"RosProvisionConfigProps",
"RosService",
"RosServiceProps",
"RosTrigger",
"RosTriggerProps",
"RosVersion",
"RosVersionProps",
"Service",
"ServiceProps",
"Trigger",
"TriggerProps",
"Version",
"VersionProps",
]
publication.publish()
| en | 0.657899 | 1.980103 | 2 |
open_spiel/python/algorithms/policy_gradient.py | asmith26/open_spiel | 0 | 6631950 | <filename>open_spiel/python/algorithms/policy_gradient.py
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Policy Gradient based agents implemented in TensorFlow.
This class is composed of four policy gradient (PG) algorithms:
- Q-based Policy Gradient (QPG): an "all-actions" advantage actor-critic
algorithm differing from A2C in that all action values are used to estimate the
policy gradient (as opposed to using only the action actually taken):
baseline = \sum_a pi_a * Q_a
loss = - \sum_a pi_a * (Q_a - baseline)
where (Q_a - baseline) is the usual advantage. QPG is also known as Mean
Actor-Critic (https://arxiv.org/abs/1709.00503).
- Regret policy gradient (RPG): a PG algorithm inspired by counterfactual regret
minimization (CFR). Unlike standard actor-critic methods (e.g. A2C), the loss is
defined purely in terms of thresholded regrets as follows:
baseline = \sum_a pi_a * Q_a
loss = regret = \sum_a relu(Q_a - baseline)
where gradients only flow through the action value (Q_a) part and are blocked on
the baseline part (which is trained separately by usual MSE loss).
The lack of a negative sign in front of the loss represents a switch from
gradient ascent on the score to descent on the loss.
- Regret Matching Policy Gradient (RMPG): inspired by regret-matching, the
policy gradient is weighted by the thresholded regret:
baseline = \sum_a pi_a * Q_a
loss = - \sum_a pi_a * relu(Q_a - baseline)
These algorithms were published in NeurIPS 2018. Paper title: "Actor-Critic
Policy Optimization in Partially Observable Multiagent Environment", the paper
is available at: https://arxiv.org/abs/1810.09026.
- Advantage Actor Critic (A2C): The popular advantage actor critic (A2C)
algorithm. The algorithm uses the baseline (Value function) as a control variate
to reduce variance of the policy gradient. The loss is only computed for the
actions actually taken in the episode as opposed to a loss computed for all
actions in the variants above.
advantages = returns - baseline
loss = -log(pi_a) * advantages
The algorithm can be found in the textbook:
https://incompleteideas.net/book/RLbook2018.pdf under the chapter on
`Policy Gradients`.
See open_spiel/python/algorithms/losses/rl_losses_test.py for an example of the
loss computation.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
import sonnet as snt
import tensorflow as tf
from open_spiel.python import rl_agent
from open_spiel.python.algorithms.losses import rl_losses
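# Editorial illustration (not part of the original module): a minimal NumPy
# sketch of the per-state loss formulas described in the module docstring.
# `pi` and `q_values` are hypothetical arrays holding action probabilities
# and action values for a single information state.
def _pg_losses_demo(pi, q_values):
  """Returns (qpg, rpg, rmpg) losses for one info state, per the docstring."""
  baseline = np.sum(pi * q_values)  # baseline = \sum_a pi_a * Q_a
  advantages = q_values - baseline
  qpg_loss = -np.sum(pi * advantages)  # QPG (Mean Actor-Critic)
  rpg_loss = np.sum(np.maximum(advantages, 0.0))  # RPG: thresholded regrets
  rmpg_loss = -np.sum(pi * np.maximum(advantages, 0.0))  # RMPG
  return qpg_loss, rpg_loss, rmpg_loss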
Transition = collections.namedtuple(
"Transition", "info_state action reward discount legal_actions_mask")
class PolicyGradient(rl_agent.AbstractAgent):
"""RPG Agent implementation in TensorFlow.
See open_spiel/python/examples/single_agent_catch.py for an usage example.
"""
def __init__(self,
session,
player_id,
info_state_size,
num_actions,
loss_str="rpg",
loss_class=None,
hidden_layers_sizes=(128,),
batch_size=128,
critic_learning_rate=0.01,
pi_learning_rate=0.001,
entropy_cost=0.01,
num_critic_before_pi=8,
additional_discount_factor=1.0):
"""Initialize the PolicyGradient agent.
Args:
session: Tensorflow session.
player_id: int, player identifier. Usually its position in the game.
info_state_size: int, info_state vector size.
num_actions: int, number of actions per info state.
loss_str: string or None. If string, must be one of ["rpg", "qpg", "rm",
"a2c"] and defined in `_get_loss_class`. If None, a loss class must be
passed through `loss_class`. Defaults to "rpg".
loss_class: Class or None. If Class, it must define the policy gradient
loss. If None a loss class in a string format must be passed through
`loss_str`. Defaults to None.
hidden_layers_sizes: iterable, defines the neural network layers. Defaults
to (128,), which produces a NN: [INPUT] -> [128] -> ReLU -> [OUTPUT].
batch_size: int, batch size to use for Q and Pi learning. Defaults to 128.
      critic_learning_rate: float, learning rate used for Critic (Q or V).
        Defaults to 0.01.
pi_learning_rate: float, learning rate used for Pi. Defaults to 0.001.
      entropy_cost: float, entropy cost used to multiply the entropy loss. Can
        be set to None to skip entropy computation. Defaults to 0.01.
num_critic_before_pi: int, number of Critic (Q or V) updates before each
Pi update. Defaults to 8 (every 8th critic learning step, Pi also
learns).
      additional_discount_factor: float, additional discount to compute returns.
        Defaults to 1.0, in which case, no extra discount is applied. Note that
        users must provide *only one of* `loss_str` or `loss_class`.
"""
assert bool(loss_str) ^ bool(loss_class), "Please provide only one option."
loss_class = loss_class if loss_class else self._get_loss_class(loss_str)
self.player_id = player_id
self._session = session
self._num_actions = num_actions
self._layer_sizes = hidden_layers_sizes
self._batch_size = batch_size
self._extra_discount = additional_discount_factor
self._num_critic_before_pi = num_critic_before_pi
self._episode_data = []
self._dataset = collections.defaultdict(list)
self._prev_time_step = None
self._prev_action = None
# Step counters
self._step_counter = 0
self._episode_counter = 0
self._num_learn_steps = 0
# Keep track of the last training loss achieved in an update step.
    self._last_critic_loss_value = None
    self._last_pi_loss_value = None
# Placeholders
self._info_state_ph = tf.placeholder(
shape=[None, info_state_size], dtype=tf.float32, name="info_state_ph")
self._action_ph = tf.placeholder(
shape=[None], dtype=tf.int32, name="action_ph")
self._return_ph = tf.placeholder(
shape=[None], dtype=tf.float32, name="return_ph")
# Network
    # Keep the final torso layer activated, since logit and q-value heads are
    # plugged on afterwards.
net_torso = snt.nets.MLP(
output_sizes=self._layer_sizes, activate_final=True)
torso_out = net_torso(self._info_state_ph)
self._policy_logits = snt.Linear(
output_size=self._num_actions, name="policy_head")(
torso_out)
self._policy_probs = tf.nn.softmax(self._policy_logits)
# Add baseline (V) head for A2C.
if loss_class.__name__ == "BatchA2CLoss":
self._baseline = tf.squeeze(
snt.Linear(output_size=1, name="baseline")(torso_out), axis=1)
else:
# Add q-values head otherwise
self._q_values = snt.Linear(
output_size=self._num_actions, name="q_values_head")(
torso_out)
# Critic loss
# Baseline loss in case of A2C
if loss_class.__name__ == "BatchA2CLoss":
self._critic_loss = tf.reduce_mean(
tf.losses.mean_squared_error(
labels=self._return_ph, predictions=self._baseline))
else:
# Q-loss otherwise.
action_indices = tf.stack(
[tf.range(tf.shape(self._q_values)[0]), self._action_ph], axis=-1)
value_predictions = tf.gather_nd(self._q_values, action_indices)
self._critic_loss = tf.reduce_mean(
tf.losses.mean_squared_error(
labels=self._return_ph, predictions=value_predictions))
critic_optimizer = tf.train.GradientDescentOptimizer(
learning_rate=critic_learning_rate)
self._critic_learn_step = critic_optimizer.minimize(self._critic_loss)
# Pi loss
pg_class = loss_class(entropy_cost=entropy_cost)
if loss_class.__name__ == "BatchA2CLoss":
self._pi_loss = pg_class.loss(
policy_logits=self._policy_logits,
baseline=self._baseline,
actions=self._action_ph,
returns=self._return_ph)
else:
self._pi_loss = pg_class.loss(
policy_logits=self._policy_logits, action_values=self._q_values)
pi_optimizer = tf.train.GradientDescentOptimizer(
learning_rate=pi_learning_rate)
self._pi_learn_step = pi_optimizer.minimize(self._pi_loss)
def _get_loss_class(self, loss_str):
if loss_str == "rpg":
return rl_losses.BatchRPGLoss
elif loss_str == "qpg":
return rl_losses.BatchQPGLoss
elif loss_str == "rm":
return rl_losses.BatchRMLoss
elif loss_str == "a2c":
return rl_losses.BatchA2CLoss
def _act(self, info_state, legal_actions):
# make a singleton batch for NN compatibility: [1, info_state_size]
info_state = np.reshape(info_state, [1, -1])
policy_probs = self._session.run(
self._policy_probs, feed_dict={self._info_state_ph: info_state})
# Remove illegal actions, re-normalize probs
probs = np.zeros(self._num_actions)
probs[legal_actions] = policy_probs[0][legal_actions]
probs /= sum(probs)
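    # Editorial example: with num_actions=3, legal_actions=[0, 2] and network
    # output [0.5, 0.3, 0.2], masking yields [0.5, 0.0, 0.2], which
    # renormalizes to [5/7, 0.0, 2/7].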
action = np.random.choice(len(probs), p=probs)
return action, probs
def step(self, time_step, is_evaluation=False):
"""Returns the action to be taken and updates the network if needed.
Args:
time_step: an instance of rl_environment.TimeStep.
is_evaluation: bool, whether this is a training or evaluation call.
Returns:
A `rl_agent.StepOutput` containing the action probs and chosen action.
"""
    # Act step: don't act at terminal info states or if it's not our turn.
if (not time_step.last()) and (
time_step.is_simultaneous_move() or
self.player_id == time_step.current_player()):
info_state = time_step.observations["info_state"][self.player_id]
legal_actions = time_step.observations["legal_actions"][self.player_id]
action, probs = self._act(info_state, legal_actions)
else:
action = None
probs = []
if not is_evaluation:
self._step_counter += 1
# Add data points to current episode buffer.
if self._prev_time_step:
self._add_transition(time_step)
# Episode done, add to dataset and maybe learn.
if time_step.last():
self._add_episode_data_to_dataset()
self._episode_counter += 1
if len(self._dataset["returns"]) >= self._batch_size:
self._critic_update()
self._num_learn_steps += 1
if self._num_learn_steps % self._num_critic_before_pi == 0:
self._pi_update()
self._dataset = collections.defaultdict(list)
self._prev_time_step = None
self._prev_action = None
return
else:
self._prev_time_step = time_step
self._prev_action = action
return rl_agent.StepOutput(action=action, probs=probs)
@property
def loss(self):
return (self._last_critic_loss_value, self._last_pi_loss_value)
def _add_episode_data_to_dataset(self):
"""Add episode data to the buffer."""
info_states = [data.info_state for data in self._episode_data]
rewards = [data.reward for data in self._episode_data]
discount = [data.discount for data in self._episode_data]
actions = [data.action for data in self._episode_data]
# Calculate returns
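    # Editorial note: returns are computed backwards through the episode as
    # G_t = r_t + discount_t * extra_discount * G_{t+1}, with the last
    # return initialized to the final reward.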
returns = np.array(rewards)
    for idx in reversed(range(len(rewards) - 1)):
returns[idx] = (
rewards[idx] +
discount[idx] * returns[idx + 1] * self._extra_discount)
# Add flattened data points to dataset
self._dataset["actions"].extend(actions)
self._dataset["returns"].extend(returns)
self._dataset["info_states"].extend(info_states)
self._episode_data = []
def _add_transition(self, time_step):
"""Adds intra-episode transition to the `_episode_data` buffer.
Adds the transition from `self._prev_time_step` to `time_step`.
Args:
time_step: an instance of rl_environment.TimeStep.
"""
assert self._prev_time_step is not None
legal_actions = (
self._prev_time_step.observations["legal_actions"][self.player_id])
legal_actions_mask = np.zeros(self._num_actions)
legal_actions_mask[legal_actions] = 1.0
transition = Transition(
info_state=(
self._prev_time_step.observations["info_state"][self.player_id][:]),
action=self._prev_action,
reward=time_step.rewards[self.player_id],
discount=time_step.discounts[self.player_id],
legal_actions_mask=legal_actions_mask)
self._episode_data.append(transition)
def _critic_update(self):
"""Compute the Critic loss on sampled transitions & perform a critic update.
Returns:
The average Critic loss obtained on this batch.
"""
# TODO(author3): illegal action handling.
critic_loss, _ = self._session.run(
[self._critic_loss, self._critic_learn_step],
feed_dict={
self._info_state_ph: self._dataset["info_states"],
self._action_ph: self._dataset["actions"],
self._return_ph: self._dataset["returns"],
})
self._last_critic_loss_value = critic_loss
return critic_loss
def _pi_update(self):
"""Compute the Pi loss on sampled transitions and perform a Pi update.
Returns:
The average Pi loss obtained on this batch.
"""
# TODO(author3): illegal action handling.
pi_loss, _ = self._session.run(
[self._pi_loss, self._pi_learn_step],
feed_dict={
self._info_state_ph: self._dataset["info_states"],
self._action_ph: self._dataset["actions"],
self._return_ph: self._dataset["returns"],
})
self._last_pi_loss_value = pi_loss
return pi_loss
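# Editorial usage sketch (not part of the original file): a minimal self-play
# training loop on Kuhn poker, assuming the standard open_spiel
# rl_environment API; the game, loss choice and episode count are
# illustrative only.
if __name__ == "__main__":
  from open_spiel.python import rl_environment
  env = rl_environment.Environment("kuhn_poker")
  info_state_size = env.observation_spec()["info_state"][0]
  num_actions = env.action_spec()["num_actions"]
  with tf.Session() as sess:
    agents = [
        PolicyGradient(
            sess,
            player_id=player_id,
            info_state_size=info_state_size,
            num_actions=num_actions,
            loss_str="a2c") for player_id in [0, 1]
    ]
    sess.run(tf.global_variables_initializer())
    for _ in range(100):
      time_step = env.reset()
      while not time_step.last():
        current_player = time_step.observations["current_player"]
        agent_output = agents[current_player].step(time_step)
        time_step = env.step([agent_output.action])
      # Every agent must observe the terminal step to close the episode.
      for agent in agents:
        agent.step(time_step)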
| <filename>open_spiel/python/algorithms/policy_gradient.py
# Copyright 2019 DeepMind Technologies Ltd. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
r"""Policy Gradient based agents implemented in TensorFlow.
This class is composed of four policy gradient (PG) algorithms:
- Q-based Policy Gradient (QPG): an "all-actions" advantage actor-critic
algorithm differing from A2C in that all action values are used to estimate the
policy gradient (as opposed to using only the action actually taken):
baseline = \sum_a pi_a * Q_a
loss = - \sum_a pi_a * (Q_a - baseline)
where (Q_a - baseline) is the usual advantage. QPG is also known as Mean
Actor-Critic (https://arxiv.org/abs/1709.00503).
- Regret policy gradient (RPG): a PG algorithm inspired by counterfactual regret
minimization (CFR). Unlike standard actor-critic methods (e.g. A2C), the loss is
defined purely in terms of thresholded regrets as follows:
baseline = \sum_a pi_a * Q_a
loss = regret = \sum_a relu(Q_a - baseline)
where gradients only flow through the action value (Q_a) part and are blocked on
the baseline part (which is trained separately by usual MSE loss).
The lack of a negative sign in front of the loss represents a switch from
gradient ascent on the score to descent on the loss.
- Regret Matching Policy Gradient (RMPG): inspired by regret-matching, the
policy gradient is weighted by the thresholded regret:
baseline = \sum_a pi_a * Q_a
loss = - \sum_a pi_a * relu(Q_a - baseline)
These algorithms were published in NeurIPS 2018. Paper title: "Actor-Critic
Policy Optimization in Partially Observable Multiagent Environment", the paper
is available at: https://arxiv.org/abs/1810.09026.
- Advantage Actor Critic (A2C): The popular advantage actor critic (A2C)
algorithm. The algorithm uses the baseline (Value function) as a control variate
to reduce variance of the policy gradient. The loss is only computed for the
actions actually taken in the episode as opposed to a loss computed for all
actions in the variants above.
advantages = returns - baseline
loss = -log(pi_a) * advantages
The algorithm can be found in the textbook:
https://incompleteideas.net/book/RLbook2018.pdf under the chapter on
`Policy Gradients`.
See open_spiel/python/algorithms/losses/rl_losses_test.py for an example of the
loss computation.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import numpy as np
import sonnet as snt
import tensorflow as tf
from open_spiel.python import rl_agent
from open_spiel.python.algorithms.losses import rl_losses
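# Editorial illustration (not part of the original module): a minimal NumPy
# sketch of the per-state loss formulas described in the module docstring.
# `pi` and `q_values` are hypothetical arrays holding action probabilities
# and action values for a single information state.
def _pg_losses_demo(pi, q_values):
  """Returns (qpg, rpg, rmpg) losses for one info state, per the docstring."""
  baseline = np.sum(pi * q_values)  # baseline = \sum_a pi_a * Q_a
  advantages = q_values - baseline
  qpg_loss = -np.sum(pi * advantages)  # QPG (Mean Actor-Critic)
  rpg_loss = np.sum(np.maximum(advantages, 0.0))  # RPG: thresholded regrets
  rmpg_loss = -np.sum(pi * np.maximum(advantages, 0.0))  # RMPG
  return qpg_loss, rpg_loss, rmpg_loss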
Transition = collections.namedtuple(
"Transition", "info_state action reward discount legal_actions_mask")
class PolicyGradient(rl_agent.AbstractAgent):
"""RPG Agent implementation in TensorFlow.
See open_spiel/python/examples/single_agent_catch.py for an usage example.
"""
def __init__(self,
session,
player_id,
info_state_size,
num_actions,
loss_str="rpg",
loss_class=None,
hidden_layers_sizes=(128,),
batch_size=128,
critic_learning_rate=0.01,
pi_learning_rate=0.001,
entropy_cost=0.01,
num_critic_before_pi=8,
additional_discount_factor=1.0):
"""Initialize the PolicyGradient agent.
Args:
session: Tensorflow session.
player_id: int, player identifier. Usually its position in the game.
info_state_size: int, info_state vector size.
num_actions: int, number of actions per info state.
loss_str: string or None. If string, must be one of ["rpg", "qpg", "rm",
"a2c"] and defined in `_get_loss_class`. If None, a loss class must be
passed through `loss_class`. Defaults to "rpg".
loss_class: Class or None. If Class, it must define the policy gradient
loss. If None a loss class in a string format must be passed through
`loss_str`. Defaults to None.
hidden_layers_sizes: iterable, defines the neural network layers. Defaults
to (128,), which produces a NN: [INPUT] -> [128] -> ReLU -> [OUTPUT].
batch_size: int, batch size to use for Q and Pi learning. Defaults to 128.
      critic_learning_rate: float, learning rate used for Critic (Q or V).
        Defaults to 0.01.
pi_learning_rate: float, learning rate used for Pi. Defaults to 0.001.
      entropy_cost: float, entropy cost used to multiply the entropy loss. Can
        be set to None to skip entropy computation. Defaults to 0.01.
num_critic_before_pi: int, number of Critic (Q or V) updates before each
Pi update. Defaults to 8 (every 8th critic learning step, Pi also
learns).
      additional_discount_factor: float, additional discount to compute returns.
        Defaults to 1.0, in which case, no extra discount is applied. Note that
        users must provide *only one of* `loss_str` or `loss_class`.
"""
assert bool(loss_str) ^ bool(loss_class), "Please provide only one option."
loss_class = loss_class if loss_class else self._get_loss_class(loss_str)
self.player_id = player_id
self._session = session
self._num_actions = num_actions
self._layer_sizes = hidden_layers_sizes
self._batch_size = batch_size
self._extra_discount = additional_discount_factor
self._num_critic_before_pi = num_critic_before_pi
self._episode_data = []
self._dataset = collections.defaultdict(list)
self._prev_time_step = None
self._prev_action = None
# Step counters
self._step_counter = 0
self._episode_counter = 0
self._num_learn_steps = 0
# Keep track of the last training loss achieved in an update step.
    self._last_critic_loss_value = None
    self._last_pi_loss_value = None
# Placeholders
self._info_state_ph = tf.placeholder(
shape=[None, info_state_size], dtype=tf.float32, name="info_state_ph")
self._action_ph = tf.placeholder(
shape=[None], dtype=tf.int32, name="action_ph")
self._return_ph = tf.placeholder(
shape=[None], dtype=tf.float32, name="return_ph")
# Network
    # Keep the final torso layer activated, since logit and q-value heads are
    # plugged on afterwards.
net_torso = snt.nets.MLP(
output_sizes=self._layer_sizes, activate_final=True)
torso_out = net_torso(self._info_state_ph)
self._policy_logits = snt.Linear(
output_size=self._num_actions, name="policy_head")(
torso_out)
self._policy_probs = tf.nn.softmax(self._policy_logits)
# Add baseline (V) head for A2C.
if loss_class.__name__ == "BatchA2CLoss":
self._baseline = tf.squeeze(
snt.Linear(output_size=1, name="baseline")(torso_out), axis=1)
else:
# Add q-values head otherwise
self._q_values = snt.Linear(
output_size=self._num_actions, name="q_values_head")(
torso_out)
# Critic loss
# Baseline loss in case of A2C
if loss_class.__name__ == "BatchA2CLoss":
self._critic_loss = tf.reduce_mean(
tf.losses.mean_squared_error(
labels=self._return_ph, predictions=self._baseline))
else:
# Q-loss otherwise.
action_indices = tf.stack(
[tf.range(tf.shape(self._q_values)[0]), self._action_ph], axis=-1)
value_predictions = tf.gather_nd(self._q_values, action_indices)
self._critic_loss = tf.reduce_mean(
tf.losses.mean_squared_error(
labels=self._return_ph, predictions=value_predictions))
critic_optimizer = tf.train.GradientDescentOptimizer(
learning_rate=critic_learning_rate)
self._critic_learn_step = critic_optimizer.minimize(self._critic_loss)
# Pi loss
pg_class = loss_class(entropy_cost=entropy_cost)
if loss_class.__name__ == "BatchA2CLoss":
self._pi_loss = pg_class.loss(
policy_logits=self._policy_logits,
baseline=self._baseline,
actions=self._action_ph,
returns=self._return_ph)
else:
self._pi_loss = pg_class.loss(
policy_logits=self._policy_logits, action_values=self._q_values)
pi_optimizer = tf.train.GradientDescentOptimizer(
learning_rate=pi_learning_rate)
self._pi_learn_step = pi_optimizer.minimize(self._pi_loss)
def _get_loss_class(self, loss_str):
if loss_str == "rpg":
return rl_losses.BatchRPGLoss
elif loss_str == "qpg":
return rl_losses.BatchQPGLoss
elif loss_str == "rm":
return rl_losses.BatchRMLoss
elif loss_str == "a2c":
return rl_losses.BatchA2CLoss
def _act(self, info_state, legal_actions):
# make a singleton batch for NN compatibility: [1, info_state_size]
info_state = np.reshape(info_state, [1, -1])
policy_probs = self._session.run(
self._policy_probs, feed_dict={self._info_state_ph: info_state})
# Remove illegal actions, re-normalize probs
probs = np.zeros(self._num_actions)
probs[legal_actions] = policy_probs[0][legal_actions]
probs /= sum(probs)
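    # Editorial example: with num_actions=3, legal_actions=[0, 2] and network
    # output [0.5, 0.3, 0.2], masking yields [0.5, 0.0, 0.2], which
    # renormalizes to [5/7, 0.0, 2/7].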
action = np.random.choice(len(probs), p=probs)
return action, probs
def step(self, time_step, is_evaluation=False):
"""Returns the action to be taken and updates the network if needed.
Args:
time_step: an instance of rl_environment.TimeStep.
is_evaluation: bool, whether this is a training or evaluation call.
Returns:
A `rl_agent.StepOutput` containing the action probs and chosen action.
"""
    # Act step: don't act at terminal info states or if it's not our turn.
if (not time_step.last()) and (
time_step.is_simultaneous_move() or
self.player_id == time_step.current_player()):
info_state = time_step.observations["info_state"][self.player_id]
legal_actions = time_step.observations["legal_actions"][self.player_id]
action, probs = self._act(info_state, legal_actions)
else:
action = None
probs = []
if not is_evaluation:
self._step_counter += 1
# Add data points to current episode buffer.
if self._prev_time_step:
self._add_transition(time_step)
# Episode done, add to dataset and maybe learn.
if time_step.last():
self._add_episode_data_to_dataset()
self._episode_counter += 1
if len(self._dataset["returns"]) >= self._batch_size:
self._critic_update()
self._num_learn_steps += 1
if self._num_learn_steps % self._num_critic_before_pi == 0:
self._pi_update()
self._dataset = collections.defaultdict(list)
self._prev_time_step = None
self._prev_action = None
return
else:
self._prev_time_step = time_step
self._prev_action = action
return rl_agent.StepOutput(action=action, probs=probs)
@property
def loss(self):
return (self._last_critic_loss_value, self._last_pi_loss_value)
def _add_episode_data_to_dataset(self):
"""Add episode data to the buffer."""
info_states = [data.info_state for data in self._episode_data]
rewards = [data.reward for data in self._episode_data]
discount = [data.discount for data in self._episode_data]
actions = [data.action for data in self._episode_data]
# Calculate returns
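    # Editorial note: returns are computed backwards through the episode as
    # G_t = r_t + discount_t * extra_discount * G_{t+1}, with the last
    # return initialized to the final reward.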
returns = np.array(rewards)
    for idx in reversed(range(len(rewards) - 1)):
returns[idx] = (
rewards[idx] +
discount[idx] * returns[idx + 1] * self._extra_discount)
# Add flattened data points to dataset
self._dataset["actions"].extend(actions)
self._dataset["returns"].extend(returns)
self._dataset["info_states"].extend(info_states)
self._episode_data = []
def _add_transition(self, time_step):
"""Adds intra-episode transition to the `_episode_data` buffer.
Adds the transition from `self._prev_time_step` to `time_step`.
Args:
time_step: an instance of rl_environment.TimeStep.
"""
assert self._prev_time_step is not None
legal_actions = (
self._prev_time_step.observations["legal_actions"][self.player_id])
legal_actions_mask = np.zeros(self._num_actions)
legal_actions_mask[legal_actions] = 1.0
transition = Transition(
info_state=(
self._prev_time_step.observations["info_state"][self.player_id][:]),
action=self._prev_action,
reward=time_step.rewards[self.player_id],
discount=time_step.discounts[self.player_id],
legal_actions_mask=legal_actions_mask)
self._episode_data.append(transition)
def _critic_update(self):
"""Compute the Critic loss on sampled transitions & perform a critic update.
Returns:
The average Critic loss obtained on this batch.
"""
# TODO(author3): illegal action handling.
critic_loss, _ = self._session.run(
[self._critic_loss, self._critic_learn_step],
feed_dict={
self._info_state_ph: self._dataset["info_states"],
self._action_ph: self._dataset["actions"],
self._return_ph: self._dataset["returns"],
})
self._last_critic_loss_value = critic_loss
return critic_loss
def _pi_update(self):
"""Compute the Pi loss on sampled transitions and perform a Pi update.
Returns:
The average Pi loss obtained on this batch.
"""
# TODO(author3): illegal action handling.
pi_loss, _ = self._session.run(
[self._pi_loss, self._pi_learn_step],
feed_dict={
self._info_state_ph: self._dataset["info_states"],
self._action_ph: self._dataset["actions"],
self._return_ph: self._dataset["returns"],
})
self._last_pi_loss_value = pi_loss
return pi_loss
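# Editorial usage sketch (not part of the original file): a minimal self-play
# training loop on Kuhn poker, assuming the standard open_spiel
# rl_environment API; the game, loss choice and episode count are
# illustrative only.
if __name__ == "__main__":
  from open_spiel.python import rl_environment
  env = rl_environment.Environment("kuhn_poker")
  info_state_size = env.observation_spec()["info_state"][0]
  num_actions = env.action_spec()["num_actions"]
  with tf.Session() as sess:
    agents = [
        PolicyGradient(
            sess,
            player_id=player_id,
            info_state_size=info_state_size,
            num_actions=num_actions,
            loss_str="a2c") for player_id in [0, 1]
    ]
    sess.run(tf.global_variables_initializer())
    for _ in range(100):
      time_step = env.reset()
      while not time_step.last():
        current_player = time_step.observations["current_player"]
        agent_output = agents[current_player].step(time_step)
        time_step = env.step([agent_output.action])
      # Every agent must observe the terminal step to close the episode.
      for agent in agents:
        agent.step(time_step)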
| en | 0.836105 | # Copyright 2019 DeepMind Technologies Ltd. All rights reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. Policy Gradient based agents implemented in TensorFlow. This class is composed of three policy gradient (PG) algorithms: - Q-based Policy Gradient (QPG): an "all-actions" advantage actor-critic algorithm differing from A2C in that all action values are used to estimate the policy gradient (as opposed to only using the action taken into account): baseline = \sum_a pi_a * Q_a loss = - \sum_a pi_a * (Q_a - baseline) where (Q_a - baseline) is the usual advantage. QPG is also known as Mean Actor-Critic (https://arxiv.org/abs/1709.00503). - Regret policy gradient (RPG): a PG algorithm inspired by counterfactual regret minimization (CFR). Unlike standard actor-critic methods (e.g. A2C), the loss is defined purely in terms of thresholded regrets as follows: baseline = \sum_a pi_a * Q_a loss = regret = \sum_a relu(Q_a - baseline) where gradients only flow through the action value (Q_a) part and are blocked on the baseline part (which is trained separately by usual MSE loss). The lack of negative sign in the front of the loss represents a switch from gradient ascent on the score to descent on the loss. - Regret Matching Policy Gradient (RMPG): inspired by regret-matching, the policy gradient is by weighted by the thresholded regret: baseline = \sum_a pi_a * Q_a loss = - \sum_a pi_a * relu(Q_a - baseline) These algorithms were published in NeurIPS 2018. Paper title: "Actor-Critic Policy Optimization in Partially Observable Multiagent Environment", the paper is available at: https://arxiv.org/abs/1810.09026. - Advantage Actor Critic (A2C): The popular advantage actor critic (A2C) algorithm. The algorithm uses the baseline (Value function) as a control variate to reduce variance of the policy gradient. The loss is only computed for the actions actually taken in the episode as opposed to a loss computed for all actions in the variants above. advantages = returns - baseline loss = -log(pi_a) * advantages The algorithm can be found in the textbook: https://incompleteideas.net/book/RLbook2018.pdf under the chapter on `Policy Gradients`. See open_spiel/python/algorithms/losses/rl_losses_test.py for an example of the loss computation. RPG Agent implementation in TensorFlow. See open_spiel/python/examples/single_agent_catch.py for an usage example. Initialize the PolicyGradient agent. Args: session: Tensorflow session. player_id: int, player identifier. Usually its position in the game. info_state_size: int, info_state vector size. num_actions: int, number of actions per info state. loss_str: string or None. If string, must be one of ["rpg", "qpg", "rm", "a2c"] and defined in `_get_loss_class`. If None, a loss class must be passed through `loss_class`. Defaults to "rpg". loss_class: Class or None. If Class, it must define the policy gradient loss. If None a loss class in a string format must be passed through `loss_str`. Defaults to None. 
hidden_layers_sizes: iterable, defines the neural network layers. Defaults to (128,), which produces a NN: [INPUT] -> [128] -> ReLU -> [OUTPUT]. batch_size: int, batch size to use for Q and Pi learning. Defaults to 128. critic_learning_rate: float, learning rate used for Critic (Q or V). Defaults to 0.001. pi_learning_rate: float, learning rate used for Pi. Defaults to 0.001. entropy_cost: float, entropy cost used to multiply the entropy loss. Can be set to None to skip entropy computation. Defaults to 0.001. num_critic_before_pi: int, number of Critic (Q or V) updates before each Pi update. Defaults to 8 (every 8th critic learning step, Pi also learns). additional_discount_factor: float, additional discount to compute returns. Defaults to 1.0, in which case, no extra discount is applied. None that users must provide *only one of* `loss_str` or `loss_class`. # Step counters # Keep track of the last training loss achieved in an update step. # Placeholders # Network # activate final as we plug logit and qvalue heads afterwards. # Add baseline (V) head for A2C. # Add q-values head otherwise # Critic loss # Baseline loss in case of A2C # Q-loss otherwise. # Pi loss # make a singleton batch for NN compatibility: [1, info_state_size] # Remove illegal actions, re-normalize probs Returns the action to be taken and updates the network if needed. Args: time_step: an instance of rl_environment.TimeStep. is_evaluation: bool, whether this is a training or evaluation call. Returns: A `rl_agent.StepOutput` containing the action probs and chosen action. # Act step: don't act at terminal info states or if its not our turn. # Add data points to current episode buffer. # Episode done, add to dataset and maybe learn. Add episode data to the buffer. # Calculate returns # Add flattened data points to dataset Adds intra-episode transition to the `_episode_data` buffer. Adds the transition from `self._prev_time_step` to `time_step`. Args: time_step: an instance of rl_environment.TimeStep. Compute the Critic loss on sampled transitions & perform a critic update. Returns: The average Critic loss obtained on this batch. # TODO(author3): illegal action handling. Compute the Pi loss on sampled transitions and perform a Pi update. Returns: The average Pi loss obtained on this batch. # TODO(author3): illegal action handling. | 2.289402 | 2 |